in_source_id (string, 13–58 chars) | issue (string, 3–241k chars) | before_files (list, 0–3 items) | after_files (list, 0–3 items) | pr_diff (string, 109–107M chars, nullable) |
---|---|---|---|---|
chainer__chainer-764 | cuda.cupy.clip errors
If I run the code
`cuda.cupy.clip(cuda.cupy.arange(10), 2, 7)`
I get the following error:
```
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-7-e529e5fea46e> in <module>()
----> 1 cuda.cupy.clip(cuda.cupy.arange(10), 2, 7)
/usr/local/lib/python2.7/dist-packages/cupy/math/misc.pyc in clip(a, a_min, a_max, out)
24 '''
25 # TODO(okuta): check type
---> 26 return a(a_min, a_max, out=out)
27
28
TypeError: 'cupy.core.core.ndarray' object is not callable
```
The expected output, matching the NumPy call `np.clip(np.arange(10), 2, 7)`, is `array([2, 2, 2, 3, 4, 5, 6, 7, 7, 7])`
| [
{
"content": "from cupy import core\n\n\n# TODO(okuta): Implement convolve\n\n\ndef clip(a, a_min, a_max, out=None):\n '''Clips the values of an array to a given interval.\n\n This is equivalent to ``maximum(minimum(a, a_max), a_min)``, while this\n function is more efficient.\n\n Args:\n a (cupy.ndarray): The source array.\n a_min (scalar or cupy.ndarray): The left side of the interval.\n a_max (scalar or cupy.ndarray): The right side of the interval.\n out (cupy.ndarray): Output array.\n\n Returns:\n cupy.ndarray: Clipped array.\n\n .. seealso:: :func:`numpy.clip`\n\n '''\n # TODO(okuta): check type\n return a(a_min, a_max, out=out)\n\n\nsqrt = core.create_ufunc(\n 'cupy_sqrt',\n # I think this order is a bug of NumPy, though we select this \"buggy\"\n # behavior for compatibility with NumPy.\n ('f->f', 'd->d', 'e->e'),\n 'out0 = sqrt(in0)',\n doc='''Elementwise positive square-root function.\n\n .. note::\n This ufunc outputs float32 arrays for float16 arrays input by default as\n well as NumPy 1.9. If you want to override this behavior, specify the\n dtype argument explicitly, or use ``cupy.math.misc.sqrt_fixed`` instead.\n\n .. seealso:: :data:`numpy.sqrt`\n\n ''')\n\n\nsqrt_fixed = core.sqrt_fixed\n\n\nsquare = core.create_ufunc(\n 'cupy_square',\n ('b->b', 'B->B', 'h->h', 'H->H', 'i->i', 'I->I', 'l->l', 'L->L', 'q->q',\n 'Q->Q', 'e->e', 'f->f', 'd->d'),\n 'out0 = in0 * in0',\n doc='''Elementwise square function.\n\n .. seealso:: :data:`numpy.square`\n\n ''')\n\n\nabsolute = core.absolute\n\n\n# TODO(beam2d): Implement it\n# fabs\n\n\n_unsigned_sign = 'out0 = in0 > 0'\nsign = core.create_ufunc(\n 'cupy_sign',\n ('b->b', ('B->B', _unsigned_sign), 'h->h', ('H->H', _unsigned_sign),\n 'i->i', ('I->I', _unsigned_sign), 'l->l', ('L->L', _unsigned_sign),\n 'q->q', ('Q->Q', _unsigned_sign), 'e->e', 'f->f', 'd->d'),\n 'out0 = (in0 > 0) - (in0 < 0)',\n doc='''Elementwise sign function.\n\n It returns -1, 0, or 1 depending on the sign of the input.\n\n .. seealso:: :data:`numpy.sign`\n\n ''')\n\n\n_float_maximum = \\\n 'out0 = isnan(in0) ? in0 : isnan(in1) ? in1 : max(in0, in1)'\nmaximum = core.create_ufunc(\n 'cupy_maximum',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q',\n ('ee->e', _float_maximum),\n ('ff->f', _float_maximum),\n ('dd->d', _float_maximum)),\n 'out0 = max(in0, in1)',\n doc='''Takes the maximum of two arrays elementwise.\n\n If NaN appears, it returns the NaN.\n\n .. seealso:: :data:`numpy.maximum`\n\n ''')\n\n\n_float_minimum = \\\n 'out0 = isnan(in0) ? in0 : isnan(in1) ? in1 : min(in0, in1)'\nminimum = core.create_ufunc(\n 'cupy_minimum',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q',\n ('ee->e', _float_minimum),\n ('ff->f', _float_minimum),\n ('dd->d', _float_minimum)),\n 'out0 = min(in0, in1)',\n doc='''Takes the minimum of two arrays elementwise.\n\n If NaN appears, it returns the NaN.\n\n .. seealso:: :data:`numpy.minimum`\n\n ''')\n\n\nfmax = core.create_ufunc(\n 'cupy_fmax',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'),\n 'out0 = max(in0, in1)',\n doc='''Takes the maximum of two arrays elementwise.\n\n If NaN appears, it returns the other operand.\n\n .. 
seealso:: :data:`numpy.fmax`\n\n ''')\n\n\nfmin = core.create_ufunc(\n 'cupy_fmin',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'),\n 'out0 = min(in0, in1)',\n doc='''Takes the minimum of two arrays elementwise.\n\n If NaN apperas, it returns the other operand.\n\n .. seealso:: :data:`numpy.fmin`\n\n ''')\n\n\n# TODO(okuta): Implement nan_to_num\n\n\n# TODO(okuta): Implement real_if_close\n\n\n# TODO(okuta): Implement interp\n",
"path": "cupy/math/misc.py"
}
] | [
{
"content": "from cupy import core\n\n\n# TODO(okuta): Implement convolve\n\n\ndef clip(a, a_min, a_max, out=None):\n '''Clips the values of an array to a given interval.\n\n This is equivalent to ``maximum(minimum(a, a_max), a_min)``, while this\n function is more efficient.\n\n Args:\n a (cupy.ndarray): The source array.\n a_min (scalar or cupy.ndarray): The left side of the interval.\n a_max (scalar or cupy.ndarray): The right side of the interval.\n out (cupy.ndarray): Output array.\n\n Returns:\n cupy.ndarray: Clipped array.\n\n .. seealso:: :func:`numpy.clip`\n\n '''\n # TODO(okuta): check type\n return a.clip(a_min, a_max, out=out)\n\n\nsqrt = core.create_ufunc(\n 'cupy_sqrt',\n # I think this order is a bug of NumPy, though we select this \"buggy\"\n # behavior for compatibility with NumPy.\n ('f->f', 'd->d', 'e->e'),\n 'out0 = sqrt(in0)',\n doc='''Elementwise positive square-root function.\n\n .. note::\n This ufunc outputs float32 arrays for float16 arrays input by default as\n well as NumPy 1.9. If you want to override this behavior, specify the\n dtype argument explicitly, or use ``cupy.math.misc.sqrt_fixed`` instead.\n\n .. seealso:: :data:`numpy.sqrt`\n\n ''')\n\n\nsqrt_fixed = core.sqrt_fixed\n\n\nsquare = core.create_ufunc(\n 'cupy_square',\n ('b->b', 'B->B', 'h->h', 'H->H', 'i->i', 'I->I', 'l->l', 'L->L', 'q->q',\n 'Q->Q', 'e->e', 'f->f', 'd->d'),\n 'out0 = in0 * in0',\n doc='''Elementwise square function.\n\n .. seealso:: :data:`numpy.square`\n\n ''')\n\n\nabsolute = core.absolute\n\n\n# TODO(beam2d): Implement it\n# fabs\n\n\n_unsigned_sign = 'out0 = in0 > 0'\nsign = core.create_ufunc(\n 'cupy_sign',\n ('b->b', ('B->B', _unsigned_sign), 'h->h', ('H->H', _unsigned_sign),\n 'i->i', ('I->I', _unsigned_sign), 'l->l', ('L->L', _unsigned_sign),\n 'q->q', ('Q->Q', _unsigned_sign), 'e->e', 'f->f', 'd->d'),\n 'out0 = (in0 > 0) - (in0 < 0)',\n doc='''Elementwise sign function.\n\n It returns -1, 0, or 1 depending on the sign of the input.\n\n .. seealso:: :data:`numpy.sign`\n\n ''')\n\n\n_float_maximum = \\\n 'out0 = isnan(in0) ? in0 : isnan(in1) ? in1 : max(in0, in1)'\nmaximum = core.create_ufunc(\n 'cupy_maximum',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q',\n ('ee->e', _float_maximum),\n ('ff->f', _float_maximum),\n ('dd->d', _float_maximum)),\n 'out0 = max(in0, in1)',\n doc='''Takes the maximum of two arrays elementwise.\n\n If NaN appears, it returns the NaN.\n\n .. seealso:: :data:`numpy.maximum`\n\n ''')\n\n\n_float_minimum = \\\n 'out0 = isnan(in0) ? in0 : isnan(in1) ? in1 : min(in0, in1)'\nminimum = core.create_ufunc(\n 'cupy_minimum',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q',\n ('ee->e', _float_minimum),\n ('ff->f', _float_minimum),\n ('dd->d', _float_minimum)),\n 'out0 = min(in0, in1)',\n doc='''Takes the minimum of two arrays elementwise.\n\n If NaN appears, it returns the NaN.\n\n .. seealso:: :data:`numpy.minimum`\n\n ''')\n\n\nfmax = core.create_ufunc(\n 'cupy_fmax',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'),\n 'out0 = max(in0, in1)',\n doc='''Takes the maximum of two arrays elementwise.\n\n If NaN appears, it returns the other operand.\n\n .. 
seealso:: :data:`numpy.fmax`\n\n ''')\n\n\nfmin = core.create_ufunc(\n 'cupy_fmin',\n ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l',\n 'LL->L', 'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'),\n 'out0 = min(in0, in1)',\n doc='''Takes the minimum of two arrays elementwise.\n\n If NaN apperas, it returns the other operand.\n\n .. seealso:: :data:`numpy.fmin`\n\n ''')\n\n\n# TODO(okuta): Implement nan_to_num\n\n\n# TODO(okuta): Implement real_if_close\n\n\n# TODO(okuta): Implement interp\n",
"path": "cupy/math/misc.py"
}
] | diff --git a/cupy/math/misc.py b/cupy/math/misc.py
index 434da3ce9c3b..01697aee8c0b 100644
--- a/cupy/math/misc.py
+++ b/cupy/math/misc.py
@@ -23,7 +23,7 @@ def clip(a, a_min, a_max, out=None):
'''
# TODO(okuta): check type
- return a(a_min, a_max, out=out)
+ return a.clip(a_min, a_max, out=out)
sqrt = core.create_ufunc(
diff --git a/tests/cupy_tests/math_tests/test_misc.py b/tests/cupy_tests/math_tests/test_misc.py
index 65396d8166c5..2a6a90409cb7 100644
--- a/tests/cupy_tests/math_tests/test_misc.py
+++ b/tests/cupy_tests/math_tests/test_misc.py
@@ -50,6 +50,12 @@ def test_clip1(self, xp, dtype):
a = testing.shaped_arange((2, 3, 4), xp, dtype)
return a.clip(3, 13)
+ @testing.for_all_dtypes()
+ @testing.numpy_cupy_array_equal()
+ def test_clip_func(self, xp, dtype):
+ a = testing.shaped_arange((2, 3, 4), xp, dtype)
+ return xp.clip(a, 3, 13)
+
@testing.for_all_dtypes()
@testing.numpy_cupy_array_equal()
def test_clip2(self, xp, dtype):
|
dask__dask-6299 | importing fails when calling python -OO
This was discovered by `xarray`'s `upstream-dev` CI ([environment](https://dev.azure.com/xarray/xarray/_build/results?buildId=2996&view=logs&j=2280efed-fda1-53bd-9213-1fa8ec9b4fa8&t=031ddd67-e55f-5fbd-2283-1ff4dfed6587)) a few days ago, but we were a bit slow in reporting it, so the problem is also present in the newly released `2.18.0`.
The problem is this:
```
$ python -OO -c 'import dask.array'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File ".../lib/python3.8/site-packages/dask/array/__init__.py", line 26, in <module>
from .routines import (
File ".../lib/python3.8/site-packages/dask/array/routines.py", line 18, in <module>
from .creation import arange, diag, empty, indices
File ".../lib/python3.8/site-packages/dask/array/creation.py", line 26, in <module>
from .wrap import empty, ones, zeros, full
File ".../lib/python3.8/site-packages/dask/array/wrap.py", line 173, in <module>
full.__name__ = _full.__name__
AttributeError: 'functools.partial' object has no attribute '__name__'
```
Without the `-OO` optimization, the import works as expected.
See also pydata/xarray#4124
| [
{
"content": "from functools import partial\nfrom itertools import product\n\nimport numpy as np\n\nfrom tlz import curry\n\nfrom ..base import tokenize\nfrom ..utils import funcname\nfrom .core import Array, normalize_chunks\nfrom .utils import meta_from_array\n\n\ndef _parse_wrap_args(func, args, kwargs, shape):\n if isinstance(shape, np.ndarray):\n shape = shape.tolist()\n\n if not isinstance(shape, (tuple, list)):\n shape = (shape,)\n\n name = kwargs.pop(\"name\", None)\n chunks = kwargs.pop(\"chunks\", \"auto\")\n\n dtype = kwargs.pop(\"dtype\", None)\n if dtype is None:\n dtype = func(shape, *args, **kwargs).dtype\n dtype = np.dtype(dtype)\n\n chunks = normalize_chunks(chunks, shape, dtype=dtype)\n\n name = name or funcname(func) + \"-\" + tokenize(\n func, shape, chunks, dtype, args, kwargs\n )\n\n return {\n \"shape\": shape,\n \"dtype\": dtype,\n \"kwargs\": kwargs,\n \"chunks\": chunks,\n \"name\": name,\n }\n\n\ndef wrap_func_shape_as_first_arg(func, *args, **kwargs):\n \"\"\"\n Transform np creation function into blocked version\n \"\"\"\n if \"shape\" not in kwargs:\n shape, args = args[0], args[1:]\n else:\n shape = kwargs.pop(\"shape\")\n\n if isinstance(shape, Array):\n raise TypeError(\n \"Dask array input not supported. \"\n \"Please use tuple, list, or a 1D numpy array instead.\"\n )\n\n parsed = _parse_wrap_args(func, args, kwargs, shape)\n shape = parsed[\"shape\"]\n dtype = parsed[\"dtype\"]\n chunks = parsed[\"chunks\"]\n name = parsed[\"name\"]\n kwargs = parsed[\"kwargs\"]\n\n keys = product([name], *[range(len(bd)) for bd in chunks])\n shapes = product(*chunks)\n func = partial(func, dtype=dtype, **kwargs)\n vals = ((func,) + (s,) + args for s in shapes)\n\n dsk = dict(zip(keys, vals))\n return Array(dsk, name, chunks, dtype=dtype)\n\n\ndef wrap_func_like(func, *args, **kwargs):\n \"\"\"\n Transform np creation function into blocked version\n \"\"\"\n x = args[0]\n meta = meta_from_array(x)\n shape = kwargs.get(\"shape\", x.shape)\n\n parsed = _parse_wrap_args(func, args, kwargs, shape)\n shape = parsed[\"shape\"]\n dtype = parsed[\"dtype\"]\n chunks = parsed[\"chunks\"]\n name = parsed[\"name\"]\n kwargs = parsed[\"kwargs\"]\n\n keys = product([name], *[range(len(bd)) for bd in chunks])\n shapes = product(*chunks)\n shapes = list(shapes)\n kw = [kwargs for _ in shapes]\n for i, s in enumerate(list(shapes)):\n kw[i][\"shape\"] = s\n vals = ((partial(func, dtype=dtype, **k),) + args for (k, s) in zip(kw, shapes))\n\n dsk = dict(zip(keys, vals))\n\n return Array(dsk, name, chunks, meta=meta.astype(dtype))\n\n\ndef wrap_func_like_safe(func, func_like, *args, **kwargs):\n \"\"\"\n Safe implementation for wrap_func_like(), attempts to use func_like(),\n if the shape keyword argument, falls back to func().\n \"\"\"\n try:\n return func_like(*args, **kwargs)\n except TypeError:\n return func(*args, **kwargs)\n\n\n@curry\ndef wrap(wrap_func, func, **kwargs):\n func_like = kwargs.pop(\"func_like\", None)\n if func_like is None:\n f = partial(wrap_func, func, **kwargs)\n else:\n f = partial(wrap_func, func_like, **kwargs)\n template = \"\"\"\n Blocked variant of %(name)s\n\n Follows the signature of %(name)s exactly except that it also features\n optional keyword arguments ``chunks: int, tuple, or dict`` and ``name: str``.\n\n Original signature follows below.\n \"\"\"\n if func.__doc__ is not None:\n f.__doc__ = template % {\"name\": func.__name__} + func.__doc__\n f.__name__ = \"blocked_\" + func.__name__\n return f\n\n\nw = wrap(wrap_func_shape_as_first_arg)\n\nones = 
w(np.ones, dtype=\"f8\")\nzeros = w(np.zeros, dtype=\"f8\")\nempty = w(np.empty, dtype=\"f8\")\n\n\nw_like = wrap(wrap_func_like_safe)\n\n\nempty_like = w_like(np.empty, func_like=np.empty_like)\n\n\n# full and full_like require special casing due to argument check on fill_value\n# Generate wrapped functions only once\n_full = w(np.full)\n_full_like = w_like(np.full, func_like=np.full_like)\n\n\ndef full(shape, fill_value, *args, **kwargs):\n # np.isscalar has somewhat strange behavior:\n # https://docs.scipy.org/doc/numpy/reference/generated/numpy.isscalar.html\n if np.ndim(fill_value) != 0:\n raise ValueError(\n f\"fill_value must be scalar. Received {type(fill_value).__name__} instead.\"\n )\n return _full(shape=shape, fill_value=fill_value, *args, **kwargs)\n\n\ndef full_like(a, fill_value, *args, **kwargs):\n if np.ndim(fill_value) != 0:\n raise ValueError(\n f\"fill_value must be scalar. Received {type(fill_value).__name__} instead.\"\n )\n return _full_like(a=a, fill_value=fill_value, *args, **kwargs,)\n\n\nfull.__doc__ = _full.__doc__\nfull.__name__ = _full.__name__\nfull_like.__doc__ = _full_like.__doc__\nfull_like.__name__ = _full_like.__name__\n",
"path": "dask/array/wrap.py"
}
] | [
{
"content": "from functools import partial\nfrom itertools import product\n\nimport numpy as np\n\nfrom tlz import curry\n\nfrom ..base import tokenize\nfrom ..utils import funcname\nfrom .core import Array, normalize_chunks\nfrom .utils import meta_from_array\n\n\ndef _parse_wrap_args(func, args, kwargs, shape):\n if isinstance(shape, np.ndarray):\n shape = shape.tolist()\n\n if not isinstance(shape, (tuple, list)):\n shape = (shape,)\n\n name = kwargs.pop(\"name\", None)\n chunks = kwargs.pop(\"chunks\", \"auto\")\n\n dtype = kwargs.pop(\"dtype\", None)\n if dtype is None:\n dtype = func(shape, *args, **kwargs).dtype\n dtype = np.dtype(dtype)\n\n chunks = normalize_chunks(chunks, shape, dtype=dtype)\n\n name = name or funcname(func) + \"-\" + tokenize(\n func, shape, chunks, dtype, args, kwargs\n )\n\n return {\n \"shape\": shape,\n \"dtype\": dtype,\n \"kwargs\": kwargs,\n \"chunks\": chunks,\n \"name\": name,\n }\n\n\ndef wrap_func_shape_as_first_arg(func, *args, **kwargs):\n \"\"\"\n Transform np creation function into blocked version\n \"\"\"\n if \"shape\" not in kwargs:\n shape, args = args[0], args[1:]\n else:\n shape = kwargs.pop(\"shape\")\n\n if isinstance(shape, Array):\n raise TypeError(\n \"Dask array input not supported. \"\n \"Please use tuple, list, or a 1D numpy array instead.\"\n )\n\n parsed = _parse_wrap_args(func, args, kwargs, shape)\n shape = parsed[\"shape\"]\n dtype = parsed[\"dtype\"]\n chunks = parsed[\"chunks\"]\n name = parsed[\"name\"]\n kwargs = parsed[\"kwargs\"]\n\n keys = product([name], *[range(len(bd)) for bd in chunks])\n shapes = product(*chunks)\n func = partial(func, dtype=dtype, **kwargs)\n vals = ((func,) + (s,) + args for s in shapes)\n\n dsk = dict(zip(keys, vals))\n return Array(dsk, name, chunks, dtype=dtype)\n\n\ndef wrap_func_like(func, *args, **kwargs):\n \"\"\"\n Transform np creation function into blocked version\n \"\"\"\n x = args[0]\n meta = meta_from_array(x)\n shape = kwargs.get(\"shape\", x.shape)\n\n parsed = _parse_wrap_args(func, args, kwargs, shape)\n shape = parsed[\"shape\"]\n dtype = parsed[\"dtype\"]\n chunks = parsed[\"chunks\"]\n name = parsed[\"name\"]\n kwargs = parsed[\"kwargs\"]\n\n keys = product([name], *[range(len(bd)) for bd in chunks])\n shapes = product(*chunks)\n shapes = list(shapes)\n kw = [kwargs for _ in shapes]\n for i, s in enumerate(list(shapes)):\n kw[i][\"shape\"] = s\n vals = ((partial(func, dtype=dtype, **k),) + args for (k, s) in zip(kw, shapes))\n\n dsk = dict(zip(keys, vals))\n\n return Array(dsk, name, chunks, meta=meta.astype(dtype))\n\n\ndef wrap_func_like_safe(func, func_like, *args, **kwargs):\n \"\"\"\n Safe implementation for wrap_func_like(), attempts to use func_like(),\n if the shape keyword argument, falls back to func().\n \"\"\"\n try:\n return func_like(*args, **kwargs)\n except TypeError:\n return func(*args, **kwargs)\n\n\n@curry\ndef wrap(wrap_func, func, **kwargs):\n func_like = kwargs.pop(\"func_like\", None)\n if func_like is None:\n f = partial(wrap_func, func, **kwargs)\n else:\n f = partial(wrap_func, func_like, **kwargs)\n template = \"\"\"\n Blocked variant of %(name)s\n\n Follows the signature of %(name)s exactly except that it also features\n optional keyword arguments ``chunks: int, tuple, or dict`` and ``name: str``.\n\n Original signature follows below.\n \"\"\"\n if func.__doc__ is not None:\n f.__doc__ = template % {\"name\": func.__name__} + func.__doc__\n f.__name__ = \"blocked_\" + func.__name__\n return f\n\n\nw = wrap(wrap_func_shape_as_first_arg)\n\nones = 
w(np.ones, dtype=\"f8\")\nzeros = w(np.zeros, dtype=\"f8\")\nempty = w(np.empty, dtype=\"f8\")\n\n\nw_like = wrap(wrap_func_like_safe)\n\n\nempty_like = w_like(np.empty, func_like=np.empty_like)\n\n\n# full and full_like require special casing due to argument check on fill_value\n# Generate wrapped functions only once\n_full = w(np.full)\n_full_like = w_like(np.full, func_like=np.full_like)\n\n\ndef full(shape, fill_value, *args, **kwargs):\n # np.isscalar has somewhat strange behavior:\n # https://docs.scipy.org/doc/numpy/reference/generated/numpy.isscalar.html\n if np.ndim(fill_value) != 0:\n raise ValueError(\n f\"fill_value must be scalar. Received {type(fill_value).__name__} instead.\"\n )\n return _full(shape=shape, fill_value=fill_value, *args, **kwargs)\n\n\ndef full_like(a, fill_value, *args, **kwargs):\n if np.ndim(fill_value) != 0:\n raise ValueError(\n f\"fill_value must be scalar. Received {type(fill_value).__name__} instead.\"\n )\n return _full_like(a=a, fill_value=fill_value, *args, **kwargs,)\n\n\nfull.__doc__ = _full.__doc__\nfull_like.__doc__ = _full_like.__doc__\n",
"path": "dask/array/wrap.py"
}
] | diff --git a/dask/array/wrap.py b/dask/array/wrap.py
index af3d1487f1a..7d06ce01c1b 100644
--- a/dask/array/wrap.py
+++ b/dask/array/wrap.py
@@ -170,6 +170,4 @@ def full_like(a, fill_value, *args, **kwargs):
full.__doc__ = _full.__doc__
-full.__name__ = _full.__name__
full_like.__doc__ = _full_like.__doc__
-full_like.__name__ = _full_like.__name__
|
unionai-oss__pandera-1419 | Date type not exported
**Describe the bug**
In the `__all__` list [here](https://github.com/unionai-oss/pandera/blob/37c24d94ae719dcf4cdc36d1f204478539fce74a/pandera/__init__.py#L104-L106), the type `Date` is missing, causing complaints from mypy if you refer to the type as `pa.Date` -- you have to fully qualify it as `pa.typing.common.Date`.
- [x] I have checked that this issue has not already been reported.
- [x] I have confirmed this bug exists on the latest version of pandera.
- [x] (optional) I have confirmed this bug exists on the master branch of pandera.
**Note**: Please read [this guide](https://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports) detailing how to provide the necessary information for us to reproduce your bug.
#### Code Sample, a copy-pastable example
```python
import pandera as pa
# Mypy errors [name-defined]
class ErrorSchema(pa.DataFrameModel):
date_col: pa.Date
# Mypy is happy
class NoErrorSchema(pa.DataFrameModel):
date_col: pa.typing.common.Date
```
#### Expected behavior
No errors from mypy in both cases
#### Desktop (please complete the following information):
- OS: [Manjaro Linux kernel 6.1.60 - 1]
- Browser [Firefox 119.0]
- Version [pandera 0.17.2]
| [
{
"content": "\"\"\"A flexible and expressive pandas validation library.\"\"\"\nimport platform\n\nimport pandera.backends\nfrom pandera import errors, external_config, typing\nfrom pandera.accessors import pandas_accessor\nfrom pandera.api import extensions\nfrom pandera.api.checks import Check\nfrom pandera.api.hypotheses import Hypothesis\nfrom pandera.api.pandas.array import SeriesSchema\nfrom pandera.api.pandas.container import DataFrameSchema\nfrom pandera.api.pandas.components import Column, Index, MultiIndex\nfrom pandera.api.pandas.model import DataFrameModel, SchemaModel\nfrom pandera.api.pandas.model_components import Field, check, dataframe_check\nfrom pandera.decorators import check_input, check_io, check_output, check_types\nfrom pandera.dtypes import (\n Bool,\n Category,\n Complex,\n Complex64,\n Complex128,\n DataType,\n Date,\n DateTime,\n Decimal,\n Float,\n Float16,\n Float32,\n Float64,\n Int,\n Int8,\n Int16,\n Int32,\n Int64,\n String,\n Timedelta,\n Timestamp,\n UInt,\n UInt8,\n UInt16,\n UInt32,\n UInt64,\n)\nfrom pandera.engines.numpy_engine import Object\nfrom pandera.engines.pandas_engine import (\n BOOL,\n INT8,\n INT16,\n INT32,\n INT64,\n PANDAS_1_2_0_PLUS,\n PANDAS_1_3_0_PLUS,\n STRING,\n UINT8,\n UINT16,\n UINT32,\n UINT64,\n pandas_version,\n)\n\nimport pandera.backends.base.builtin_checks\nimport pandera.backends.base.builtin_hypotheses\nimport pandera.backends.pandas\n\nfrom pandera.schema_inference.pandas import infer_schema\nfrom pandera.version import __version__\n\n\nif platform.system() != \"Windows\":\n # pylint: disable=ungrouped-imports\n from pandera.dtypes import Complex256, Float128\n\n\ntry:\n import dask.dataframe\n\n from pandera.accessors import dask_accessor\nexcept ImportError:\n pass\n\n\ntry:\n import pyspark.pandas\n\n from pandera.accessors import pyspark_accessor\nexcept ImportError:\n pass\n\ntry:\n import modin.pandas\n\n from pandera.accessors import modin_accessor\nexcept ImportError:\n pass\n\n__all__ = [\n # dtypes\n \"Bool\",\n \"Category\",\n \"Complex\",\n \"Complex64\",\n \"Complex128\",\n \"Complex256\",\n \"DataType\",\n \"DateTime\",\n \"Float\",\n \"Float16\",\n \"Float32\",\n \"Float64\",\n \"Float128\",\n \"Int\",\n \"Int8\",\n \"Int16\",\n \"Int32\",\n \"Int64\",\n \"String\",\n \"Timedelta\",\n \"Timestamp\",\n \"UInt\",\n \"UInt8\",\n \"UInt16\",\n \"UInt32\",\n \"UInt64\",\n # numpy_engine\n \"Object\",\n # pandas_engine\n \"BOOL\",\n \"INT8\",\n \"INT16\",\n \"INT32\",\n \"INT64\",\n \"PANDAS_1_3_0_PLUS\",\n \"STRING\",\n \"UINT8\",\n \"UINT16\",\n \"UINT32\",\n \"UINT64\",\n # pandera.engines.pandas_engine\n \"pandas_version\",\n # checks\n \"Check\",\n # decorators\n \"check_input\",\n \"check_io\",\n \"check_output\",\n \"check_types\",\n # hypotheses\n \"Hypothesis\",\n # model\n \"DataFrameModel\",\n \"SchemaModel\",\n # model_components\n \"Field\",\n \"check\",\n \"dataframe_check\",\n # schema_components\n \"Column\",\n \"Index\",\n \"MultiIndex\",\n # schema_inference\n \"infer_schema\",\n # schemas\n \"DataFrameSchema\",\n \"SeriesSchema\",\n # version\n \"__version__\",\n]\n",
"path": "pandera/__init__.py"
}
] | [
{
"content": "\"\"\"A flexible and expressive pandas validation library.\"\"\"\nimport platform\n\nimport pandera.backends\nfrom pandera import errors, external_config, typing\nfrom pandera.accessors import pandas_accessor\nfrom pandera.api import extensions\nfrom pandera.api.checks import Check\nfrom pandera.api.hypotheses import Hypothesis\nfrom pandera.api.pandas.array import SeriesSchema\nfrom pandera.api.pandas.container import DataFrameSchema\nfrom pandera.api.pandas.components import Column, Index, MultiIndex\nfrom pandera.api.pandas.model import DataFrameModel, SchemaModel\nfrom pandera.api.pandas.model_components import Field, check, dataframe_check\nfrom pandera.decorators import check_input, check_io, check_output, check_types\nfrom pandera.dtypes import (\n Bool,\n Category,\n Complex,\n Complex64,\n Complex128,\n DataType,\n Date,\n DateTime,\n Decimal,\n Float,\n Float16,\n Float32,\n Float64,\n Int,\n Int8,\n Int16,\n Int32,\n Int64,\n String,\n Timedelta,\n Timestamp,\n UInt,\n UInt8,\n UInt16,\n UInt32,\n UInt64,\n)\nfrom pandera.engines.numpy_engine import Object\nfrom pandera.engines.pandas_engine import (\n BOOL,\n INT8,\n INT16,\n INT32,\n INT64,\n PANDAS_1_2_0_PLUS,\n PANDAS_1_3_0_PLUS,\n STRING,\n UINT8,\n UINT16,\n UINT32,\n UINT64,\n pandas_version,\n)\n\nimport pandera.backends.base.builtin_checks\nimport pandera.backends.base.builtin_hypotheses\nimport pandera.backends.pandas\n\nfrom pandera.schema_inference.pandas import infer_schema\nfrom pandera.version import __version__\n\n\nif platform.system() != \"Windows\":\n # pylint: disable=ungrouped-imports\n from pandera.dtypes import Complex256, Float128\n\n\ntry:\n import dask.dataframe\n\n from pandera.accessors import dask_accessor\nexcept ImportError:\n pass\n\n\ntry:\n import pyspark.pandas\n\n from pandera.accessors import pyspark_accessor\nexcept ImportError:\n pass\n\ntry:\n import modin.pandas\n\n from pandera.accessors import modin_accessor\nexcept ImportError:\n pass\n\n__all__ = [\n # dtypes\n \"Bool\",\n \"Category\",\n \"Complex\",\n \"Complex64\",\n \"Complex128\",\n \"Complex256\",\n \"Date\",\n \"DataType\",\n \"DateTime\",\n \"Float\",\n \"Float16\",\n \"Float32\",\n \"Float64\",\n \"Float128\",\n \"Int\",\n \"Int8\",\n \"Int16\",\n \"Int32\",\n \"Int64\",\n \"String\",\n \"Timedelta\",\n \"Timestamp\",\n \"UInt\",\n \"UInt8\",\n \"UInt16\",\n \"UInt32\",\n \"UInt64\",\n # numpy_engine\n \"Object\",\n # pandas_engine\n \"BOOL\",\n \"INT8\",\n \"INT16\",\n \"INT32\",\n \"INT64\",\n \"PANDAS_1_3_0_PLUS\",\n \"STRING\",\n \"UINT8\",\n \"UINT16\",\n \"UINT32\",\n \"UINT64\",\n # pandera.engines.pandas_engine\n \"pandas_version\",\n # checks\n \"Check\",\n # decorators\n \"check_input\",\n \"check_io\",\n \"check_output\",\n \"check_types\",\n # hypotheses\n \"Hypothesis\",\n # model\n \"DataFrameModel\",\n \"SchemaModel\",\n # model_components\n \"Field\",\n \"check\",\n \"dataframe_check\",\n # schema_components\n \"Column\",\n \"Index\",\n \"MultiIndex\",\n # schema_inference\n \"infer_schema\",\n # schemas\n \"DataFrameSchema\",\n \"SeriesSchema\",\n # version\n \"__version__\",\n]\n",
"path": "pandera/__init__.py"
}
] | diff --git a/pandera/__init__.py b/pandera/__init__.py
index 1ebee0126..ecbc07a7c 100644
--- a/pandera/__init__.py
+++ b/pandera/__init__.py
@@ -101,6 +101,7 @@
"Complex64",
"Complex128",
"Complex256",
+ "Date",
"DataType",
"DateTime",
"Float",
|
pyodide__pyodide-2939 | Add lzma
As mentioned by @hoodmane in https://github.com/pyodide/pyodide/discussions/2930#discussioncomment-3316181
> Is there an issue open about lzma? What is our position on it again? That we want it but there is no emscripten port and we haven't gotten to it?
I think the main concern was the size increase for everyone vs. the few people actually needing it. Depending on the size, maybe we could make it an unvendored stdlib package (or include it by default if the size is negligible).
| [
{
"content": "import contextlib\nimport functools\nimport os\nimport subprocess\nimport sys\nfrom collections.abc import Generator, Iterable, Iterator, Mapping\nfrom pathlib import Path\n\nimport tomli\nfrom packaging.tags import Tag, compatible_tags, cpython_tags\nfrom packaging.utils import parse_wheel_filename\n\nfrom .io import parse_package_config\n\n\ndef emscripten_version() -> str:\n return get_make_flag(\"PYODIDE_EMSCRIPTEN_VERSION\")\n\n\ndef platform() -> str:\n emscripten_version = get_make_flag(\"PYODIDE_EMSCRIPTEN_VERSION\")\n version = emscripten_version.replace(\".\", \"_\")\n return f\"emscripten_{version}_wasm32\"\n\n\ndef pyodide_tags() -> Iterator[Tag]:\n \"\"\"\n Returns the sequence of tag triples for the Pyodide interpreter.\n\n The sequence is ordered in decreasing specificity.\n \"\"\"\n PYMAJOR = get_make_flag(\"PYMAJOR\")\n PYMINOR = get_make_flag(\"PYMINOR\")\n PLATFORM = platform()\n python_version = (int(PYMAJOR), int(PYMINOR))\n yield from cpython_tags(platforms=[PLATFORM], python_version=python_version)\n yield from compatible_tags(platforms=[PLATFORM], python_version=python_version)\n\n\ndef find_matching_wheels(wheel_paths: Iterable[Path]) -> Iterator[Path]:\n \"\"\"\n Returns the sequence wheels whose tags match the Pyodide interpreter.\n\n Parameters\n ----------\n wheel_paths\n A list of paths to wheels\n\n Returns\n -------\n The subset of wheel_paths that have tags that match the Pyodide interpreter.\n \"\"\"\n wheel_paths = list(wheel_paths)\n wheel_tags_list: list[frozenset[Tag]] = []\n for wheel in wheel_paths:\n _, _, _, tags = parse_wheel_filename(wheel.name)\n wheel_tags_list.append(tags)\n for supported_tag in pyodide_tags():\n for wheel_path, wheel_tags in zip(wheel_paths, wheel_tags_list):\n if supported_tag in wheel_tags:\n yield wheel_path\n\n\nUNVENDORED_STDLIB_MODULES = {\"test\", \"distutils\"}\n\nALWAYS_PACKAGES = {\n \"pyparsing\",\n \"packaging\",\n \"micropip\",\n}\n\nCORE_PACKAGES = {\n \"micropip\",\n \"pyparsing\",\n \"pytz\",\n \"packaging\",\n \"Jinja2\",\n \"regex\",\n \"fpcast-test\",\n \"sharedlib-test-py\",\n \"cpp-exceptions-test\",\n \"ssl\",\n \"pytest\",\n \"tblib\",\n}\n\nCORE_SCIPY_PACKAGES = {\n \"numpy\",\n \"scipy\",\n \"pandas\",\n \"matplotlib\",\n \"scikit-learn\",\n \"joblib\",\n \"pytest\",\n}\n\n\ndef _parse_package_subset(query: str | None) -> set[str]:\n \"\"\"Parse the list of packages specified with PYODIDE_PACKAGES env var.\n\n Also add the list of mandatory packages: [\"pyparsing\", \"packaging\",\n \"micropip\"]\n\n Supports following meta-packages,\n - 'core': corresponds to packages needed to run the core test suite\n {\"micropip\", \"pyparsing\", \"pytz\", \"packaging\", \"Jinja2\", \"fpcast-test\"}. 
This is the default option\n if query is None.\n - 'min-scipy-stack': includes the \"core\" meta-package as well as some of the\n core packages from the scientific python stack and their dependencies:\n {\"numpy\", \"scipy\", \"pandas\", \"matplotlib\", \"scikit-learn\", \"joblib\", \"pytest\"}.\n This option is non exhaustive and is mainly intended to make build faster\n while testing a diverse set of scientific packages.\n - '*': corresponds to all packages (returns None)\n\n Note: None as input is equivalent to PYODIDE_PACKAGES being unset and leads\n to only the core packages being built.\n\n Returns:\n a set of package names to build or None (build all packages).\n \"\"\"\n if query is None:\n query = \"core\"\n\n packages = {el.strip() for el in query.split(\",\")}\n packages.update(ALWAYS_PACKAGES)\n packages.update(UNVENDORED_STDLIB_MODULES)\n # handle meta-packages\n if \"core\" in packages:\n packages |= CORE_PACKAGES\n packages.discard(\"core\")\n if \"min-scipy-stack\" in packages:\n packages |= CORE_PACKAGES | CORE_SCIPY_PACKAGES\n packages.discard(\"min-scipy-stack\")\n\n # Hack to deal with the circular dependence between soupsieve and\n # beautifulsoup4\n if \"beautifulsoup4\" in packages:\n packages.add(\"soupsieve\")\n packages.discard(\"\")\n return packages\n\n\ndef get_make_flag(name: str) -> str:\n \"\"\"Get flags from makefile.envs.\n\n For building packages we currently use:\n SIDE_MODULE_LDFLAGS\n SIDE_MODULE_CFLAGS\n SIDE_MODULE_CXXFLAGS\n TOOLSDIR\n \"\"\"\n return get_make_environment_vars()[name]\n\n\ndef get_pyversion() -> str:\n PYMAJOR = get_make_flag(\"PYMAJOR\")\n PYMINOR = get_make_flag(\"PYMINOR\")\n return f\"python{PYMAJOR}.{PYMINOR}\"\n\n\ndef get_hostsitepackages() -> str:\n return get_make_flag(\"HOSTSITEPACKAGES\")\n\n\[email protected]\ndef get_make_environment_vars() -> dict[str, str]:\n \"\"\"Load environment variables from Makefile.envs\n\n This allows us to set all build vars in one place\"\"\"\n\n PYODIDE_ROOT = get_pyodide_root()\n environment = {}\n result = subprocess.run(\n [\"make\", \"-f\", str(PYODIDE_ROOT / \"Makefile.envs\"), \".output_vars\"],\n capture_output=True,\n text=True,\n )\n for line in result.stdout.splitlines():\n equalPos = line.find(\"=\")\n if equalPos != -1:\n varname = line[0:equalPos]\n value = line[equalPos + 1 :]\n value = value.strip(\"'\").strip()\n environment[varname] = value\n return environment\n\n\ndef search_pyodide_root(curdir: str | Path, *, max_depth: int = 5) -> Path:\n \"\"\"\n Recursively search for the root of the Pyodide repository,\n by looking for the pyproject.toml file in the parent directories\n which contains [tool.pyodide] section.\n \"\"\"\n\n # We want to include \"curdir\" in parent_dirs, so add a garbage suffix\n parent_dirs = (Path(curdir) / \"garbage\").parents[:max_depth]\n\n for base in parent_dirs:\n pyproject_file = base / \"pyproject.toml\"\n\n if not pyproject_file.is_file():\n continue\n\n try:\n with pyproject_file.open(\"rb\") as f:\n configs = tomli.load(f)\n except tomli.TOMLDecodeError:\n raise ValueError(f\"Could not parse {pyproject_file}.\")\n\n if \"tool\" in configs and \"pyodide\" in configs[\"tool\"]:\n return base\n\n raise FileNotFoundError(\n \"Could not find Pyodide root directory. 
If you are not in the Pyodide directory, set `PYODIDE_ROOT=<pyodide-root-directory>`.\"\n )\n\n\ndef init_environment() -> None:\n if os.environ.get(\"__LOADED_PYODIDE_ENV\"):\n return\n os.environ[\"__LOADED_PYODIDE_ENV\"] = \"1\"\n # If we are building docs, we don't need to know the PYODIDE_ROOT\n if \"sphinx\" in sys.modules:\n os.environ[\"PYODIDE_ROOT\"] = \"\"\n\n if \"PYODIDE_ROOT\" in os.environ:\n os.environ[\"PYODIDE_ROOT\"] = str(Path(os.environ[\"PYODIDE_ROOT\"]).resolve())\n else:\n os.environ[\"PYODIDE_ROOT\"] = str(search_pyodide_root(os.getcwd()))\n\n os.environ.update(get_make_environment_vars())\n try:\n hostsitepackages = get_hostsitepackages()\n pythonpath = [\n hostsitepackages,\n ]\n os.environ[\"PYTHONPATH\"] = \":\".join(pythonpath)\n except KeyError:\n pass\n os.environ[\"BASH_ENV\"] = \"\"\n get_unisolated_packages()\n\n\[email protected]\ndef get_pyodide_root() -> Path:\n init_environment()\n return Path(os.environ[\"PYODIDE_ROOT\"])\n\n\[email protected]\ndef get_unisolated_packages() -> list[str]:\n import json\n\n if \"UNISOLATED_PACKAGES\" in os.environ:\n return json.loads(os.environ[\"UNISOLATED_PACKAGES\"])\n PYODIDE_ROOT = get_pyodide_root()\n unisolated_file = PYODIDE_ROOT / \"unisolated.txt\"\n if unisolated_file.exists():\n # in xbuild env, read from file\n unisolated_packages = unisolated_file.read_text().splitlines()\n else:\n unisolated_packages = []\n for pkg in (PYODIDE_ROOT / \"packages\").glob(\"**/meta.yaml\"):\n config = parse_package_config(pkg, check=False)\n if config.get(\"build\", {}).get(\"cross-build-env\", False):\n unisolated_packages.append(config[\"package\"][\"name\"])\n # TODO: remove setuptools_rust from this when they release the next version.\n unisolated_packages.append(\"setuptools_rust\")\n os.environ[\"UNISOLATED_PACKAGES\"] = json.dumps(unisolated_packages)\n return unisolated_packages\n\n\[email protected]\ndef replace_env(build_env: Mapping[str, str]) -> Generator[None, None, None]:\n old_environ = dict(os.environ)\n os.environ.clear()\n os.environ.update(build_env)\n try:\n yield\n finally:\n os.environ.clear()\n os.environ.update(old_environ)\n",
"path": "pyodide-build/pyodide_build/common.py"
}
] | [
{
"content": "import contextlib\nimport functools\nimport os\nimport subprocess\nimport sys\nfrom collections.abc import Generator, Iterable, Iterator, Mapping\nfrom pathlib import Path\n\nimport tomli\nfrom packaging.tags import Tag, compatible_tags, cpython_tags\nfrom packaging.utils import parse_wheel_filename\n\nfrom .io import parse_package_config\n\n\ndef emscripten_version() -> str:\n return get_make_flag(\"PYODIDE_EMSCRIPTEN_VERSION\")\n\n\ndef platform() -> str:\n emscripten_version = get_make_flag(\"PYODIDE_EMSCRIPTEN_VERSION\")\n version = emscripten_version.replace(\".\", \"_\")\n return f\"emscripten_{version}_wasm32\"\n\n\ndef pyodide_tags() -> Iterator[Tag]:\n \"\"\"\n Returns the sequence of tag triples for the Pyodide interpreter.\n\n The sequence is ordered in decreasing specificity.\n \"\"\"\n PYMAJOR = get_make_flag(\"PYMAJOR\")\n PYMINOR = get_make_flag(\"PYMINOR\")\n PLATFORM = platform()\n python_version = (int(PYMAJOR), int(PYMINOR))\n yield from cpython_tags(platforms=[PLATFORM], python_version=python_version)\n yield from compatible_tags(platforms=[PLATFORM], python_version=python_version)\n\n\ndef find_matching_wheels(wheel_paths: Iterable[Path]) -> Iterator[Path]:\n \"\"\"\n Returns the sequence wheels whose tags match the Pyodide interpreter.\n\n Parameters\n ----------\n wheel_paths\n A list of paths to wheels\n\n Returns\n -------\n The subset of wheel_paths that have tags that match the Pyodide interpreter.\n \"\"\"\n wheel_paths = list(wheel_paths)\n wheel_tags_list: list[frozenset[Tag]] = []\n for wheel in wheel_paths:\n _, _, _, tags = parse_wheel_filename(wheel.name)\n wheel_tags_list.append(tags)\n for supported_tag in pyodide_tags():\n for wheel_path, wheel_tags in zip(wheel_paths, wheel_tags_list):\n if supported_tag in wheel_tags:\n yield wheel_path\n\n\nUNVENDORED_STDLIB_MODULES = {\"test\", \"distutils\"}\n\nALWAYS_PACKAGES = {\n \"pyparsing\",\n \"packaging\",\n \"micropip\",\n}\n\nCORE_PACKAGES = {\n \"micropip\",\n \"pyparsing\",\n \"pytz\",\n \"packaging\",\n \"Jinja2\",\n \"regex\",\n \"fpcast-test\",\n \"sharedlib-test-py\",\n \"cpp-exceptions-test\",\n \"ssl\",\n \"lzma\",\n \"pytest\",\n \"tblib\",\n}\n\nCORE_SCIPY_PACKAGES = {\n \"numpy\",\n \"scipy\",\n \"pandas\",\n \"matplotlib\",\n \"scikit-learn\",\n \"joblib\",\n \"pytest\",\n}\n\n\ndef _parse_package_subset(query: str | None) -> set[str]:\n \"\"\"Parse the list of packages specified with PYODIDE_PACKAGES env var.\n\n Also add the list of mandatory packages: [\"pyparsing\", \"packaging\",\n \"micropip\"]\n\n Supports following meta-packages,\n - 'core': corresponds to packages needed to run the core test suite\n {\"micropip\", \"pyparsing\", \"pytz\", \"packaging\", \"Jinja2\", \"fpcast-test\"}. 
This is the default option\n if query is None.\n - 'min-scipy-stack': includes the \"core\" meta-package as well as some of the\n core packages from the scientific python stack and their dependencies:\n {\"numpy\", \"scipy\", \"pandas\", \"matplotlib\", \"scikit-learn\", \"joblib\", \"pytest\"}.\n This option is non exhaustive and is mainly intended to make build faster\n while testing a diverse set of scientific packages.\n - '*': corresponds to all packages (returns None)\n\n Note: None as input is equivalent to PYODIDE_PACKAGES being unset and leads\n to only the core packages being built.\n\n Returns:\n a set of package names to build or None (build all packages).\n \"\"\"\n if query is None:\n query = \"core\"\n\n packages = {el.strip() for el in query.split(\",\")}\n packages.update(ALWAYS_PACKAGES)\n packages.update(UNVENDORED_STDLIB_MODULES)\n # handle meta-packages\n if \"core\" in packages:\n packages |= CORE_PACKAGES\n packages.discard(\"core\")\n if \"min-scipy-stack\" in packages:\n packages |= CORE_PACKAGES | CORE_SCIPY_PACKAGES\n packages.discard(\"min-scipy-stack\")\n\n # Hack to deal with the circular dependence between soupsieve and\n # beautifulsoup4\n if \"beautifulsoup4\" in packages:\n packages.add(\"soupsieve\")\n packages.discard(\"\")\n return packages\n\n\ndef get_make_flag(name: str) -> str:\n \"\"\"Get flags from makefile.envs.\n\n For building packages we currently use:\n SIDE_MODULE_LDFLAGS\n SIDE_MODULE_CFLAGS\n SIDE_MODULE_CXXFLAGS\n TOOLSDIR\n \"\"\"\n return get_make_environment_vars()[name]\n\n\ndef get_pyversion() -> str:\n PYMAJOR = get_make_flag(\"PYMAJOR\")\n PYMINOR = get_make_flag(\"PYMINOR\")\n return f\"python{PYMAJOR}.{PYMINOR}\"\n\n\ndef get_hostsitepackages() -> str:\n return get_make_flag(\"HOSTSITEPACKAGES\")\n\n\[email protected]\ndef get_make_environment_vars() -> dict[str, str]:\n \"\"\"Load environment variables from Makefile.envs\n\n This allows us to set all build vars in one place\"\"\"\n\n PYODIDE_ROOT = get_pyodide_root()\n environment = {}\n result = subprocess.run(\n [\"make\", \"-f\", str(PYODIDE_ROOT / \"Makefile.envs\"), \".output_vars\"],\n capture_output=True,\n text=True,\n )\n for line in result.stdout.splitlines():\n equalPos = line.find(\"=\")\n if equalPos != -1:\n varname = line[0:equalPos]\n value = line[equalPos + 1 :]\n value = value.strip(\"'\").strip()\n environment[varname] = value\n return environment\n\n\ndef search_pyodide_root(curdir: str | Path, *, max_depth: int = 5) -> Path:\n \"\"\"\n Recursively search for the root of the Pyodide repository,\n by looking for the pyproject.toml file in the parent directories\n which contains [tool.pyodide] section.\n \"\"\"\n\n # We want to include \"curdir\" in parent_dirs, so add a garbage suffix\n parent_dirs = (Path(curdir) / \"garbage\").parents[:max_depth]\n\n for base in parent_dirs:\n pyproject_file = base / \"pyproject.toml\"\n\n if not pyproject_file.is_file():\n continue\n\n try:\n with pyproject_file.open(\"rb\") as f:\n configs = tomli.load(f)\n except tomli.TOMLDecodeError:\n raise ValueError(f\"Could not parse {pyproject_file}.\")\n\n if \"tool\" in configs and \"pyodide\" in configs[\"tool\"]:\n return base\n\n raise FileNotFoundError(\n \"Could not find Pyodide root directory. 
If you are not in the Pyodide directory, set `PYODIDE_ROOT=<pyodide-root-directory>`.\"\n )\n\n\ndef init_environment() -> None:\n if os.environ.get(\"__LOADED_PYODIDE_ENV\"):\n return\n os.environ[\"__LOADED_PYODIDE_ENV\"] = \"1\"\n # If we are building docs, we don't need to know the PYODIDE_ROOT\n if \"sphinx\" in sys.modules:\n os.environ[\"PYODIDE_ROOT\"] = \"\"\n\n if \"PYODIDE_ROOT\" in os.environ:\n os.environ[\"PYODIDE_ROOT\"] = str(Path(os.environ[\"PYODIDE_ROOT\"]).resolve())\n else:\n os.environ[\"PYODIDE_ROOT\"] = str(search_pyodide_root(os.getcwd()))\n\n os.environ.update(get_make_environment_vars())\n try:\n hostsitepackages = get_hostsitepackages()\n pythonpath = [\n hostsitepackages,\n ]\n os.environ[\"PYTHONPATH\"] = \":\".join(pythonpath)\n except KeyError:\n pass\n os.environ[\"BASH_ENV\"] = \"\"\n get_unisolated_packages()\n\n\[email protected]\ndef get_pyodide_root() -> Path:\n init_environment()\n return Path(os.environ[\"PYODIDE_ROOT\"])\n\n\[email protected]\ndef get_unisolated_packages() -> list[str]:\n import json\n\n if \"UNISOLATED_PACKAGES\" in os.environ:\n return json.loads(os.environ[\"UNISOLATED_PACKAGES\"])\n PYODIDE_ROOT = get_pyodide_root()\n unisolated_file = PYODIDE_ROOT / \"unisolated.txt\"\n if unisolated_file.exists():\n # in xbuild env, read from file\n unisolated_packages = unisolated_file.read_text().splitlines()\n else:\n unisolated_packages = []\n for pkg in (PYODIDE_ROOT / \"packages\").glob(\"**/meta.yaml\"):\n config = parse_package_config(pkg, check=False)\n if config.get(\"build\", {}).get(\"cross-build-env\", False):\n unisolated_packages.append(config[\"package\"][\"name\"])\n # TODO: remove setuptools_rust from this when they release the next version.\n unisolated_packages.append(\"setuptools_rust\")\n os.environ[\"UNISOLATED_PACKAGES\"] = json.dumps(unisolated_packages)\n return unisolated_packages\n\n\[email protected]\ndef replace_env(build_env: Mapping[str, str]) -> Generator[None, None, None]:\n old_environ = dict(os.environ)\n os.environ.clear()\n os.environ.update(build_env)\n try:\n yield\n finally:\n os.environ.clear()\n os.environ.update(old_environ)\n",
"path": "pyodide-build/pyodide_build/common.py"
}
] | diff --git a/Makefile.envs b/Makefile.envs
index 207c7ecdcb5..d53d266ef91 100644
--- a/Makefile.envs
+++ b/Makefile.envs
@@ -145,7 +145,7 @@ export MAIN_MODULE_CFLAGS= $(CFLAGS_BASE) \
-I$(PYTHONINCLUDE) \
-s EXCEPTION_CATCHING_ALLOWED=['we only want to allow exception handling in side modules']
-export STDLIB_MODULE_CFLAGS= $(SIDE_MODULE_CFLAGS) -I Include/ -I .
+export STDLIB_MODULE_CFLAGS= $(SIDE_MODULE_CFLAGS) -I Include/ -I . -I Include/internal/
# For RUST
export CARGO_HOME ?= $(HOME)/.cargo
diff --git a/docs/project/changelog.md b/docs/project/changelog.md
index 436c35dd548..985fd82f212 100644
--- a/docs/project/changelog.md
+++ b/docs/project/changelog.md
@@ -12,6 +12,10 @@ substitutions:
# Change Log
+## Unreleased
+
+- New packages: the standard library lzma module {pr}`2939`
+
## Version 0.21.0
_August 9, 2022_
diff --git a/packages/liblzma/meta.yaml b/packages/liblzma/meta.yaml
new file mode 100644
index 00000000000..a3d8bf53f81
--- /dev/null
+++ b/packages/liblzma/meta.yaml
@@ -0,0 +1,25 @@
+package:
+ name: liblzma
+ version: 5.2.2
+
+source:
+ url: https://github.com/xz-mirror/xz/releases/download/v5.2.2/xz-5.2.2.tar.gz
+ sha256: 73df4d5d34f0468bd57d09f2d8af363e95ed6cc3a4a86129d2f2c366259902a2
+
+build:
+ library: true
+ script: |
+ emconfigure ./configure \
+ CFLAGS="-fPIC" \
+ --disable-xz \
+ --disable-xzdec \
+ --disable-lzmadec \
+ --disable-lzmainfo \
+ --disable-lzma-links \
+ --disable-scripts \
+ --disable-doc \
+ --enable-shared=no \
+ --prefix=${WASM_LIBRARY_DIR}
+
+ emmake make -j ${PYODIDE_JOBS:-3}
+ emmake make install
diff --git a/packages/lzma/empty/.keep b/packages/lzma/empty/.keep
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/packages/lzma/meta.yaml b/packages/lzma/meta.yaml
new file mode 100644
index 00000000000..507bcafca3c
--- /dev/null
+++ b/packages/lzma/meta.yaml
@@ -0,0 +1,23 @@
+package:
+ name: lzma
+ version: 1.0.0 # Nonesense
+ _cpython_dynlib: true
+source:
+ path: empty
+build:
+ sharedlibrary: true
+ script: |
+ mkdir dist
+ export DISTDIR=$(pwd)/dist
+ cd $CPYTHONBUILD
+ emcc $STDLIB_MODULE_CFLAGS -c Modules/_lzmamodule.c -o Modules/_lzmamodule.o \
+ $(pkg-config --cflags --dont-define-prefix liblzma)
+
+ emcc Modules/_lzmamodule.o -o $DISTDIR/_lzma.so $SIDE_MODULE_LDFLAGS \
+ $(pkg-config --libs --dont-define-prefix liblzma)
+requirements:
+ run:
+ - liblzma
+test:
+ imports:
+ - lzma
diff --git a/packages/lzma/test_lzma.py b/packages/lzma/test_lzma.py
new file mode 100644
index 00000000000..937af92873f
--- /dev/null
+++ b/packages/lzma/test_lzma.py
@@ -0,0 +1,17 @@
+from pytest_pyodide import run_in_pyodide
+
+
+@run_in_pyodide(packages=["test", "lzma"], pytest_assert_rewrites=False)
+def test_lzma(selenium):
+ # TODO: libregrtest.main(["test_lzma"]) doesn't collect any tests for some unknown reason.
+
+ import test.test_lzma
+ import unittest
+
+ suite = unittest.TestSuite(
+ [unittest.TestLoader().loadTestsFromModule(test.test_lzma)]
+ )
+
+ runner = unittest.TextTestRunner(verbosity=2)
+ result = runner.run(suite)
+ assert result.wasSuccessful()
diff --git a/pyodide-build/pyodide_build/common.py b/pyodide-build/pyodide_build/common.py
index 9ad476f243b..13f1acdef7d 100644
--- a/pyodide-build/pyodide_build/common.py
+++ b/pyodide-build/pyodide_build/common.py
@@ -80,6 +80,7 @@ def find_matching_wheels(wheel_paths: Iterable[Path]) -> Iterator[Path]:
"sharedlib-test-py",
"cpp-exceptions-test",
"ssl",
+ "lzma",
"pytest",
"tblib",
}
|
wemake-services__wemake-python-styleguide-776 | Add `reveal_type` to forbidden functions
Now it is not recognised as invalid.
However, there's no reason to use it in production.
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nThis module contains list of white- and black-listed ``python`` members.\n\nIt contains lists of keywords and built-in functions we discourage to use.\nIt also contains some exceptions that we allow to use in our codebase.\n\"\"\"\n\nimport re\n\nfrom typing_extensions import Final\n\n#: List of functions we forbid to use.\nFUNCTIONS_BLACKLIST: Final = frozenset((\n # Code generation:\n 'eval',\n 'exec',\n 'compile',\n\n # Termination:\n 'exit',\n 'quit',\n\n # Magic:\n 'globals',\n 'locals',\n 'vars',\n 'dir',\n\n # IO:\n 'input', # print is handled via `flake8-print`\n 'breakpoint',\n\n # Attribute access:\n 'hasattr',\n 'delattr',\n\n # Gratis:\n 'copyright',\n 'help',\n 'credits',\n\n # Dynamic imports:\n '__import__',\n\n # OOP:\n 'staticmethod',\n))\n\n#: List of module metadata we forbid to use.\nMODULE_METADATA_VARIABLES_BLACKLIST: Final = frozenset((\n '__author__',\n '__all__',\n '__version__',\n '__about__',\n))\n\n#: List of variable names we forbid to use.\nVARIABLE_NAMES_BLACKLIST: Final = frozenset((\n # Meaningless words:\n 'data',\n 'result',\n 'results',\n 'item',\n 'items',\n 'value',\n 'values',\n 'val',\n 'vals',\n 'var',\n 'vars',\n 'variable',\n 'content',\n 'contents',\n 'info',\n 'handle',\n 'handler',\n 'file',\n 'obj',\n 'objects',\n 'objs',\n 'some',\n 'do',\n 'param',\n 'params',\n 'parameters',\n\n # Confuseables:\n 'no',\n 'true',\n 'false',\n\n # Names from examples:\n 'foo',\n 'bar',\n 'baz',\n))\n\n#: List of special names that are used only as first argument in methods.\nSPECIAL_ARGUMENT_NAMES_WHITELIST: Final = frozenset((\n 'self',\n 'cls',\n 'mcs',\n))\n\n#: List of all magic methods from the python docs.\nALL_MAGIC_METHODS: Final = frozenset((\n '__new__',\n '__init__',\n '__del__',\n\n '__repr__',\n '__str__',\n '__bytes__',\n '__format__',\n\n '__lt__',\n '__le__',\n '__eq__',\n '__ne__',\n '__gt__',\n '__ge__',\n\n '__hash__',\n '__bool__',\n\n '__getattr__',\n '__getattribute__',\n '__setattr__',\n '__delattr__',\n '__dir__',\n\n '__get__',\n '__set__',\n '__delete__',\n '__set_name__',\n\n '__init_subclass__',\n '__instancecheck__',\n '__subclasscheck__',\n '__class_getitem__',\n\n '__call__',\n '__len__',\n '__length_hint__',\n '__getitem__',\n '__setitem__',\n '__delitem__',\n '__missing__',\n '__iter__',\n '__reversed__',\n '__contains__',\n\n '__add__',\n '__sub__',\n '__mul__',\n '__matmul__',\n '__truediv__',\n '__floordiv__',\n '__mod__',\n '__divmod__',\n '__pow__',\n '__lshift__',\n '__rshift__',\n '__and__',\n '__xor__',\n '__or__',\n '__radd__',\n '__rsub__',\n '__rmul__',\n '__rmatmul__',\n '__rtruediv__',\n '__rfloordiv__',\n '__rmod__',\n '__rdivmod__',\n '__rpow__',\n '__rlshift__',\n '__rrshift__',\n '__rand__',\n '__rxor__',\n '__ror__',\n '__iadd__',\n '__isub__',\n '__imul__',\n '__imatmul__',\n '__itruediv__',\n '__ifloordiv__',\n '__imod__',\n '__ipow__',\n '__ilshift__',\n '__irshift__',\n '__iand__',\n '__ixor__',\n '__ior__',\n '__neg__',\n '__pos__',\n '__abs__',\n '__invert__',\n '__complex__',\n '__int__',\n '__float__',\n '__index__',\n '__round__',\n '__trunc__',\n '__floor__',\n '__ceil__',\n\n '__enter__',\n '__exit__',\n\n '__await__',\n '__aiter__',\n '__anext__',\n '__aenter__',\n '__aexit__',\n))\n\n#: List of magic methods that are forbidden to use.\nMAGIC_METHODS_BLACKLIST: Final = frozenset((\n # Since we don't use `del`:\n '__del__',\n '__delitem__',\n '__delete__',\n\n '__dir__', # since we don't use `dir()`\n '__delattr__', # since we don't use 
`delattr()`\n))\n\n#: List of magic methods that are not allowed to be generators.\nYIELD_MAGIC_METHODS_BLACKLIST: Final = ALL_MAGIC_METHODS.difference({\n # Allowed to be used with ``yield`` keyowrd:\n '__iter__',\n})\n\n#: List of magic methods that are not allowed to be async.\nASYNC_MAGIC_METHODS_BLACKLIST: Final = ALL_MAGIC_METHODS.difference({\n # In order of appearance on\n # https://docs.python.org/3/reference/datamodel.html#basic-customization\n # Allowed magic methods are:\n '__anext__',\n '__aenter__',\n '__aexit__',\n})\n\n#: List of nested classes' names we allow to use.\nNESTED_CLASSES_WHITELIST: Final = frozenset((\n 'Meta', # django forms, models, drf, etc\n 'Params', # factoryboy specific\n))\n\n#: List of builtin classes that are allowed to subclass.\nALLOWED_BUILTIN_CLASSES: Final = frozenset((\n 'type',\n 'object',\n))\n\n#: List of nested functions' names we allow to use.\nNESTED_FUNCTIONS_WHITELIST: Final = frozenset((\n 'decorator',\n 'factory',\n))\n\n#: List of allowed ``__future__`` imports.\nFUTURE_IMPORTS_WHITELIST: Final = frozenset((\n 'annotations',\n 'generator_stop',\n))\n\n#: List of blacklisted module names.\nMODULE_NAMES_BLACKLIST: Final = frozenset((\n 'util',\n 'utils',\n 'utilities',\n 'helpers',\n))\n\n#: List of allowed module magic names.\nMAGIC_MODULE_NAMES_WHITELIST: Final = frozenset((\n '__init__',\n '__main__',\n))\n\n#: List of bad magic module functions.\nMAGIC_MODULE_NAMES_BLACKLIST: Final = frozenset((\n '__getattr__',\n '__dir__',\n))\n\n#: Regex pattern to name modules.\nMODULE_NAME_PATTERN: Final = re.compile(r'^_?_?[a-z][a-z\\d_]*[a-z\\d](__)?$')\n\n#: Common numbers that are allowed to be used without being called \"magic\".\nMAGIC_NUMBERS_WHITELIST: Final = frozenset((\n 0, # both int and float\n 0.5,\n 100,\n 1000,\n 1024, # bytes\n 24, # hours\n 60, # seconds, minutes\n\n 1j, # imaginary part of a complex number\n))\n\n#: Maximum amount of ``noqa`` comments per module.\nMAX_NOQA_COMMENTS: Final = 10\n\n#: Maximum amount of ``pragma`` no-cover comments per module.\nMAX_NO_COVER_COMMENTS: Final = 5\n\n#: Maximum length of ``yield`` ``tuple`` expressions.\nMAX_LEN_YIELD_TUPLE: Final = 5\n\n\n# Internal variables\n# They are not publicly documented since they are not used by the end user.\n\n# Used as a default filename, when it is not passed by flake8:\nSTDIN: Final = 'stdin'\n\n# Used as a special name patterns for unused variables, like _, __:\nUNUSED_VARIABLE_REGEX: Final = re.compile(r'^_+$')\n\n# Used to specify as a placeholder for `__init__`:\nINIT: Final = '__init__'\n\n# Allowed magic number modulo:\nNON_MAGIC_MODULO: Final = 10\n\n# Used to specify a pattern which checks variables and modules for underscored\n# numbers in their names:\nUNDERSCORED_NUMBER_PATTERN: Final = re.compile(r'.+\\D\\_\\d+(\\D|$)')\n",
"path": "wemake_python_styleguide/constants.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nThis module contains list of white- and black-listed ``python`` members.\n\nIt contains lists of keywords and built-in functions we discourage to use.\nIt also contains some exceptions that we allow to use in our codebase.\n\"\"\"\n\nimport re\n\nfrom typing_extensions import Final\n\n#: List of functions we forbid to use.\nFUNCTIONS_BLACKLIST: Final = frozenset((\n # Code generation:\n 'eval',\n 'exec',\n 'compile',\n\n # Termination:\n 'exit',\n 'quit',\n\n # Magic:\n 'globals',\n 'locals',\n 'vars',\n 'dir',\n\n # IO:\n 'input', # print is handled via `flake8-print`\n 'breakpoint',\n\n # Attribute access:\n 'hasattr',\n 'delattr',\n\n # Gratis:\n 'copyright',\n 'help',\n 'credits',\n\n # Dynamic imports:\n '__import__',\n\n # OOP:\n 'staticmethod',\n\n # Mypy:\n 'reveal_type',\n))\n\n#: List of module metadata we forbid to use.\nMODULE_METADATA_VARIABLES_BLACKLIST: Final = frozenset((\n '__author__',\n '__all__',\n '__version__',\n '__about__',\n))\n\n#: List of variable names we forbid to use.\nVARIABLE_NAMES_BLACKLIST: Final = frozenset((\n # Meaningless words:\n 'data',\n 'result',\n 'results',\n 'item',\n 'items',\n 'value',\n 'values',\n 'val',\n 'vals',\n 'var',\n 'vars',\n 'variable',\n 'content',\n 'contents',\n 'info',\n 'handle',\n 'handler',\n 'file',\n 'obj',\n 'objects',\n 'objs',\n 'some',\n 'do',\n 'param',\n 'params',\n 'parameters',\n\n # Confuseables:\n 'no',\n 'true',\n 'false',\n\n # Names from examples:\n 'foo',\n 'bar',\n 'baz',\n))\n\n#: List of special names that are used only as first argument in methods.\nSPECIAL_ARGUMENT_NAMES_WHITELIST: Final = frozenset((\n 'self',\n 'cls',\n 'mcs',\n))\n\n#: List of all magic methods from the python docs.\nALL_MAGIC_METHODS: Final = frozenset((\n '__new__',\n '__init__',\n '__del__',\n\n '__repr__',\n '__str__',\n '__bytes__',\n '__format__',\n\n '__lt__',\n '__le__',\n '__eq__',\n '__ne__',\n '__gt__',\n '__ge__',\n\n '__hash__',\n '__bool__',\n\n '__getattr__',\n '__getattribute__',\n '__setattr__',\n '__delattr__',\n '__dir__',\n\n '__get__',\n '__set__',\n '__delete__',\n '__set_name__',\n\n '__init_subclass__',\n '__instancecheck__',\n '__subclasscheck__',\n '__class_getitem__',\n\n '__call__',\n '__len__',\n '__length_hint__',\n '__getitem__',\n '__setitem__',\n '__delitem__',\n '__missing__',\n '__iter__',\n '__reversed__',\n '__contains__',\n\n '__add__',\n '__sub__',\n '__mul__',\n '__matmul__',\n '__truediv__',\n '__floordiv__',\n '__mod__',\n '__divmod__',\n '__pow__',\n '__lshift__',\n '__rshift__',\n '__and__',\n '__xor__',\n '__or__',\n '__radd__',\n '__rsub__',\n '__rmul__',\n '__rmatmul__',\n '__rtruediv__',\n '__rfloordiv__',\n '__rmod__',\n '__rdivmod__',\n '__rpow__',\n '__rlshift__',\n '__rrshift__',\n '__rand__',\n '__rxor__',\n '__ror__',\n '__iadd__',\n '__isub__',\n '__imul__',\n '__imatmul__',\n '__itruediv__',\n '__ifloordiv__',\n '__imod__',\n '__ipow__',\n '__ilshift__',\n '__irshift__',\n '__iand__',\n '__ixor__',\n '__ior__',\n '__neg__',\n '__pos__',\n '__abs__',\n '__invert__',\n '__complex__',\n '__int__',\n '__float__',\n '__index__',\n '__round__',\n '__trunc__',\n '__floor__',\n '__ceil__',\n\n '__enter__',\n '__exit__',\n\n '__await__',\n '__aiter__',\n '__anext__',\n '__aenter__',\n '__aexit__',\n))\n\n#: List of magic methods that are forbidden to use.\nMAGIC_METHODS_BLACKLIST: Final = frozenset((\n # Since we don't use `del`:\n '__del__',\n '__delitem__',\n '__delete__',\n\n '__dir__', # since we don't use `dir()`\n '__delattr__', # since 
we don't use `delattr()`\n))\n\n#: List of magic methods that are not allowed to be generators.\nYIELD_MAGIC_METHODS_BLACKLIST: Final = ALL_MAGIC_METHODS.difference({\n # Allowed to be used with ``yield`` keyowrd:\n '__iter__',\n})\n\n#: List of magic methods that are not allowed to be async.\nASYNC_MAGIC_METHODS_BLACKLIST: Final = ALL_MAGIC_METHODS.difference({\n # In order of appearance on\n # https://docs.python.org/3/reference/datamodel.html#basic-customization\n # Allowed magic methods are:\n '__anext__',\n '__aenter__',\n '__aexit__',\n})\n\n#: List of nested classes' names we allow to use.\nNESTED_CLASSES_WHITELIST: Final = frozenset((\n 'Meta', # django forms, models, drf, etc\n 'Params', # factoryboy specific\n))\n\n#: List of builtin classes that are allowed to subclass.\nALLOWED_BUILTIN_CLASSES: Final = frozenset((\n 'type',\n 'object',\n))\n\n#: List of nested functions' names we allow to use.\nNESTED_FUNCTIONS_WHITELIST: Final = frozenset((\n 'decorator',\n 'factory',\n))\n\n#: List of allowed ``__future__`` imports.\nFUTURE_IMPORTS_WHITELIST: Final = frozenset((\n 'annotations',\n 'generator_stop',\n))\n\n#: List of blacklisted module names.\nMODULE_NAMES_BLACKLIST: Final = frozenset((\n 'util',\n 'utils',\n 'utilities',\n 'helpers',\n))\n\n#: List of allowed module magic names.\nMAGIC_MODULE_NAMES_WHITELIST: Final = frozenset((\n '__init__',\n '__main__',\n))\n\n#: List of bad magic module functions.\nMAGIC_MODULE_NAMES_BLACKLIST: Final = frozenset((\n '__getattr__',\n '__dir__',\n))\n\n#: Regex pattern to name modules.\nMODULE_NAME_PATTERN: Final = re.compile(r'^_?_?[a-z][a-z\\d_]*[a-z\\d](__)?$')\n\n#: Common numbers that are allowed to be used without being called \"magic\".\nMAGIC_NUMBERS_WHITELIST: Final = frozenset((\n 0, # both int and float\n 0.5,\n 100,\n 1000,\n 1024, # bytes\n 24, # hours\n 60, # seconds, minutes\n\n 1j, # imaginary part of a complex number\n))\n\n#: Maximum amount of ``noqa`` comments per module.\nMAX_NOQA_COMMENTS: Final = 10\n\n#: Maximum amount of ``pragma`` no-cover comments per module.\nMAX_NO_COVER_COMMENTS: Final = 5\n\n#: Maximum length of ``yield`` ``tuple`` expressions.\nMAX_LEN_YIELD_TUPLE: Final = 5\n\n\n# Internal variables\n# They are not publicly documented since they are not used by the end user.\n\n# Used as a default filename, when it is not passed by flake8:\nSTDIN: Final = 'stdin'\n\n# Used as a special name patterns for unused variables, like _, __:\nUNUSED_VARIABLE_REGEX: Final = re.compile(r'^_+$')\n\n# Used to specify as a placeholder for `__init__`:\nINIT: Final = '__init__'\n\n# Allowed magic number modulo:\nNON_MAGIC_MODULO: Final = 10\n\n# Used to specify a pattern which checks variables and modules for underscored\n# numbers in their names:\nUNDERSCORED_NUMBER_PATTERN: Final = re.compile(r'.+\\D\\_\\d+(\\D|$)')\n",
"path": "wemake_python_styleguide/constants.py"
}
] | diff --git a/wemake_python_styleguide/constants.py b/wemake_python_styleguide/constants.py
index 5db381af4..0ce2b4050 100644
--- a/wemake_python_styleguide/constants.py
+++ b/wemake_python_styleguide/constants.py
@@ -46,6 +46,9 @@
# OOP:
'staticmethod',
+
+ # Mypy:
+ 'reveal_type',
))
#: List of module metadata we forbid to use.
|
e2nIEE__pandapower-2242 | connected_components documentation error
### Feature Checklist
- [X] Searched the [issues page](https://github.com/e2nIEE/pandapower/issues) for similar reports
- [X] Read the relevant sections of the [documentation](https://pandapower.readthedocs.io/en/latest/about.html)
- [ ] Browse the [tutorials](https://github.com/e2nIEE/pandapower/tree/develop/tutorials) and [tests](https://github.com/e2nIEE/pandapower/tree/develop/pandapower/test) for useful code snippets and examples of use
### Issue
Error in the Docs of `pandapower.topology.connected_components(mg, notravbuses={})`
The example states:
```python
import pandapower.topology as top
mg = top.create_nxgraph(net)
cc = top.connected_components(net, 5)
```
but it should be
```python
import pandapower.topology as top
mg = top.create_nxgraph(net)
cc = top.connected_components(mg, 5)
```
Note the `net` in the last line: `connected_components` takes a graph, not a pandapowerNet.
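
For completeness, a hedged usage sketch built on the corrected call; the toy network from `pandapower.networks.example_simple()` is only an illustration:

```python
import pandapower.networks as pn
import pandapower.topology as top

net = pn.example_simple()             # illustrative example network
mg = top.create_nxgraph(net)          # build the NetworkX (Multi)Graph from the net
clusters = list(top.connected_components(mg))  # generator of sets of connected bus indices
print(clusters)
```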
### Label
- [ ] Relevant labels are selected
| [
{
"content": "# -*- coding: utf-8 -*-\n\n# Copyright (c) 2016-2023 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. All rights reserved.\n\n\nimport networkx as nx\nimport pandas as pd\nfrom collections import deque\nfrom itertools import combinations\n\nfrom pandapower.topology.create_graph import create_nxgraph\n\n\ndef connected_component(mg, bus, notravbuses=[]):\n \"\"\"\n Finds all buses in a NetworkX graph that are connected to a certain bus.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **bus** (integer) - Index of the bus at which the search for connected components originates\n\n\n OPTIONAL:\n **notravbuses** (list/set) - indices of notravbuses: lines connected to these buses are\n not being considered in the graph\n\n OUTPUT:\n **cc** (generator) - Returns a generator that yields all buses connected to the input bus\n\n EXAMPLE:\n import pandapower.topology as top\n\n mg = top.create_nxgraph(net)\n\n cc = top.connected_component(mg, 5)\n\n \"\"\"\n yield bus\n visited = {bus}\n stack = deque([iter(mg[bus])])\n while stack:\n for child in stack.pop():\n if child not in visited:\n yield child\n visited.add(child)\n if child not in notravbuses:\n stack.append(iter(mg[child]))\n\n\ndef connected_components(mg, notravbuses=set()):\n \"\"\"\n Clusters all buses in a NetworkX graph that are connected to each other.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n\n OPTIONAL:\n **notravbuses** (set) - Indices of notravbuses: lines connected to these buses are\n not being considered in the graph\n\n OUTPUT:\n **cc** (generator) - Returns a generator that yields all clusters of buses connected\n to each other.\n\n EXAMPLE:\n import pandapower.topology as top\n\n mg = top.create_nxgraph(net)\n\n cc = top.connected_components(net, 5)\n\n \"\"\"\n\n nodes = set(mg.nodes()) - notravbuses\n while nodes:\n cc = set(connected_component(mg, nodes.pop(), notravbuses=notravbuses))\n yield cc\n nodes -= cc\n # the above does not work if two notravbuses are directly connected\n if len(notravbuses) > 0:\n for f, t in mg.edges(notravbuses):\n if f in notravbuses and t in notravbuses:\n yield set([f, t])\n\n\ndef calc_distance_to_bus(net, bus, respect_switches=True, nogobuses=None,\n notravbuses=None, weight='weight', g=None):\n \"\"\"\n Calculates the shortest distance between a source bus and all buses connected to it.\n\n INPUT:\n **net** (pandapowerNet) - Variable that contains a pandapower network.\n\n **bus** (integer) - Index of the source bus.\n\n\n OPTIONAL:\n **respect_switches** (boolean, True)\n\n True: open line switches are being considered (no edge between nodes).\n\n False: open line switches are being ignored.\n\n **nogobuses** (integer/list, None) - nogobuses are not being considered.\n\n **notravbuses** (integer/list, None) - lines connected to these buses are not being considered.\n\n **weight** (string, None) – Edge data key corresponding to the edge weight.\n\n **g** (nx.MultiGraph, None) – MultiGraph of the network. If None, the graph will be created.\n\n OUTPUT:\n **dist** - Returns a pandas series with containing all distances to the source bus\n in km. 
If weight=None dist is the topological distance (int).\n\n EXAMPLE:\n import pandapower.topology as top\n\n dist = top.calc_distance_to_bus(net, 5)\n\n \"\"\"\n if g is None:\n g = create_nxgraph(net, respect_switches=respect_switches, nogobuses=nogobuses,\n notravbuses=notravbuses)\n return pd.Series(nx.single_source_dijkstra_path_length(g, bus, weight=weight))\n\n\ndef unsupplied_buses(net, mg=None, slacks=None, respect_switches=True):\n \"\"\"\n Finds buses, that are not connected electrically (no lines, trafos etc or if respect_switches\n is True only connected via open switches) to an external grid and that are in service.\n\n INPUT:\n **net** (pandapowerNet) - variable that contains a pandapower network\n\n OPTIONAL:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **in_service_only** (boolean, False) - Defines whether only in service buses should be\n included in unsupplied_buses.\n\n **slacks** (set, None) - buses which are considered as root / slack buses. If None, all\n existing slack buses are considered.\n\n **respect_switches** (boolean, True) - Fixes how to consider switches - only in case of no\n given mg.\n\n OUTPUT:\n **ub** (set) - unsupplied buses\n\n EXAMPLE:\n import pandapower.topology as top\n\n top.unsupplied_buses(net)\n \"\"\"\n\n mg = mg or create_nxgraph(net, respect_switches=respect_switches)\n if slacks is None:\n slacks = set(net.ext_grid[net.ext_grid.in_service].bus.values) | set(\n net.gen[net.gen.in_service & net.gen.slack].bus.values)\n not_supplied = set()\n for cc in nx.connected_components(mg):\n if not set(cc) & slacks:\n not_supplied.update(set(cc))\n\n return not_supplied\n\n\ndef find_basic_graph_characteristics(g, roots, characteristics):\n \"\"\"\n Determines basic characteristics of the given graph like connected buses, stubs, bridges,\n and articulation points.\n\n .. note::\n\n This is the base function for find_graph_characteristics. 
Please use the latter\n function instead!\n \"\"\"\n connected = 'connected' in characteristics\n stub_buses = 'stub_buses' in characteristics\n bridges = {'bridges', 'required_bridges'} & set(characteristics)\n articulation_points = {'articulation_points', 'notn1_areas'} & set(characteristics)\n notn1_starts = 'notn1_areas' in characteristics\n\n char_dict = {'connected': set(), 'stub_buses': set(), 'bridges': set(),\n 'articulation_points': set(), 'notn1_starts': set()}\n\n discovery = {root: 0 for root in roots} # \"time\" of first discovery of node during search\n low = {root: 0 for root in roots}\n visited = set(roots)\n path = []\n stack = [(root, root, iter(g[root])) for root in roots]\n while stack:\n grandparent, parent, children = stack[-1]\n try:\n child = next(children)\n if stub_buses:\n if child not in visited:\n path.append(child) # keep track of movement through the graph\n if grandparent == child:\n continue\n if child in visited:\n if discovery[child] <= discovery[parent]: # back edge\n low[parent] = min(low[parent], discovery[child])\n else:\n low[child] = discovery[child] = len(discovery)\n visited.add(child)\n stack.append((parent, child, iter(g[child])))\n except StopIteration:\n back = stack.pop()\n path.append(back[0])\n if low[parent] >= discovery[grandparent]:\n # Articulation points and start of not n-1 safe buses\n if grandparent not in roots:\n if articulation_points:\n char_dict['articulation_points'].add(grandparent)\n if notn1_starts:\n char_dict['notn1_starts'].add(parent)\n if low[parent] > discovery[grandparent]:\n # Bridges\n if bridges:\n char_dict['bridges'].add((grandparent, parent))\n\n # Stub buses\n if stub_buses:\n stub = path.pop()\n if stub != grandparent:\n char_dict['stub_buses'].add(stub)\n while path and path[-1] != grandparent and path[-1] not in roots:\n stub = path.pop()\n char_dict['stub_buses'].add(stub)\n low[grandparent] = min(low[parent], low[grandparent])\n\n if connected:\n char_dict['connected'] = visited\n return char_dict\n\n\ndef find_graph_characteristics(g, roots, characteristics):\n \"\"\"\n Finds and returns different characteristics of the given graph which can be specified.\n\n INPUT:\n **g** (NetworkX graph) - Graph of the network\n\n **roots** (list) - Root buses of the graphsearch\n\n **characteristics** (list) - List of characteristics this function determines and returns\n\n .. 
note::\n\n Possible characteristics:\n\n - 'connected' - All buses which have a connection to at least one of the root buses\n - 'articulation_points' - Buses which lead to disconnected areas if they get removed\n - 'bridges' - Edges which lead to disconnected areas if they get removed\n - 'stub_buses' - Buses which arent't connected if one specific edge gets removed\n - 'required_bridges' - Bridges which are strictly needed to connect a specific bus\n - 'notn1_areas' - Areas which aren't connected if one specific bus gets removed\n\n OUTPUT:\n\n **char_dict** (dict) - dictionary which contains the wanted characteristics\n\n ======================= ================================================================\n key dict value\n ======================= ================================================================\n 'connected' set of all connected buses\n 'articulation_points' set of all articulation points\n 'bridges' set of tuples which represent start and end bus of each bridge\n 'stub_buses' set of all buses which lie on a stub\n 'required_bridges' dict of all buses which are connected via at least one bridge.\n The dict values contain a set of bridges which are needed to\n connect the key buses\n 'notn1_areas' dict of not n-1 safe areas. The dict values contain a set of\n not n-1 safe buses which aren't connected if the key bus gets\n removed\n ======================= ================================================================\n\n EXAMPLE::\n\n import topology as top\n g = top.create_nxgraph(net, respect_switches=False)\n char_dict = top.find_graph_characteristics(g, roots=[0, 3], characteristics=['connected', 'stub_buses'])\n \"\"\"\n char_dict = find_basic_graph_characteristics(g, roots, characteristics)\n\n required_bridges = 'required_bridges' in characteristics\n notn1_areas = 'notn1_areas' in characteristics\n\n if not required_bridges and not notn1_areas:\n return {key: char_dict[key] for key in characteristics}\n\n char_dict.update({'required_bridges': dict(), 'notn1_areas': dict()})\n\n visited = set(roots)\n visited_bridges = []\n notn1_area_start = None\n curr_notn1_area = []\n\n stack = [(root, root, iter(g[root])) for root in roots]\n while stack:\n grandparent, parent, children = stack[-1]\n try:\n child = next(children)\n if child == grandparent:\n continue\n if child not in visited:\n visited.add(child)\n stack.append((parent, child, iter(g[child])))\n if required_bridges and ((parent, child) in char_dict['bridges'] or\n (child, parent) in char_dict['bridges']):\n visited_bridges.append((parent, child))\n\n if notn1_areas:\n if child in char_dict['notn1_starts'] and not notn1_area_start:\n notn1_area_start = parent\n if notn1_area_start:\n curr_notn1_area.append(child)\n\n except StopIteration:\n stack.pop()\n if required_bridges:\n if len(visited_bridges) > 0:\n char_dict['required_bridges'][parent] = visited_bridges[:]\n if ((parent, grandparent) in char_dict['bridges'] or\n (grandparent, parent) in char_dict['bridges']):\n visited_bridges.pop()\n\n if notn1_areas and grandparent == notn1_area_start:\n if grandparent in char_dict[\"notn1_areas\"]:\n char_dict[\"notn1_areas\"][grandparent].update(set(curr_notn1_area[:]))\n else:\n char_dict[\"notn1_areas\"][grandparent] = set(curr_notn1_area[:])\n del curr_notn1_area[:]\n notn1_area_start = None\n\n return {key: char_dict[key] for key in characteristics}\n\n\ndef get_2connected_buses(g, roots):\n \"\"\"\n Get all buses which have at least two connections to the roots\n\n INPUT:\n **g** (NetworkX graph) - 
NetworkX Graph or MultiGraph that represents a pandapower network\n\n **roots** - Roots of the graphsearch\n \"\"\"\n char_dict = find_graph_characteristics(g, roots, characteristics=['connected', 'stub_buses'])\n connected, stub_buses = char_dict['connected'], char_dict['stub_buses']\n two_connected = connected - stub_buses\n return connected, two_connected\n\n\ndef determine_stubs(net, roots=None, mg=None, respect_switches=False):\n \"\"\"\n Finds stubs in a network. Open switches are being ignored. Results are being written in a new\n column in the bus table (\"on_stub\") and line table (\"is_stub\") as True/False value.\n\n\n INPUT:\n **net** (pandapowerNet) - Variable that contains a pandapower network.\n\n OPTIONAL:\n **roots** (integer/list, None) - indices of buses that should be excluded (by default, the\n ext_grid buses will be set as roots)\n\n EXAMPLE:\n import pandapower.topology as top\n\n top.determine_stubs(net, roots = [0, 1])\n\n\n \"\"\"\n if mg is None:\n mg = create_nxgraph(net, respect_switches=respect_switches)\n # remove buses with degree lower 2 until none left\n if roots is None:\n roots = set(net.ext_grid.bus)\n # mg.add_edges_from((a, b) for a, b in zip(list(roots)[:-1], list(roots)[1:]))\n # while True:\n # dgo = {g for g, d in list(mg.degree().items()) if d < 2} #- roots\n # if not dgo:\n # break\n # mg.remove_nodes_from(dgo)\n # n1_buses = mg.nodes()\n _, n1_buses = get_2connected_buses(mg, roots)\n net.bus[\"on_stub\"] = True\n net.bus.loc[list(n1_buses), \"on_stub\"] = False\n net.line[\"is_stub\"] = ~((net.line.from_bus.isin(n1_buses)) & (net.line.to_bus.isin(n1_buses)))\n stubs = set(net.bus.index) - set(n1_buses)\n return stubs\n\n\ndef lines_on_path(mg, path):\n \"\"\"\n Finds all lines that connect a given path of buses.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **path** (list) - List of connected buses.\n\n OUTPUT:\n **lines** (list) - Returns a list of all lines on the path.\n\n EXAMPLE:\n import topology as top\n\n mg = top.create_nxgraph(net)\n lines = top.lines_on_path(mg, [4, 5, 6])\n\n \"\"\"\n\n return elements_on_path(mg, path, \"line\")\n\n\ndef elements_on_path(mg, path, element=\"line\"):\n \"\"\"\n Finds all elements that connect a given path of buses.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **path** (list) - List of connected buses.\n\n **element** (string, \"l\") - element type\n\n **multi** (boolean, True) - True: Applied on a NetworkX MultiGraph\n False: Applied on a NetworkX Graph\n\n OUTPUT:\n **elements** (list) - Returns a list of all lines on the path.\n\n EXAMPLE:\n import topology as top\n\n mg = top.create_nxgraph(net)\n elements = top.elements_on_path(mg, [4, 5, 6])\n\n \"\"\"\n if element not in [\"line\", \"switch\", \"trafo\", \"trafo3w\"]:\n raise ValueError(\"Invalid element type %s\"%element)\n if isinstance(mg, nx.MultiGraph):\n return [edge[1] for b1, b2 in zip(path, path[1:]) for edge in mg.get_edge_data(b1, b2).keys()\n if edge[0]==element]\n else:\n return [mg.get_edge_data(b1, b2)[\"key\"][1] for b1, b2 in zip(path, path[1:])\n if mg.get_edge_data(b1, b2)[\"key\"][0]==element]\n\n\ndef get_end_points_of_continuously_connected_lines(net, lines):\n mg = nx.MultiGraph()\n line_buses = net.line.loc[lines, [\"from_bus\", \"to_bus\"]].values\n mg.add_edges_from(line_buses)\n switch_buses = net.switch[[\"bus\", \"element\"]].values[net.switch.et.values==\"b\"]\n 
mg.add_edges_from(switch_buses)\n\n all_buses = set(line_buses.flatten())\n longest_path = []\n for b1, b2 in combinations(all_buses, 2):\n try:\n path = nx.shortest_path(mg, b1, b2)\n except nx.NetworkXNoPath:\n raise UserWarning(\"Lines not continuously connected\")\n if len(path) > len(longest_path):\n longest_path = path\n if all_buses - set(longest_path):\n raise UserWarning(\"Lines have branching points\")\n return longest_path[0], longest_path[-1]\n",
"path": "pandapower/topology/graph_searches.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\n# Copyright (c) 2016-2023 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. All rights reserved.\n\n\nimport networkx as nx\nimport pandas as pd\nfrom collections import deque\nfrom itertools import combinations\n\nfrom pandapower.topology.create_graph import create_nxgraph\n\n\ndef connected_component(mg, bus, notravbuses=[]):\n \"\"\"\n Finds all buses in a NetworkX graph that are connected to a certain bus.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **bus** (integer) - Index of the bus at which the search for connected components originates\n\n\n OPTIONAL:\n **notravbuses** (list/set) - indices of notravbuses: lines connected to these buses are\n not being considered in the graph\n\n OUTPUT:\n **cc** (generator) - Returns a generator that yields all buses connected to the input bus\n\n EXAMPLE:\n import pandapower.topology as top\n\n mg = top.create_nxgraph(net)\n\n cc = top.connected_component(mg, 5)\n\n \"\"\"\n yield bus\n visited = {bus}\n stack = deque([iter(mg[bus])])\n while stack:\n for child in stack.pop():\n if child not in visited:\n yield child\n visited.add(child)\n if child not in notravbuses:\n stack.append(iter(mg[child]))\n\n\ndef connected_components(mg, notravbuses=set()):\n \"\"\"\n Clusters all buses in a NetworkX graph that are connected to each other.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n\n OPTIONAL:\n **notravbuses** (set) - Indices of notravbuses: lines connected to these buses are\n not being considered in the graph\n\n OUTPUT:\n **cc** (generator) - Returns a generator that yields all clusters of buses connected\n to each other.\n\n EXAMPLE:\n import pandapower.topology as top\n\n mg = top.create_nxgraph(net)\n\n cc = top.connected_components(mg, 5)\n\n \"\"\"\n\n nodes = set(mg.nodes()) - notravbuses\n while nodes:\n cc = set(connected_component(mg, nodes.pop(), notravbuses=notravbuses))\n yield cc\n nodes -= cc\n # the above does not work if two notravbuses are directly connected\n if len(notravbuses) > 0:\n for f, t in mg.edges(notravbuses):\n if f in notravbuses and t in notravbuses:\n yield set([f, t])\n\n\ndef calc_distance_to_bus(net, bus, respect_switches=True, nogobuses=None,\n notravbuses=None, weight='weight', g=None):\n \"\"\"\n Calculates the shortest distance between a source bus and all buses connected to it.\n\n INPUT:\n **net** (pandapowerNet) - Variable that contains a pandapower network.\n\n **bus** (integer) - Index of the source bus.\n\n\n OPTIONAL:\n **respect_switches** (boolean, True)\n\n True: open line switches are being considered (no edge between nodes).\n\n False: open line switches are being ignored.\n\n **nogobuses** (integer/list, None) - nogobuses are not being considered.\n\n **notravbuses** (integer/list, None) - lines connected to these buses are not being considered.\n\n **weight** (string, None) – Edge data key corresponding to the edge weight.\n\n **g** (nx.MultiGraph, None) – MultiGraph of the network. If None, the graph will be created.\n\n OUTPUT:\n **dist** - Returns a pandas series with containing all distances to the source bus\n in km. 
If weight=None dist is the topological distance (int).\n\n EXAMPLE:\n import pandapower.topology as top\n\n dist = top.calc_distance_to_bus(net, 5)\n\n \"\"\"\n if g is None:\n g = create_nxgraph(net, respect_switches=respect_switches, nogobuses=nogobuses,\n notravbuses=notravbuses)\n return pd.Series(nx.single_source_dijkstra_path_length(g, bus, weight=weight))\n\n\ndef unsupplied_buses(net, mg=None, slacks=None, respect_switches=True):\n \"\"\"\n Finds buses, that are not connected electrically (no lines, trafos etc or if respect_switches\n is True only connected via open switches) to an external grid and that are in service.\n\n INPUT:\n **net** (pandapowerNet) - variable that contains a pandapower network\n\n OPTIONAL:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **in_service_only** (boolean, False) - Defines whether only in service buses should be\n included in unsupplied_buses.\n\n **slacks** (set, None) - buses which are considered as root / slack buses. If None, all\n existing slack buses are considered.\n\n **respect_switches** (boolean, True) - Fixes how to consider switches - only in case of no\n given mg.\n\n OUTPUT:\n **ub** (set) - unsupplied buses\n\n EXAMPLE:\n import pandapower.topology as top\n\n top.unsupplied_buses(net)\n \"\"\"\n\n mg = mg or create_nxgraph(net, respect_switches=respect_switches)\n if slacks is None:\n slacks = set(net.ext_grid[net.ext_grid.in_service].bus.values) | set(\n net.gen[net.gen.in_service & net.gen.slack].bus.values)\n not_supplied = set()\n for cc in nx.connected_components(mg):\n if not set(cc) & slacks:\n not_supplied.update(set(cc))\n\n return not_supplied\n\n\ndef find_basic_graph_characteristics(g, roots, characteristics):\n \"\"\"\n Determines basic characteristics of the given graph like connected buses, stubs, bridges,\n and articulation points.\n\n .. note::\n\n This is the base function for find_graph_characteristics. 
Please use the latter\n function instead!\n \"\"\"\n connected = 'connected' in characteristics\n stub_buses = 'stub_buses' in characteristics\n bridges = {'bridges', 'required_bridges'} & set(characteristics)\n articulation_points = {'articulation_points', 'notn1_areas'} & set(characteristics)\n notn1_starts = 'notn1_areas' in characteristics\n\n char_dict = {'connected': set(), 'stub_buses': set(), 'bridges': set(),\n 'articulation_points': set(), 'notn1_starts': set()}\n\n discovery = {root: 0 for root in roots} # \"time\" of first discovery of node during search\n low = {root: 0 for root in roots}\n visited = set(roots)\n path = []\n stack = [(root, root, iter(g[root])) for root in roots]\n while stack:\n grandparent, parent, children = stack[-1]\n try:\n child = next(children)\n if stub_buses:\n if child not in visited:\n path.append(child) # keep track of movement through the graph\n if grandparent == child:\n continue\n if child in visited:\n if discovery[child] <= discovery[parent]: # back edge\n low[parent] = min(low[parent], discovery[child])\n else:\n low[child] = discovery[child] = len(discovery)\n visited.add(child)\n stack.append((parent, child, iter(g[child])))\n except StopIteration:\n back = stack.pop()\n path.append(back[0])\n if low[parent] >= discovery[grandparent]:\n # Articulation points and start of not n-1 safe buses\n if grandparent not in roots:\n if articulation_points:\n char_dict['articulation_points'].add(grandparent)\n if notn1_starts:\n char_dict['notn1_starts'].add(parent)\n if low[parent] > discovery[grandparent]:\n # Bridges\n if bridges:\n char_dict['bridges'].add((grandparent, parent))\n\n # Stub buses\n if stub_buses:\n stub = path.pop()\n if stub != grandparent:\n char_dict['stub_buses'].add(stub)\n while path and path[-1] != grandparent and path[-1] not in roots:\n stub = path.pop()\n char_dict['stub_buses'].add(stub)\n low[grandparent] = min(low[parent], low[grandparent])\n\n if connected:\n char_dict['connected'] = visited\n return char_dict\n\n\ndef find_graph_characteristics(g, roots, characteristics):\n \"\"\"\n Finds and returns different characteristics of the given graph which can be specified.\n\n INPUT:\n **g** (NetworkX graph) - Graph of the network\n\n **roots** (list) - Root buses of the graphsearch\n\n **characteristics** (list) - List of characteristics this function determines and returns\n\n .. 
note::\n\n Possible characteristics:\n\n - 'connected' - All buses which have a connection to at least one of the root buses\n - 'articulation_points' - Buses which lead to disconnected areas if they get removed\n - 'bridges' - Edges which lead to disconnected areas if they get removed\n - 'stub_buses' - Buses which arent't connected if one specific edge gets removed\n - 'required_bridges' - Bridges which are strictly needed to connect a specific bus\n - 'notn1_areas' - Areas which aren't connected if one specific bus gets removed\n\n OUTPUT:\n\n **char_dict** (dict) - dictionary which contains the wanted characteristics\n\n ======================= ================================================================\n key dict value\n ======================= ================================================================\n 'connected' set of all connected buses\n 'articulation_points' set of all articulation points\n 'bridges' set of tuples which represent start and end bus of each bridge\n 'stub_buses' set of all buses which lie on a stub\n 'required_bridges' dict of all buses which are connected via at least one bridge.\n The dict values contain a set of bridges which are needed to\n connect the key buses\n 'notn1_areas' dict of not n-1 safe areas. The dict values contain a set of\n not n-1 safe buses which aren't connected if the key bus gets\n removed\n ======================= ================================================================\n\n EXAMPLE::\n\n import topology as top\n g = top.create_nxgraph(net, respect_switches=False)\n char_dict = top.find_graph_characteristics(g, roots=[0, 3], characteristics=['connected', 'stub_buses'])\n \"\"\"\n char_dict = find_basic_graph_characteristics(g, roots, characteristics)\n\n required_bridges = 'required_bridges' in characteristics\n notn1_areas = 'notn1_areas' in characteristics\n\n if not required_bridges and not notn1_areas:\n return {key: char_dict[key] for key in characteristics}\n\n char_dict.update({'required_bridges': dict(), 'notn1_areas': dict()})\n\n visited = set(roots)\n visited_bridges = []\n notn1_area_start = None\n curr_notn1_area = []\n\n stack = [(root, root, iter(g[root])) for root in roots]\n while stack:\n grandparent, parent, children = stack[-1]\n try:\n child = next(children)\n if child == grandparent:\n continue\n if child not in visited:\n visited.add(child)\n stack.append((parent, child, iter(g[child])))\n if required_bridges and ((parent, child) in char_dict['bridges'] or\n (child, parent) in char_dict['bridges']):\n visited_bridges.append((parent, child))\n\n if notn1_areas:\n if child in char_dict['notn1_starts'] and not notn1_area_start:\n notn1_area_start = parent\n if notn1_area_start:\n curr_notn1_area.append(child)\n\n except StopIteration:\n stack.pop()\n if required_bridges:\n if len(visited_bridges) > 0:\n char_dict['required_bridges'][parent] = visited_bridges[:]\n if ((parent, grandparent) in char_dict['bridges'] or\n (grandparent, parent) in char_dict['bridges']):\n visited_bridges.pop()\n\n if notn1_areas and grandparent == notn1_area_start:\n if grandparent in char_dict[\"notn1_areas\"]:\n char_dict[\"notn1_areas\"][grandparent].update(set(curr_notn1_area[:]))\n else:\n char_dict[\"notn1_areas\"][grandparent] = set(curr_notn1_area[:])\n del curr_notn1_area[:]\n notn1_area_start = None\n\n return {key: char_dict[key] for key in characteristics}\n\n\ndef get_2connected_buses(g, roots):\n \"\"\"\n Get all buses which have at least two connections to the roots\n\n INPUT:\n **g** (NetworkX graph) - 
NetworkX Graph or MultiGraph that represents a pandapower network\n\n **roots** - Roots of the graphsearch\n \"\"\"\n char_dict = find_graph_characteristics(g, roots, characteristics=['connected', 'stub_buses'])\n connected, stub_buses = char_dict['connected'], char_dict['stub_buses']\n two_connected = connected - stub_buses\n return connected, two_connected\n\n\ndef determine_stubs(net, roots=None, mg=None, respect_switches=False):\n \"\"\"\n Finds stubs in a network. Open switches are being ignored. Results are being written in a new\n column in the bus table (\"on_stub\") and line table (\"is_stub\") as True/False value.\n\n\n INPUT:\n **net** (pandapowerNet) - Variable that contains a pandapower network.\n\n OPTIONAL:\n **roots** (integer/list, None) - indices of buses that should be excluded (by default, the\n ext_grid buses will be set as roots)\n\n EXAMPLE:\n import pandapower.topology as top\n\n top.determine_stubs(net, roots = [0, 1])\n\n\n \"\"\"\n if mg is None:\n mg = create_nxgraph(net, respect_switches=respect_switches)\n # remove buses with degree lower 2 until none left\n if roots is None:\n roots = set(net.ext_grid.bus)\n # mg.add_edges_from((a, b) for a, b in zip(list(roots)[:-1], list(roots)[1:]))\n # while True:\n # dgo = {g for g, d in list(mg.degree().items()) if d < 2} #- roots\n # if not dgo:\n # break\n # mg.remove_nodes_from(dgo)\n # n1_buses = mg.nodes()\n _, n1_buses = get_2connected_buses(mg, roots)\n net.bus[\"on_stub\"] = True\n net.bus.loc[list(n1_buses), \"on_stub\"] = False\n net.line[\"is_stub\"] = ~((net.line.from_bus.isin(n1_buses)) & (net.line.to_bus.isin(n1_buses)))\n stubs = set(net.bus.index) - set(n1_buses)\n return stubs\n\n\ndef lines_on_path(mg, path):\n \"\"\"\n Finds all lines that connect a given path of buses.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **path** (list) - List of connected buses.\n\n OUTPUT:\n **lines** (list) - Returns a list of all lines on the path.\n\n EXAMPLE:\n import topology as top\n\n mg = top.create_nxgraph(net)\n lines = top.lines_on_path(mg, [4, 5, 6])\n\n \"\"\"\n\n return elements_on_path(mg, path, \"line\")\n\n\ndef elements_on_path(mg, path, element=\"line\"):\n \"\"\"\n Finds all elements that connect a given path of buses.\n\n INPUT:\n **mg** (NetworkX graph) - NetworkX Graph or MultiGraph that represents a pandapower network.\n\n **path** (list) - List of connected buses.\n\n **element** (string, \"l\") - element type\n\n **multi** (boolean, True) - True: Applied on a NetworkX MultiGraph\n False: Applied on a NetworkX Graph\n\n OUTPUT:\n **elements** (list) - Returns a list of all lines on the path.\n\n EXAMPLE:\n import topology as top\n\n mg = top.create_nxgraph(net)\n elements = top.elements_on_path(mg, [4, 5, 6])\n\n \"\"\"\n if element not in [\"line\", \"switch\", \"trafo\", \"trafo3w\"]:\n raise ValueError(\"Invalid element type %s\"%element)\n if isinstance(mg, nx.MultiGraph):\n return [edge[1] for b1, b2 in zip(path, path[1:]) for edge in mg.get_edge_data(b1, b2).keys()\n if edge[0]==element]\n else:\n return [mg.get_edge_data(b1, b2)[\"key\"][1] for b1, b2 in zip(path, path[1:])\n if mg.get_edge_data(b1, b2)[\"key\"][0]==element]\n\n\ndef get_end_points_of_continuously_connected_lines(net, lines):\n mg = nx.MultiGraph()\n line_buses = net.line.loc[lines, [\"from_bus\", \"to_bus\"]].values\n mg.add_edges_from(line_buses)\n switch_buses = net.switch[[\"bus\", \"element\"]].values[net.switch.et.values==\"b\"]\n 
mg.add_edges_from(switch_buses)\n\n all_buses = set(line_buses.flatten())\n longest_path = []\n for b1, b2 in combinations(all_buses, 2):\n try:\n path = nx.shortest_path(mg, b1, b2)\n except nx.NetworkXNoPath:\n raise UserWarning(\"Lines not continuously connected\")\n if len(path) > len(longest_path):\n longest_path = path\n if all_buses - set(longest_path):\n raise UserWarning(\"Lines have branching points\")\n return longest_path[0], longest_path[-1]\n",
"path": "pandapower/topology/graph_searches.py"
}
] | diff --git a/pandapower/topology/graph_searches.py b/pandapower/topology/graph_searches.py
index d3adeffab..2490646c9 100644
--- a/pandapower/topology/graph_searches.py
+++ b/pandapower/topology/graph_searches.py
@@ -70,7 +70,7 @@ def connected_components(mg, notravbuses=set()):
mg = top.create_nxgraph(net)
- cc = top.connected_components(net, 5)
+ cc = top.connected_components(mg, 5)
"""
|
pyqtgraph__pyqtgraph-868 | Crash on closing Matplotlib export
For example, when opening the Matplotlib exporter multiple times and closing the windows again, Python crashes with a segmentation fault.
This is caused by the Matplotlib QMainWindow listening for the closeEvent and removing the only reference to the window before it has been closed properly.
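
A minimal sketch of the kind of fix this points to, assuming a PyQt/PySide-style binding; `open_windows` and `SafeMatplotlibWindow` are illustrative stand-ins for the exporter's `MatplotlibExporter.windows` list and `MatplotlibWindow` class:

```python
from pyqtgraph.Qt import QtGui

open_windows = []  # stands in for MatplotlibExporter.windows in the real code


class SafeMatplotlibWindow(QtGui.QMainWindow):
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        open_windows.append(self)

    def closeEvent(self, ev):
        # Drop the bookkeeping reference as before ...
        open_windows.remove(self)
        # ... but ask Qt to delete the widget only after the close event has
        # been fully handled, instead of letting Python drop the last
        # reference mid-event and leave a dangling C++ object behind.
        self.deleteLater()
```

The patch below takes exactly this route by adding `self.deleteLater()` to `MatplotlibWindow.closeEvent`.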
| [
{
"content": "from ..Qt import QtGui, QtCore\nfrom .Exporter import Exporter\nfrom .. import PlotItem\nfrom .. import functions as fn\n\n__all__ = ['MatplotlibExporter']\n\n\"\"\"\nIt is helpful when using the matplotlib Exporter if your\n.matplotlib/matplotlibrc file is configured appropriately.\nThe following are suggested for getting usable PDF output that\ncan be edited in Illustrator, etc.\n\nbackend : Qt4Agg\ntext.usetex : True # Assumes you have a findable LaTeX installation\ninteractive : False\nfont.family : sans-serif\nfont.sans-serif : 'Arial' # (make first in list)\nmathtext.default : sf\nfigure.facecolor : white # personal preference\n# next setting allows pdf font to be readable in Adobe Illustrator\npdf.fonttype : 42 # set fonts to TrueType (otherwise it will be 3\n # and the text will be vectorized.\ntext.dvipnghack : True # primarily to clean up font appearance on Mac\n\nThe advantage is that there is less to do to get an exported file cleaned and ready for\npublication. Fonts are not vectorized (outlined), and window colors are white.\n\n\"\"\"\n \nclass MatplotlibExporter(Exporter):\n Name = \"Matplotlib Window\"\n windows = []\n def __init__(self, item):\n Exporter.__init__(self, item)\n \n def parameters(self):\n return None\n\n def cleanAxes(self, axl):\n if type(axl) is not list:\n axl = [axl]\n for ax in axl:\n if ax is None:\n continue\n for loc, spine in ax.spines.items():\n if loc in ['left', 'bottom']:\n pass\n elif loc in ['right', 'top']:\n spine.set_color('none')\n # do not draw the spine\n else:\n raise ValueError('Unknown spine location: %s' % loc)\n # turn off ticks when there is no spine\n ax.xaxis.set_ticks_position('bottom')\n \n def export(self, fileName=None):\n \n if isinstance(self.item, PlotItem):\n mpw = MatplotlibWindow()\n MatplotlibExporter.windows.append(mpw)\n\n stdFont = 'Arial'\n \n fig = mpw.getFigure()\n \n # get labels from the graphic item\n xlabel = self.item.axes['bottom']['item'].label.toPlainText()\n ylabel = self.item.axes['left']['item'].label.toPlainText()\n title = self.item.titleLabel.text\n\n ax = fig.add_subplot(111, title=title)\n ax.clear()\n self.cleanAxes(ax)\n #ax.grid(True)\n for item in self.item.curves:\n x, y = item.getData()\n opts = item.opts\n pen = fn.mkPen(opts['pen'])\n if pen.style() == QtCore.Qt.NoPen:\n linestyle = ''\n else:\n linestyle = '-'\n color = tuple([c/255. for c in fn.colorTuple(pen.color())])\n symbol = opts['symbol']\n if symbol == 't':\n symbol = '^'\n symbolPen = fn.mkPen(opts['symbolPen'])\n symbolBrush = fn.mkBrush(opts['symbolBrush'])\n markeredgecolor = tuple([c/255. for c in fn.colorTuple(symbolPen.color())])\n markerfacecolor = tuple([c/255. for c in fn.colorTuple(symbolBrush.color())])\n markersize = opts['symbolSize']\n \n if opts['fillLevel'] is not None and opts['fillBrush'] is not None:\n fillBrush = fn.mkBrush(opts['fillBrush'])\n fillcolor = tuple([c/255. 
for c in fn.colorTuple(fillBrush.color())])\n ax.fill_between(x=x, y1=y, y2=opts['fillLevel'], facecolor=fillcolor)\n \n pl = ax.plot(x, y, marker=symbol, color=color, linewidth=pen.width(), \n linestyle=linestyle, markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor,\n markersize=markersize)\n xr, yr = self.item.viewRange()\n ax.set_xbound(*xr)\n ax.set_ybound(*yr)\n ax.set_xlabel(xlabel) # place the labels.\n ax.set_ylabel(ylabel)\n mpw.draw()\n else:\n raise Exception(\"Matplotlib export currently only works with plot items\")\n \nMatplotlibExporter.register() \n \n\nclass MatplotlibWindow(QtGui.QMainWindow):\n def __init__(self):\n from ..widgets import MatplotlibWidget\n QtGui.QMainWindow.__init__(self)\n self.mpl = MatplotlibWidget.MatplotlibWidget()\n self.setCentralWidget(self.mpl)\n self.show()\n \n def __getattr__(self, attr):\n return getattr(self.mpl, attr)\n \n def closeEvent(self, ev):\n MatplotlibExporter.windows.remove(self)\n\n\n",
"path": "pyqtgraph/exporters/Matplotlib.py"
}
] | [
{
"content": "from ..Qt import QtGui, QtCore\nfrom .Exporter import Exporter\nfrom .. import PlotItem\nfrom .. import functions as fn\n\n__all__ = ['MatplotlibExporter']\n\n\"\"\"\nIt is helpful when using the matplotlib Exporter if your\n.matplotlib/matplotlibrc file is configured appropriately.\nThe following are suggested for getting usable PDF output that\ncan be edited in Illustrator, etc.\n\nbackend : Qt4Agg\ntext.usetex : True # Assumes you have a findable LaTeX installation\ninteractive : False\nfont.family : sans-serif\nfont.sans-serif : 'Arial' # (make first in list)\nmathtext.default : sf\nfigure.facecolor : white # personal preference\n# next setting allows pdf font to be readable in Adobe Illustrator\npdf.fonttype : 42 # set fonts to TrueType (otherwise it will be 3\n # and the text will be vectorized.\ntext.dvipnghack : True # primarily to clean up font appearance on Mac\n\nThe advantage is that there is less to do to get an exported file cleaned and ready for\npublication. Fonts are not vectorized (outlined), and window colors are white.\n\n\"\"\"\n \nclass MatplotlibExporter(Exporter):\n Name = \"Matplotlib Window\"\n windows = []\n def __init__(self, item):\n Exporter.__init__(self, item)\n \n def parameters(self):\n return None\n\n def cleanAxes(self, axl):\n if type(axl) is not list:\n axl = [axl]\n for ax in axl:\n if ax is None:\n continue\n for loc, spine in ax.spines.items():\n if loc in ['left', 'bottom']:\n pass\n elif loc in ['right', 'top']:\n spine.set_color('none')\n # do not draw the spine\n else:\n raise ValueError('Unknown spine location: %s' % loc)\n # turn off ticks when there is no spine\n ax.xaxis.set_ticks_position('bottom')\n \n def export(self, fileName=None):\n \n if isinstance(self.item, PlotItem):\n mpw = MatplotlibWindow()\n MatplotlibExporter.windows.append(mpw)\n\n stdFont = 'Arial'\n \n fig = mpw.getFigure()\n \n # get labels from the graphic item\n xlabel = self.item.axes['bottom']['item'].label.toPlainText()\n ylabel = self.item.axes['left']['item'].label.toPlainText()\n title = self.item.titleLabel.text\n\n ax = fig.add_subplot(111, title=title)\n ax.clear()\n self.cleanAxes(ax)\n #ax.grid(True)\n for item in self.item.curves:\n x, y = item.getData()\n opts = item.opts\n pen = fn.mkPen(opts['pen'])\n if pen.style() == QtCore.Qt.NoPen:\n linestyle = ''\n else:\n linestyle = '-'\n color = tuple([c/255. for c in fn.colorTuple(pen.color())])\n symbol = opts['symbol']\n if symbol == 't':\n symbol = '^'\n symbolPen = fn.mkPen(opts['symbolPen'])\n symbolBrush = fn.mkBrush(opts['symbolBrush'])\n markeredgecolor = tuple([c/255. for c in fn.colorTuple(symbolPen.color())])\n markerfacecolor = tuple([c/255. for c in fn.colorTuple(symbolBrush.color())])\n markersize = opts['symbolSize']\n \n if opts['fillLevel'] is not None and opts['fillBrush'] is not None:\n fillBrush = fn.mkBrush(opts['fillBrush'])\n fillcolor = tuple([c/255. 
for c in fn.colorTuple(fillBrush.color())])\n ax.fill_between(x=x, y1=y, y2=opts['fillLevel'], facecolor=fillcolor)\n \n pl = ax.plot(x, y, marker=symbol, color=color, linewidth=pen.width(), \n linestyle=linestyle, markeredgecolor=markeredgecolor, markerfacecolor=markerfacecolor,\n markersize=markersize)\n xr, yr = self.item.viewRange()\n ax.set_xbound(*xr)\n ax.set_ybound(*yr)\n ax.set_xlabel(xlabel) # place the labels.\n ax.set_ylabel(ylabel)\n mpw.draw()\n else:\n raise Exception(\"Matplotlib export currently only works with plot items\")\n \nMatplotlibExporter.register() \n \n\nclass MatplotlibWindow(QtGui.QMainWindow):\n def __init__(self):\n from ..widgets import MatplotlibWidget\n QtGui.QMainWindow.__init__(self)\n self.mpl = MatplotlibWidget.MatplotlibWidget()\n self.setCentralWidget(self.mpl)\n self.show()\n \n def __getattr__(self, attr):\n return getattr(self.mpl, attr)\n \n def closeEvent(self, ev):\n MatplotlibExporter.windows.remove(self)\n self.deleteLater()\n",
"path": "pyqtgraph/exporters/Matplotlib.py"
}
] | diff --git a/pyqtgraph/exporters/Matplotlib.py b/pyqtgraph/exporters/Matplotlib.py
index 2da979b118..dedc2b8741 100644
--- a/pyqtgraph/exporters/Matplotlib.py
+++ b/pyqtgraph/exporters/Matplotlib.py
@@ -124,5 +124,4 @@ def __getattr__(self, attr):
def closeEvent(self, ev):
MatplotlibExporter.windows.remove(self)
-
-
+ self.deleteLater()
|
nautobot__nautobot-987 | FileVar job variable causes Server Error
<!--
NOTE: IF YOUR ISSUE DOES NOT FOLLOW THIS TEMPLATE, IT WILL BE CLOSED.
This form is only for reporting reproducible bugs. If you need assistance
with Nautobot installation, or if you have a general question, please start a
discussion instead: https://github.com/nautobot/nautobot/discussions
Please describe the environment in which you are running Nautobot. Be sure
that you are running an unmodified instance of the latest stable release
before submitting a bug report, and that any plugins have been disabled.
-->
### Environment
* Python version: 3.8.12
* Nautobot version: 1.1.3
<!--
Describe in detail the exact steps that someone else can take to reproduce
this bug using the current stable release of Nautobot. Begin with the
creation of any necessary database objects and call out every operation
being performed explicitly. If reporting a bug in the REST API, be sure to
reconstruct the raw HTTP request(s) being made: Don't rely on a client
library such as pynautobot.
-->
### Steps to Reproduce
1. Create a custom `MyCustomJob` job script which has a `nautobot.extras.jobs.FileVar` variable (a minimal sketch is shown after these steps).
2. Navigate to **Extensibility - Jobs - MyCustomJob**
3.
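
A minimal sketch of the kind of job meant in step 1, assuming the Nautobot 1.x `run(self, data, commit)` job API; the variable name `input_file` and the log message are illustrative:

```python
from nautobot.extras.jobs import FileVar, Job


class MyCustomJob(Job):
    """Job whose generated form should include a file upload field."""

    input_file = FileVar(description="File to process")  # illustrative name

    def run(self, data, commit):
        uploaded = data["input_file"]  # the uploaded file object
        self.log_info(message=f"Received file: {uploaded.name}")
```

Opening this job's page in the UI (step 2) is what produces the server error shown below.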
<!-- What did you expect to happen? -->
### Expected Behavior
The Job Data table should include a file input form field for selecting a file as input for the script.
<!-- What happened instead? -->
### Observed Behavior

### Workaround
```
# nautobot_config.py
EXTRA_INSTALLED_APPS = ["db_file_storage"]
```
| [
{
"content": "import os\nimport platform\n\nfrom django.contrib.messages import constants as messages\n\nfrom nautobot import __version__\nfrom nautobot.core.settings_funcs import is_truthy, parse_redis_connection\n\n#\n# Environment setup\n#\n\n# This is used for display in the UI.\nVERSION = __version__\n\n# Hostname of the system. This is displayed in the web UI footers along with the\n# version.\nHOSTNAME = platform.node()\n\n# Set the base directory two levels up (i.e. the base nautobot/ directory)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\n# Set the swapable User model to the Nautobot custom User model\nAUTH_USER_MODEL = \"users.User\"\n\n\n###############################################################\n# NAUTOBOT - Settings for Nautobot internals/plugins/defaults #\n###############################################################\n\n#\n# Nautobot optional settings/defaults\n#\nALLOWED_URL_SCHEMES = (\n \"file\",\n \"ftp\",\n \"ftps\",\n \"http\",\n \"https\",\n \"irc\",\n \"mailto\",\n \"sftp\",\n \"ssh\",\n \"tel\",\n \"telnet\",\n \"tftp\",\n \"vnc\",\n \"xmpp\",\n)\nBANNER_BOTTOM = \"\"\nBANNER_LOGIN = \"\"\nBANNER_TOP = \"\"\n\n# Base directory wherein all created files (jobs, git repositories, file uploads, static files) will be stored)\nNAUTOBOT_ROOT = os.getenv(\"NAUTOBOT_ROOT\", os.path.expanduser(\"~/.nautobot\"))\n\nCHANGELOG_RETENTION = 90\nDOCS_ROOT = os.path.join(BASE_DIR, \"docs\")\nHIDE_RESTRICTED_UI = False\n\n# By default, Nautobot will permit users to create duplicate prefixes and IP addresses in the global\n# table (that is, those which are not assigned to any VRF). This behavior can be disabled by setting\n# ENFORCE_GLOBAL_UNIQUE to True.\nENFORCE_GLOBAL_UNIQUE = False\n\n# Exclude potentially sensitive models from wildcard view exemption. 
These may still be exempted\n# by specifying the model individually in the EXEMPT_VIEW_PERMISSIONS configuration parameter.\nEXEMPT_EXCLUDE_MODELS = (\n (\"auth\", \"group\"),\n (\"users\", \"user\"),\n (\"users\", \"objectpermission\"),\n)\n\nEXEMPT_VIEW_PERMISSIONS = []\nGIT_ROOT = os.getenv(\"NAUTOBOT_GIT_ROOT\", os.path.join(NAUTOBOT_ROOT, \"git\").rstrip(\"/\"))\nHTTP_PROXIES = None\nJOBS_ROOT = os.getenv(\"NAUTOBOT_JOBS_ROOT\", os.path.join(NAUTOBOT_ROOT, \"jobs\").rstrip(\"/\"))\nMAINTENANCE_MODE = False\nMAX_PAGE_SIZE = 1000\n\n# Metrics\nMETRICS_ENABLED = False\n\n# Napalm\nNAPALM_ARGS = {}\nNAPALM_PASSWORD = \"\"\nNAPALM_TIMEOUT = 30\nNAPALM_USERNAME = \"\"\n\n# Pagination\nPAGINATE_COUNT = 50\nPER_PAGE_DEFAULTS = [25, 50, 100, 250, 500, 1000]\n\n# Plugins\nPLUGINS = []\nPLUGINS_CONFIG = {}\n\n# IPv4?\nPREFER_IPV4 = False\n\n# Racks\nRACK_ELEVATION_DEFAULT_UNIT_HEIGHT = 22\nRACK_ELEVATION_DEFAULT_UNIT_WIDTH = 220\n\n# Global 3rd-party authentication settings\nEXTERNAL_AUTH_DEFAULT_GROUPS = []\nEXTERNAL_AUTH_DEFAULT_PERMISSIONS = {}\n\n# Remote auth backend settings\nREMOTE_AUTH_AUTO_CREATE_USER = False\nREMOTE_AUTH_HEADER = \"HTTP_REMOTE_USER\"\n\n# Releases\nRELEASE_CHECK_URL = None\nRELEASE_CHECK_TIMEOUT = 24 * 3600\n\n# SSO backend settings https://python-social-auth.readthedocs.io/en/latest/configuration/settings.html\nSOCIAL_AUTH_POSTGRES_JSONFIELD = False\n# Nautobot related - May be overridden if using custom social auth backend\nSOCIAL_AUTH_BACKEND_PREFIX = \"social_core.backends\"\n\n# Storage\nSTORAGE_BACKEND = None\nSTORAGE_CONFIG = {}\n\n# Test runner that is aware of our use of \"integration\" tags and only runs\n# integration tests if explicitly passed in with `nautobot-server test --tag integration`.\nTEST_RUNNER = \"nautobot.core.tests.runner.NautobotTestRunner\"\n\n#\n# Django cryptography\n#\n\n# CRYPTOGRAPHY_BACKEND = cryptography.hazmat.backends.default_backend()\n# CRYPTOGRAPHY_DIGEST = cryptography.hazmat.primitives.hashes.SHA256\nCRYPTOGRAPHY_KEY = None # Defaults to SECRET_KEY if unset\nCRYPTOGRAPHY_SALT = \"nautobot-cryptography\"\n\n\n#\n# Django Prometheus\n#\n\nPROMETHEUS_EXPORT_MIGRATIONS = False\n\n\n#\n# Django filters\n#\n\nFILTERS_NULL_CHOICE_LABEL = \"None\"\nFILTERS_NULL_CHOICE_VALUE = \"null\"\n\n\n#\n# Django REST framework (API)\n#\n\nREST_FRAMEWORK_VERSION = VERSION.rsplit(\".\", 1)[0] # Use major.minor as API version\nREST_FRAMEWORK = {\n \"ALLOWED_VERSIONS\": [REST_FRAMEWORK_VERSION],\n \"DEFAULT_AUTHENTICATION_CLASSES\": (\n \"rest_framework.authentication.SessionAuthentication\",\n \"nautobot.core.api.authentication.TokenAuthentication\",\n ),\n \"DEFAULT_FILTER_BACKENDS\": (\"django_filters.rest_framework.DjangoFilterBackend\",),\n \"DEFAULT_METADATA_CLASS\": \"nautobot.core.api.metadata.BulkOperationMetadata\",\n \"DEFAULT_PAGINATION_CLASS\": \"nautobot.core.api.pagination.OptionalLimitOffsetPagination\",\n \"DEFAULT_PERMISSION_CLASSES\": (\"nautobot.core.api.authentication.TokenPermissions\",),\n \"DEFAULT_RENDERER_CLASSES\": (\n \"rest_framework.renderers.JSONRenderer\",\n \"nautobot.core.api.renderers.FormlessBrowsableAPIRenderer\",\n ),\n \"DEFAULT_VERSION\": REST_FRAMEWORK_VERSION,\n \"DEFAULT_VERSIONING_CLASS\": \"rest_framework.versioning.AcceptHeaderVersioning\",\n \"PAGE_SIZE\": PAGINATE_COUNT,\n \"SCHEMA_COERCE_METHOD_NAMES\": {\n # Default mappings\n \"retrieve\": \"read\",\n \"destroy\": \"delete\",\n # Custom operations\n \"bulk_destroy\": \"bulk_delete\",\n },\n \"VIEW_NAME_FUNCTION\": 
\"nautobot.utilities.api.get_view_name\",\n}\n\n\n#\n# drf_yasg (OpenAPI/Swagger)\n#\n\nSWAGGER_SETTINGS = {\n \"DEFAULT_AUTO_SCHEMA_CLASS\": \"nautobot.utilities.custom_inspectors.NautobotSwaggerAutoSchema\",\n \"DEFAULT_FIELD_INSPECTORS\": [\n \"nautobot.utilities.custom_inspectors.StatusFieldInspector\",\n \"nautobot.utilities.custom_inspectors.CustomFieldsDataFieldInspector\",\n \"nautobot.utilities.custom_inspectors.JSONFieldInspector\",\n \"nautobot.utilities.custom_inspectors.NullableBooleanFieldInspector\",\n \"nautobot.utilities.custom_inspectors.ChoiceFieldInspector\",\n \"nautobot.utilities.custom_inspectors.SerializedPKRelatedFieldInspector\",\n \"drf_yasg.inspectors.CamelCaseJSONFilter\",\n \"drf_yasg.inspectors.ReferencingSerializerInspector\",\n \"drf_yasg.inspectors.RelatedFieldInspector\",\n \"drf_yasg.inspectors.ChoiceFieldInspector\",\n \"drf_yasg.inspectors.FileFieldInspector\",\n \"drf_yasg.inspectors.DictFieldInspector\",\n \"drf_yasg.inspectors.SerializerMethodFieldInspector\",\n \"drf_yasg.inspectors.SimpleFieldInspector\",\n \"drf_yasg.inspectors.StringDefaultFieldInspector\",\n ],\n \"DEFAULT_FILTER_INSPECTORS\": [\n \"drf_yasg.inspectors.CoreAPICompatInspector\",\n ],\n \"DEFAULT_INFO\": \"nautobot.core.urls.openapi_info\",\n \"DEFAULT_MODEL_DEPTH\": 1,\n \"DEFAULT_PAGINATOR_INSPECTORS\": [\n \"nautobot.utilities.custom_inspectors.NullablePaginatorInspector\",\n \"drf_yasg.inspectors.DjangoRestResponsePagination\",\n \"drf_yasg.inspectors.CoreAPICompatInspector\",\n ],\n \"SECURITY_DEFINITIONS\": {\n \"Bearer\": {\n \"type\": \"apiKey\",\n \"name\": \"Authorization\",\n \"in\": \"header\",\n }\n },\n \"VALIDATOR_URL\": None,\n}\n\n\n##############################################\n# DJANGO - Core settings required for Django #\n##############################################\n\n#\n# Databases\n#\n\n# Only PostgresSQL is supported, so database driver is hard-coded. This can\n# still be overloaded in custom settings.\n# https://docs.djangoproject.com/en/stable/ref/settings/#databases\nDATABASES = {\n \"default\": {\n \"NAME\": os.getenv(\"NAUTOBOT_DATABASE\", \"nautobot\"),\n \"USER\": os.getenv(\"NAUTOBOT_USER\", \"\"),\n \"PASSWORD\": os.getenv(\"NAUTOBOT_PASSWORD\", \"\"),\n \"HOST\": os.getenv(\"NAUTOBOT_DB_HOST\", \"localhost\"),\n \"PORT\": os.getenv(\"NAUTOBOT_DB_PORT\", \"\"),\n \"CONN_MAX_AGE\": int(os.getenv(\"NAUTOBOT_DB_TIMEOUT\", 300)),\n \"ENGINE\": os.getenv(\"NAUTOBOT_DB_ENGINE\", \"django.db.backends.postgresql\"),\n }\n}\n\n# The secret key is used to encrypt session keys and salt passwords.\nSECRET_KEY = os.getenv(\"SECRET_KEY\")\n\n# Default overrides\nALLOWED_HOSTS = []\nCSRF_TRUSTED_ORIGINS = []\nDATETIME_FORMAT = \"N j, Y g:i a\"\nINTERNAL_IPS = (\"127.0.0.1\", \"::1\")\nFORCE_SCRIPT_NAME = None\nLOGGING = {}\nMEDIA_ROOT = os.path.join(NAUTOBOT_ROOT, \"media\").rstrip(\"/\")\nSESSION_FILE_PATH = None\nSHORT_DATE_FORMAT = \"Y-m-d\"\nSHORT_DATETIME_FORMAT = \"Y-m-d H:i\"\nTIME_FORMAT = \"g:i a\"\nTIME_ZONE = \"UTC\"\n\n# Installed apps and Django plugins. 
Nautobot plugins will be appended here later.\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"django.contrib.humanize\",\n \"cacheops\",\n \"corsheaders\",\n \"django_filters\",\n \"django_jinja\",\n \"django_tables2\",\n \"django_prometheus\",\n \"mptt\",\n \"rest_framework\",\n \"social_django\",\n \"taggit\",\n \"timezone_field\",\n \"nautobot.core\",\n \"nautobot.circuits\",\n \"nautobot.dcim\",\n \"nautobot.ipam\",\n \"nautobot.extras\",\n \"nautobot.tenancy\",\n \"nautobot.users\",\n \"nautobot.utilities\",\n \"nautobot.virtualization\",\n \"django_rq\", # Must come after nautobot.extras to allow overriding management commands\n \"drf_yasg\",\n \"graphene_django\",\n \"health_check\",\n \"health_check.cache\",\n \"health_check.storage\",\n]\n\n# Middleware\nMIDDLEWARE = [\n \"django_prometheus.middleware.PrometheusBeforeMiddleware\",\n \"corsheaders.middleware.CorsMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n \"nautobot.core.middleware.ExceptionHandlingMiddleware\",\n \"nautobot.core.middleware.RemoteUserMiddleware\",\n \"nautobot.core.middleware.ExternalAuthMiddleware\",\n \"nautobot.core.middleware.APIVersionMiddleware\",\n \"nautobot.core.middleware.ObjectChangeMiddleware\",\n \"django_prometheus.middleware.PrometheusAfterMiddleware\",\n]\n\nROOT_URLCONF = \"nautobot.core.urls\"\n\nTEMPLATES = [\n {\n \"NAME\": \"django\",\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.template.context_processors.media\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"social_django.context_processors.backends\",\n \"social_django.context_processors.login_redirect\",\n \"nautobot.core.context_processors.settings_and_registry\",\n \"nautobot.core.context_processors.sso_auth\",\n ],\n },\n },\n {\n \"NAME\": \"jinja\",\n \"BACKEND\": \"django_jinja.backend.Jinja2\",\n \"DIRS\": [],\n \"APP_DIRS\": False,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.template.context_processors.media\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"social_django.context_processors.backends\",\n \"social_django.context_processors.login_redirect\",\n \"nautobot.core.context_processors.settings_and_registry\",\n \"nautobot.core.context_processors.sso_auth\",\n ],\n },\n },\n]\n\n# Set up authentication backends\nAUTHENTICATION_BACKENDS = [\n # Always check object permissions\n \"nautobot.core.authentication.ObjectPermissionBackend\",\n]\n\n# Internationalization\nLANGUAGE_CODE = \"en-us\"\nUSE_I18N = True\nUSE_TZ = True\n\n# WSGI\nWSGI_APPLICATION = \"nautobot.core.wsgi.application\"\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", 
\"https\")\nUSE_X_FORWARDED_HOST = True\nX_FRAME_OPTIONS = \"DENY\"\n\n# Static files (CSS, JavaScript, Images)\nSTATIC_ROOT = os.path.join(NAUTOBOT_ROOT, \"static\")\nSTATIC_URL = \"static/\"\nSTATICFILES_DIRS = (os.path.join(BASE_DIR, \"project-static\"),)\n\n# Media\nMEDIA_URL = \"media/\"\n\n# Disable default limit of 1000 fields per request. Needed for bulk deletion of objects. (Added in Django 1.10.)\nDATA_UPLOAD_MAX_NUMBER_FIELDS = None\n\n# Messages\nMESSAGE_TAGS = {\n messages.ERROR: \"danger\",\n}\n\n# Authentication URLs\n# This is the URL route name for the login view.\nLOGIN_URL = \"login\"\n\n# This is the URL route name for the home page (index) view.\nLOGIN_REDIRECT_URL = \"home\"\n\n#\n# From django-cors-headers\n#\n\n# If True, all origins will be allowed. Other settings restricting allowed origins will be ignored.\n# Defaults to False. Setting this to True can be dangerous, as it allows any website to make\n# cross-origin requests to yours. Generally you'll want to restrict the list of allowed origins with\n# CORS_ALLOWED_ORIGINS or CORS_ALLOWED_ORIGIN_REGEXES.\nCORS_ALLOW_ALL_ORIGINS = False\n\n# A list of strings representing regexes that match Origins that are authorized to make cross-site\n# HTTP requests. Defaults to [].\nCORS_ALLOWED_ORIGIN_REGEXES = []\n\n# A list of origins that are authorized to make cross-site HTTP requests. Defaults to [].\nCORS_ALLOWED_ORIGINS = []\n\n#\n# GraphQL\n#\n\nGRAPHENE = {\n \"SCHEMA\": \"nautobot.core.graphql.schema_init.schema\",\n \"DJANGO_CHOICE_FIELD_ENUM_V3_NAMING\": True, # any field with a name of type will break in Graphene otherwise.\n}\nGRAPHQL_CUSTOM_FIELD_PREFIX = \"cf\"\nGRAPHQL_RELATIONSHIP_PREFIX = \"rel\"\nGRAPHQL_COMPUTED_FIELD_PREFIX = \"cpf\"\n\n\n#\n# Caching\n#\n\n# The django-cacheops plugin is used to cache querysets. The built-in Django\n# caching is not used.\nCACHEOPS = {\n \"auth.user\": {\"ops\": \"get\", \"timeout\": 60 * 15},\n \"auth.*\": {\"ops\": (\"fetch\", \"get\")},\n \"auth.permission\": {\"ops\": \"all\"},\n \"circuits.*\": {\"ops\": \"all\"},\n \"dcim.inventoryitem\": None, # MPTT models are exempt due to raw SQL\n \"dcim.region\": None, # MPTT models are exempt due to raw SQL\n \"dcim.rackgroup\": None, # MPTT models are exempt due to raw SQL\n \"dcim.*\": {\"ops\": \"all\"},\n \"ipam.*\": {\"ops\": \"all\"},\n \"extras.*\": {\"ops\": \"all\"},\n \"users.*\": {\"ops\": \"all\"},\n \"tenancy.tenantgroup\": None, # MPTT models are exempt due to raw SQL\n \"tenancy.*\": {\"ops\": \"all\"},\n \"virtualization.*\": {\"ops\": \"all\"},\n}\nCACHEOPS_DEGRADE_ON_FAILURE = True\nCACHEOPS_ENABLED = True\nCACHEOPS_REDIS = \"redis://localhost:6379/1\"\nCACHEOPS_DEFAULTS = {\"timeout\": 900}\n\n# The django-redis cache is used to establish concurrent locks using Redis. 
The\n# django-rq settings will use the same instance/database by default.\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django_redis.cache.RedisCache\",\n \"LOCATION\": \"redis://localhost:6379/0\",\n \"TIMEOUT\": 300,\n \"OPTIONS\": {\n \"CLIENT_CLASS\": \"django_redis.client.DefaultClient\",\n \"PASSWORD\": \"\",\n },\n }\n}\n\n#\n# Django RQ (used for legacy background processesing)\n#\n\n# These defaults utilize the Django caches setting defined for django-redis.\n# See: https://github.com/rq/django-rq#support-for-django-redis-and-django-redis-cache\nRQ_QUEUES = {\n \"default\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n \"check_releases\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n \"custom_fields\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n \"webhooks\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n}\n\n#\n# Celery (used for background processing)\n#\n\n# Celery broker URL used to tell workers where queues are located\nCELERY_BROKER_URL = os.getenv(\"NAUTOBOT_CELERY_BROKER_URL\", parse_redis_connection(redis_database=0))\n\n# Celery results backend URL to tell workers where to publish task results\nCELERY_RESULT_BACKEND = os.getenv(\"NAUTOBOT_CELERY_RESULT_BACKEND\", parse_redis_connection(redis_database=0))\n\n# Instruct celery to report the started status of a job, instead of just `pending`, `finished`, or `failed`\nCELERY_TASK_TRACK_STARTED = True\n\n# Global task time limits (seconds)\n# Exceeding the soft limit will result in a SoftTimeLimitExceeded exception,\n# while exceeding the hard limit will result in a SIGKILL.\nCELERY_TASK_SOFT_TIME_LIMIT = int(os.getenv(\"NAUTOBOT_CELERY_TASK_SOFT_TIME_LIMIT\", 5 * 60))\nCELERY_TASK_TIME_LIMIT = int(os.getenv(\"NAUTOBOT_CELERY_TASK_TIME_LIMIT\", 10 * 60))\n\n# These settings define the custom nautobot serialization encoding as an accepted data encoding format\n# and register that format for task input and result serialization\nCELERY_ACCEPT_CONTENT = [\"nautobot_json\"]\nCELERY_RESULT_ACCEPT_CONTENT = [\"nautobot_json\"]\nCELERY_TASK_SERIALIZER = \"nautobot_json\"\nCELERY_RESULT_SERIALIZER = \"nautobot_json\"\n",
"path": "nautobot/core/settings.py"
}
] | [
{
"content": "import os\nimport platform\n\nfrom django.contrib.messages import constants as messages\n\nfrom nautobot import __version__\nfrom nautobot.core.settings_funcs import is_truthy, parse_redis_connection\n\n#\n# Environment setup\n#\n\n# This is used for display in the UI.\nVERSION = __version__\n\n# Hostname of the system. This is displayed in the web UI footers along with the\n# version.\nHOSTNAME = platform.node()\n\n# Set the base directory two levels up (i.e. the base nautobot/ directory)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\n# Set the swapable User model to the Nautobot custom User model\nAUTH_USER_MODEL = \"users.User\"\n\n\n###############################################################\n# NAUTOBOT - Settings for Nautobot internals/plugins/defaults #\n###############################################################\n\n#\n# Nautobot optional settings/defaults\n#\nALLOWED_URL_SCHEMES = (\n \"file\",\n \"ftp\",\n \"ftps\",\n \"http\",\n \"https\",\n \"irc\",\n \"mailto\",\n \"sftp\",\n \"ssh\",\n \"tel\",\n \"telnet\",\n \"tftp\",\n \"vnc\",\n \"xmpp\",\n)\nBANNER_BOTTOM = \"\"\nBANNER_LOGIN = \"\"\nBANNER_TOP = \"\"\n\n# Base directory wherein all created files (jobs, git repositories, file uploads, static files) will be stored)\nNAUTOBOT_ROOT = os.getenv(\"NAUTOBOT_ROOT\", os.path.expanduser(\"~/.nautobot\"))\n\nCHANGELOG_RETENTION = 90\nDOCS_ROOT = os.path.join(BASE_DIR, \"docs\")\nHIDE_RESTRICTED_UI = False\n\n# By default, Nautobot will permit users to create duplicate prefixes and IP addresses in the global\n# table (that is, those which are not assigned to any VRF). This behavior can be disabled by setting\n# ENFORCE_GLOBAL_UNIQUE to True.\nENFORCE_GLOBAL_UNIQUE = False\n\n# Exclude potentially sensitive models from wildcard view exemption. 
These may still be exempted\n# by specifying the model individually in the EXEMPT_VIEW_PERMISSIONS configuration parameter.\nEXEMPT_EXCLUDE_MODELS = (\n (\"auth\", \"group\"),\n (\"users\", \"user\"),\n (\"users\", \"objectpermission\"),\n)\n\nEXEMPT_VIEW_PERMISSIONS = []\nGIT_ROOT = os.getenv(\"NAUTOBOT_GIT_ROOT\", os.path.join(NAUTOBOT_ROOT, \"git\").rstrip(\"/\"))\nHTTP_PROXIES = None\nJOBS_ROOT = os.getenv(\"NAUTOBOT_JOBS_ROOT\", os.path.join(NAUTOBOT_ROOT, \"jobs\").rstrip(\"/\"))\nMAINTENANCE_MODE = False\nMAX_PAGE_SIZE = 1000\n\n# Metrics\nMETRICS_ENABLED = False\n\n# Napalm\nNAPALM_ARGS = {}\nNAPALM_PASSWORD = \"\"\nNAPALM_TIMEOUT = 30\nNAPALM_USERNAME = \"\"\n\n# Pagination\nPAGINATE_COUNT = 50\nPER_PAGE_DEFAULTS = [25, 50, 100, 250, 500, 1000]\n\n# Plugins\nPLUGINS = []\nPLUGINS_CONFIG = {}\n\n# IPv4?\nPREFER_IPV4 = False\n\n# Racks\nRACK_ELEVATION_DEFAULT_UNIT_HEIGHT = 22\nRACK_ELEVATION_DEFAULT_UNIT_WIDTH = 220\n\n# Global 3rd-party authentication settings\nEXTERNAL_AUTH_DEFAULT_GROUPS = []\nEXTERNAL_AUTH_DEFAULT_PERMISSIONS = {}\n\n# Remote auth backend settings\nREMOTE_AUTH_AUTO_CREATE_USER = False\nREMOTE_AUTH_HEADER = \"HTTP_REMOTE_USER\"\n\n# Releases\nRELEASE_CHECK_URL = None\nRELEASE_CHECK_TIMEOUT = 24 * 3600\n\n# SSO backend settings https://python-social-auth.readthedocs.io/en/latest/configuration/settings.html\nSOCIAL_AUTH_POSTGRES_JSONFIELD = False\n# Nautobot related - May be overridden if using custom social auth backend\nSOCIAL_AUTH_BACKEND_PREFIX = \"social_core.backends\"\n\n# Storage\nSTORAGE_BACKEND = None\nSTORAGE_CONFIG = {}\n\n# Test runner that is aware of our use of \"integration\" tags and only runs\n# integration tests if explicitly passed in with `nautobot-server test --tag integration`.\nTEST_RUNNER = \"nautobot.core.tests.runner.NautobotTestRunner\"\n\n#\n# Django cryptography\n#\n\n# CRYPTOGRAPHY_BACKEND = cryptography.hazmat.backends.default_backend()\n# CRYPTOGRAPHY_DIGEST = cryptography.hazmat.primitives.hashes.SHA256\nCRYPTOGRAPHY_KEY = None # Defaults to SECRET_KEY if unset\nCRYPTOGRAPHY_SALT = \"nautobot-cryptography\"\n\n\n#\n# Django Prometheus\n#\n\nPROMETHEUS_EXPORT_MIGRATIONS = False\n\n\n#\n# Django filters\n#\n\nFILTERS_NULL_CHOICE_LABEL = \"None\"\nFILTERS_NULL_CHOICE_VALUE = \"null\"\n\n\n#\n# Django REST framework (API)\n#\n\nREST_FRAMEWORK_VERSION = VERSION.rsplit(\".\", 1)[0] # Use major.minor as API version\nREST_FRAMEWORK = {\n \"ALLOWED_VERSIONS\": [REST_FRAMEWORK_VERSION],\n \"DEFAULT_AUTHENTICATION_CLASSES\": (\n \"rest_framework.authentication.SessionAuthentication\",\n \"nautobot.core.api.authentication.TokenAuthentication\",\n ),\n \"DEFAULT_FILTER_BACKENDS\": (\"django_filters.rest_framework.DjangoFilterBackend\",),\n \"DEFAULT_METADATA_CLASS\": \"nautobot.core.api.metadata.BulkOperationMetadata\",\n \"DEFAULT_PAGINATION_CLASS\": \"nautobot.core.api.pagination.OptionalLimitOffsetPagination\",\n \"DEFAULT_PERMISSION_CLASSES\": (\"nautobot.core.api.authentication.TokenPermissions\",),\n \"DEFAULT_RENDERER_CLASSES\": (\n \"rest_framework.renderers.JSONRenderer\",\n \"nautobot.core.api.renderers.FormlessBrowsableAPIRenderer\",\n ),\n \"DEFAULT_VERSION\": REST_FRAMEWORK_VERSION,\n \"DEFAULT_VERSIONING_CLASS\": \"rest_framework.versioning.AcceptHeaderVersioning\",\n \"PAGE_SIZE\": PAGINATE_COUNT,\n \"SCHEMA_COERCE_METHOD_NAMES\": {\n # Default mappings\n \"retrieve\": \"read\",\n \"destroy\": \"delete\",\n # Custom operations\n \"bulk_destroy\": \"bulk_delete\",\n },\n \"VIEW_NAME_FUNCTION\": 
\"nautobot.utilities.api.get_view_name\",\n}\n\n\n#\n# drf_yasg (OpenAPI/Swagger)\n#\n\nSWAGGER_SETTINGS = {\n \"DEFAULT_AUTO_SCHEMA_CLASS\": \"nautobot.utilities.custom_inspectors.NautobotSwaggerAutoSchema\",\n \"DEFAULT_FIELD_INSPECTORS\": [\n \"nautobot.utilities.custom_inspectors.StatusFieldInspector\",\n \"nautobot.utilities.custom_inspectors.CustomFieldsDataFieldInspector\",\n \"nautobot.utilities.custom_inspectors.JSONFieldInspector\",\n \"nautobot.utilities.custom_inspectors.NullableBooleanFieldInspector\",\n \"nautobot.utilities.custom_inspectors.ChoiceFieldInspector\",\n \"nautobot.utilities.custom_inspectors.SerializedPKRelatedFieldInspector\",\n \"drf_yasg.inspectors.CamelCaseJSONFilter\",\n \"drf_yasg.inspectors.ReferencingSerializerInspector\",\n \"drf_yasg.inspectors.RelatedFieldInspector\",\n \"drf_yasg.inspectors.ChoiceFieldInspector\",\n \"drf_yasg.inspectors.FileFieldInspector\",\n \"drf_yasg.inspectors.DictFieldInspector\",\n \"drf_yasg.inspectors.SerializerMethodFieldInspector\",\n \"drf_yasg.inspectors.SimpleFieldInspector\",\n \"drf_yasg.inspectors.StringDefaultFieldInspector\",\n ],\n \"DEFAULT_FILTER_INSPECTORS\": [\n \"drf_yasg.inspectors.CoreAPICompatInspector\",\n ],\n \"DEFAULT_INFO\": \"nautobot.core.urls.openapi_info\",\n \"DEFAULT_MODEL_DEPTH\": 1,\n \"DEFAULT_PAGINATOR_INSPECTORS\": [\n \"nautobot.utilities.custom_inspectors.NullablePaginatorInspector\",\n \"drf_yasg.inspectors.DjangoRestResponsePagination\",\n \"drf_yasg.inspectors.CoreAPICompatInspector\",\n ],\n \"SECURITY_DEFINITIONS\": {\n \"Bearer\": {\n \"type\": \"apiKey\",\n \"name\": \"Authorization\",\n \"in\": \"header\",\n }\n },\n \"VALIDATOR_URL\": None,\n}\n\n\n##############################################\n# DJANGO - Core settings required for Django #\n##############################################\n\n#\n# Databases\n#\n\n# Only PostgresSQL is supported, so database driver is hard-coded. This can\n# still be overloaded in custom settings.\n# https://docs.djangoproject.com/en/stable/ref/settings/#databases\nDATABASES = {\n \"default\": {\n \"NAME\": os.getenv(\"NAUTOBOT_DATABASE\", \"nautobot\"),\n \"USER\": os.getenv(\"NAUTOBOT_USER\", \"\"),\n \"PASSWORD\": os.getenv(\"NAUTOBOT_PASSWORD\", \"\"),\n \"HOST\": os.getenv(\"NAUTOBOT_DB_HOST\", \"localhost\"),\n \"PORT\": os.getenv(\"NAUTOBOT_DB_PORT\", \"\"),\n \"CONN_MAX_AGE\": int(os.getenv(\"NAUTOBOT_DB_TIMEOUT\", 300)),\n \"ENGINE\": os.getenv(\"NAUTOBOT_DB_ENGINE\", \"django.db.backends.postgresql\"),\n }\n}\n\n# The secret key is used to encrypt session keys and salt passwords.\nSECRET_KEY = os.getenv(\"SECRET_KEY\")\n\n# Default overrides\nALLOWED_HOSTS = []\nCSRF_TRUSTED_ORIGINS = []\nDATETIME_FORMAT = \"N j, Y g:i a\"\nINTERNAL_IPS = (\"127.0.0.1\", \"::1\")\nFORCE_SCRIPT_NAME = None\nLOGGING = {}\nMEDIA_ROOT = os.path.join(NAUTOBOT_ROOT, \"media\").rstrip(\"/\")\nSESSION_FILE_PATH = None\nSHORT_DATE_FORMAT = \"Y-m-d\"\nSHORT_DATETIME_FORMAT = \"Y-m-d H:i\"\nTIME_FORMAT = \"g:i a\"\nTIME_ZONE = \"UTC\"\n\n# Installed apps and Django plugins. 
Nautobot plugins will be appended here later.\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"django.contrib.humanize\",\n \"cacheops\",\n \"corsheaders\",\n \"django_filters\",\n \"django_jinja\",\n \"django_tables2\",\n \"django_prometheus\",\n \"mptt\",\n \"rest_framework\",\n \"social_django\",\n \"taggit\",\n \"timezone_field\",\n \"nautobot.core\",\n \"nautobot.circuits\",\n \"nautobot.dcim\",\n \"nautobot.ipam\",\n \"nautobot.extras\",\n \"nautobot.tenancy\",\n \"nautobot.users\",\n \"nautobot.utilities\",\n \"nautobot.virtualization\",\n \"django_rq\", # Must come after nautobot.extras to allow overriding management commands\n \"drf_yasg\",\n \"graphene_django\",\n \"health_check\",\n \"health_check.cache\",\n \"health_check.storage\",\n \"db_file_storage\",\n]\n\n# Middleware\nMIDDLEWARE = [\n \"django_prometheus.middleware.PrometheusBeforeMiddleware\",\n \"corsheaders.middleware.CorsMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n \"nautobot.core.middleware.ExceptionHandlingMiddleware\",\n \"nautobot.core.middleware.RemoteUserMiddleware\",\n \"nautobot.core.middleware.ExternalAuthMiddleware\",\n \"nautobot.core.middleware.APIVersionMiddleware\",\n \"nautobot.core.middleware.ObjectChangeMiddleware\",\n \"django_prometheus.middleware.PrometheusAfterMiddleware\",\n]\n\nROOT_URLCONF = \"nautobot.core.urls\"\n\nTEMPLATES = [\n {\n \"NAME\": \"django\",\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.template.context_processors.media\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"social_django.context_processors.backends\",\n \"social_django.context_processors.login_redirect\",\n \"nautobot.core.context_processors.settings_and_registry\",\n \"nautobot.core.context_processors.sso_auth\",\n ],\n },\n },\n {\n \"NAME\": \"jinja\",\n \"BACKEND\": \"django_jinja.backend.Jinja2\",\n \"DIRS\": [],\n \"APP_DIRS\": False,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.template.context_processors.media\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"social_django.context_processors.backends\",\n \"social_django.context_processors.login_redirect\",\n \"nautobot.core.context_processors.settings_and_registry\",\n \"nautobot.core.context_processors.sso_auth\",\n ],\n },\n },\n]\n\n# Set up authentication backends\nAUTHENTICATION_BACKENDS = [\n # Always check object permissions\n \"nautobot.core.authentication.ObjectPermissionBackend\",\n]\n\n# Internationalization\nLANGUAGE_CODE = \"en-us\"\nUSE_I18N = True\nUSE_TZ = True\n\n# WSGI\nWSGI_APPLICATION = \"nautobot.core.wsgi.application\"\nSECURE_PROXY_SSL_HEADER = 
(\"HTTP_X_FORWARDED_PROTO\", \"https\")\nUSE_X_FORWARDED_HOST = True\nX_FRAME_OPTIONS = \"DENY\"\n\n# Static files (CSS, JavaScript, Images)\nSTATIC_ROOT = os.path.join(NAUTOBOT_ROOT, \"static\")\nSTATIC_URL = \"static/\"\nSTATICFILES_DIRS = (os.path.join(BASE_DIR, \"project-static\"),)\n\n# Media\nMEDIA_URL = \"media/\"\n\n# Disable default limit of 1000 fields per request. Needed for bulk deletion of objects. (Added in Django 1.10.)\nDATA_UPLOAD_MAX_NUMBER_FIELDS = None\n\n# Messages\nMESSAGE_TAGS = {\n messages.ERROR: \"danger\",\n}\n\n# Authentication URLs\n# This is the URL route name for the login view.\nLOGIN_URL = \"login\"\n\n# This is the URL route name for the home page (index) view.\nLOGIN_REDIRECT_URL = \"home\"\n\n#\n# From django-cors-headers\n#\n\n# If True, all origins will be allowed. Other settings restricting allowed origins will be ignored.\n# Defaults to False. Setting this to True can be dangerous, as it allows any website to make\n# cross-origin requests to yours. Generally you'll want to restrict the list of allowed origins with\n# CORS_ALLOWED_ORIGINS or CORS_ALLOWED_ORIGIN_REGEXES.\nCORS_ALLOW_ALL_ORIGINS = False\n\n# A list of strings representing regexes that match Origins that are authorized to make cross-site\n# HTTP requests. Defaults to [].\nCORS_ALLOWED_ORIGIN_REGEXES = []\n\n# A list of origins that are authorized to make cross-site HTTP requests. Defaults to [].\nCORS_ALLOWED_ORIGINS = []\n\n#\n# GraphQL\n#\n\nGRAPHENE = {\n \"SCHEMA\": \"nautobot.core.graphql.schema_init.schema\",\n \"DJANGO_CHOICE_FIELD_ENUM_V3_NAMING\": True, # any field with a name of type will break in Graphene otherwise.\n}\nGRAPHQL_CUSTOM_FIELD_PREFIX = \"cf\"\nGRAPHQL_RELATIONSHIP_PREFIX = \"rel\"\nGRAPHQL_COMPUTED_FIELD_PREFIX = \"cpf\"\n\n\n#\n# Caching\n#\n\n# The django-cacheops plugin is used to cache querysets. The built-in Django\n# caching is not used.\nCACHEOPS = {\n \"auth.user\": {\"ops\": \"get\", \"timeout\": 60 * 15},\n \"auth.*\": {\"ops\": (\"fetch\", \"get\")},\n \"auth.permission\": {\"ops\": \"all\"},\n \"circuits.*\": {\"ops\": \"all\"},\n \"dcim.inventoryitem\": None, # MPTT models are exempt due to raw SQL\n \"dcim.region\": None, # MPTT models are exempt due to raw SQL\n \"dcim.rackgroup\": None, # MPTT models are exempt due to raw SQL\n \"dcim.*\": {\"ops\": \"all\"},\n \"ipam.*\": {\"ops\": \"all\"},\n \"extras.*\": {\"ops\": \"all\"},\n \"users.*\": {\"ops\": \"all\"},\n \"tenancy.tenantgroup\": None, # MPTT models are exempt due to raw SQL\n \"tenancy.*\": {\"ops\": \"all\"},\n \"virtualization.*\": {\"ops\": \"all\"},\n}\nCACHEOPS_DEGRADE_ON_FAILURE = True\nCACHEOPS_ENABLED = True\nCACHEOPS_REDIS = \"redis://localhost:6379/1\"\nCACHEOPS_DEFAULTS = {\"timeout\": 900}\n\n# The django-redis cache is used to establish concurrent locks using Redis. 
The\n# django-rq settings will use the same instance/database by default.\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django_redis.cache.RedisCache\",\n \"LOCATION\": \"redis://localhost:6379/0\",\n \"TIMEOUT\": 300,\n \"OPTIONS\": {\n \"CLIENT_CLASS\": \"django_redis.client.DefaultClient\",\n \"PASSWORD\": \"\",\n },\n }\n}\n\n#\n# Django RQ (used for legacy background processesing)\n#\n\n# These defaults utilize the Django caches setting defined for django-redis.\n# See: https://github.com/rq/django-rq#support-for-django-redis-and-django-redis-cache\nRQ_QUEUES = {\n \"default\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n \"check_releases\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n \"custom_fields\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n \"webhooks\": {\n \"USE_REDIS_CACHE\": \"default\",\n },\n}\n\n#\n# Celery (used for background processing)\n#\n\n# Celery broker URL used to tell workers where queues are located\nCELERY_BROKER_URL = os.getenv(\"NAUTOBOT_CELERY_BROKER_URL\", parse_redis_connection(redis_database=0))\n\n# Celery results backend URL to tell workers where to publish task results\nCELERY_RESULT_BACKEND = os.getenv(\"NAUTOBOT_CELERY_RESULT_BACKEND\", parse_redis_connection(redis_database=0))\n\n# Instruct celery to report the started status of a job, instead of just `pending`, `finished`, or `failed`\nCELERY_TASK_TRACK_STARTED = True\n\n# Global task time limits (seconds)\n# Exceeding the soft limit will result in a SoftTimeLimitExceeded exception,\n# while exceeding the hard limit will result in a SIGKILL.\nCELERY_TASK_SOFT_TIME_LIMIT = int(os.getenv(\"NAUTOBOT_CELERY_TASK_SOFT_TIME_LIMIT\", 5 * 60))\nCELERY_TASK_TIME_LIMIT = int(os.getenv(\"NAUTOBOT_CELERY_TASK_TIME_LIMIT\", 10 * 60))\n\n# These settings define the custom nautobot serialization encoding as an accepted data encoding format\n# and register that format for task input and result serialization\nCELERY_ACCEPT_CONTENT = [\"nautobot_json\"]\nCELERY_RESULT_ACCEPT_CONTENT = [\"nautobot_json\"]\nCELERY_TASK_SERIALIZER = \"nautobot_json\"\nCELERY_RESULT_SERIALIZER = \"nautobot_json\"\n",
"path": "nautobot/core/settings.py"
}
] | diff --git a/nautobot/core/settings.py b/nautobot/core/settings.py
index 5a8f0fd8de5..6cf4e63b811 100644
--- a/nautobot/core/settings.py
+++ b/nautobot/core/settings.py
@@ -304,6 +304,7 @@
"health_check",
"health_check.cache",
"health_check.storage",
+ "db_file_storage",
]
# Middleware
diff --git a/nautobot/extras/tests/dummy_jobs/test_field_order.py b/nautobot/extras/tests/dummy_jobs/test_field_order.py
index a9a71592df7..389ef290c1a 100644
--- a/nautobot/extras/tests/dummy_jobs/test_field_order.py
+++ b/nautobot/extras/tests/dummy_jobs/test_field_order.py
@@ -1,4 +1,4 @@
-from nautobot.extras.jobs import Job, StringVar
+from nautobot.extras.jobs import Job, FileVar, StringVar
class TestFieldOrder(Job):
@@ -8,7 +8,9 @@ class TestFieldOrder(Job):
var2 = StringVar(description="Hello")
+ var1 = FileVar(description="Some file wants to be first")
+
class Meta:
"""Metaclass attrs."""
- field_order = ["var2", "var23"]
+ field_order = ["var1", "var2", "var23"]
diff --git a/nautobot/extras/tests/test_jobs.py b/nautobot/extras/tests/test_jobs.py
index 66923d1bed4..e928082ffb7 100644
--- a/nautobot/extras/tests/test_jobs.py
+++ b/nautobot/extras/tests/test_jobs.py
@@ -83,7 +83,10 @@ def test_field_order(self):
self.assertHTMLEqual(
form.as_table(),
- """<tr><th><label for="id_var2">Var2:</label></th><td>
+ """<tr><th><label for="id_var1">Var1:</label></th><td>
+<input class="form-control form-control" id="id_var1" name="var1" placeholder="None" required type="file">
+<br><span class="helptext">Some file wants to be first</span></td></tr>
+<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
|
CTFd__CTFd-863 | get_config return default
get_config(key) should probably be get_config(key, default=None). This helps in cases where you want different behavior when get_config returns None.
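For illustration, a minimal sketch of what the proposed signature could look like (the `_get_config` helper and `_config_store` below are stand-ins for however the application actually stores its configuration, not necessarily CTFd's internals):

```python
# Illustrative stand-in for the application's config storage.
_config_store = {"ctf_theme": "core"}


def _get_config(key):
    # Hypothetical internal lookup that returns None when the key is missing.
    return _config_store.get(key)


def get_config(key, default=None):
    """Public wrapper: fall back to `default` when the key is unset."""
    value = _get_config(key)
    if value is None:
        return default
    return value


# Callers can then supply a sensible fallback instead of checking for None everywhere:
print(get_config("ctf_theme"))                          # -> "core"
print(get_config("mail_server", default="localhost"))   # -> "localhost"
```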
| [
{
"content": "import sys\nimport os\n\nfrom distutils.version import StrictVersion\nfrom flask import Flask, Request\nfrom werkzeug.utils import cached_property\nfrom werkzeug.contrib.fixers import ProxyFix\nfrom jinja2 import FileSystemLoader\nfrom jinja2.sandbox import SandboxedEnvironment\nfrom six.moves import input\n\nfrom CTFd import utils\nfrom CTFd.utils.migrations import migrations, migrate, upgrade, stamp, create_database\nfrom CTFd.utils.sessions import CachingSessionInterface\nfrom CTFd.utils.updates import update_check\nfrom CTFd.utils.initialization import init_request_processors, init_template_filters, init_template_globals, init_logs\nfrom CTFd.utils.events import socketio\nfrom CTFd.plugins import init_plugins\n\n# Hack to support Unicode in Python 2 properly\nif sys.version_info[0] < 3:\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n\n__version__ = '2.0.3'\n\n\nclass CTFdRequest(Request):\n @cached_property\n def path(self):\n \"\"\"\n Hijack the original Flask request path because it does not account for subdirectory deployments in an intuitive\n manner. We append script_root so that the path always points to the full path as seen in the browser.\n e.g. /subdirectory/path/route vs /path/route\n\n :return: string\n \"\"\"\n return self.script_root + super(CTFdRequest, self).path\n\n\nclass CTFdFlask(Flask):\n def __init__(self, *args, **kwargs):\n \"\"\"Overriden Jinja constructor setting a custom jinja_environment\"\"\"\n self.jinja_environment = SandboxedBaseEnvironment\n self.session_interface = CachingSessionInterface(key_prefix='session')\n self.request_class = CTFdRequest\n Flask.__init__(self, *args, **kwargs)\n\n def create_jinja_environment(self):\n \"\"\"Overridden jinja environment constructor\"\"\"\n return super(CTFdFlask, self).create_jinja_environment()\n\n\nclass SandboxedBaseEnvironment(SandboxedEnvironment):\n \"\"\"SandboxEnvironment that mimics the Flask BaseEnvironment\"\"\"\n def __init__(self, app, **options):\n if 'loader' not in options:\n options['loader'] = app.create_global_jinja_loader()\n # Disable cache entirely so that themes can be switched (#662)\n # If the cache is enabled, switching themes will cause odd rendering errors\n SandboxedEnvironment.__init__(self, cache_size=0, **options)\n self.app = app\n\n\nclass ThemeLoader(FileSystemLoader):\n \"\"\"Custom FileSystemLoader that switches themes based on the configuration value\"\"\"\n def __init__(self, searchpath, encoding='utf-8', followlinks=False):\n super(ThemeLoader, self).__init__(searchpath, encoding, followlinks)\n self.overriden_templates = {}\n\n def get_source(self, environment, template):\n # Check if the template has been overriden\n if template in self.overriden_templates:\n return self.overriden_templates[template], template, True\n\n # Check if the template requested is for the admin panel\n if template.startswith('admin/'):\n template = template[6:] # Strip out admin/\n template = \"/\".join(['admin', 'templates', template])\n return super(ThemeLoader, self).get_source(environment, template)\n\n # Load regular theme data\n theme = utils.get_config('ctf_theme')\n template = \"/\".join([theme, 'templates', template])\n return super(ThemeLoader, self).get_source(environment, template)\n\n\ndef confirm_upgrade():\n if sys.stdin.isatty():\n print(\"/*\\\\ CTFd has updated and must update the database! /*\\\\\")\n print(\"/*\\\\ Please backup your database before proceeding! /*\\\\\")\n print(\"/*\\\\ CTFd maintainers are not responsible for any data loss! 
/*\\\\\")\n if input('Run database migrations (Y/N)').lower().strip() == 'y':\n return True\n else:\n print('/*\\\\ Ignored database migrations... /*\\\\')\n return False\n else:\n return True\n\n\ndef run_upgrade():\n upgrade()\n utils.set_config('ctf_version', __version__)\n\n\ndef create_app(config='CTFd.config.Config'):\n app = CTFdFlask(__name__)\n with app.app_context():\n app.config.from_object(config)\n\n theme_loader = ThemeLoader(os.path.join(app.root_path, 'themes'), followlinks=True)\n app.jinja_loader = theme_loader\n\n from CTFd.models import db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking\n\n url = create_database()\n\n # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in\n # This is mostly so we can force MySQL's charset\n app.config['SQLALCHEMY_DATABASE_URI'] = str(url)\n\n # Register database\n db.init_app(app)\n\n # Register Flask-Migrate\n migrations.init_app(app, db)\n\n # Alembic sqlite support is lacking so we should just create_all anyway\n if url.drivername.startswith('sqlite'):\n db.create_all()\n stamp()\n else:\n # This creates tables instead of db.create_all()\n # Allows migrations to happen properly\n upgrade()\n\n from CTFd.models import ma\n\n ma.init_app(app)\n\n app.db = db\n app.VERSION = __version__\n\n from CTFd.cache import cache\n\n cache.init_app(app)\n app.cache = cache\n\n # If you have multiple workers you must have a shared cache\n socketio.init_app(\n app,\n async_mode=app.config.get('SOCKETIO_ASYNC_MODE'),\n message_queue=app.config.get('CACHE_REDIS_URL')\n )\n\n if app.config.get('REVERSE_PROXY'):\n app.wsgi_app = ProxyFix(app.wsgi_app)\n\n version = utils.get_config('ctf_version')\n\n # Upgrading from an older version of CTFd\n if version and (StrictVersion(version) < StrictVersion(__version__)):\n if confirm_upgrade():\n run_upgrade()\n else:\n exit()\n\n if not version:\n utils.set_config('ctf_version', __version__)\n\n if not utils.get_config('ctf_theme'):\n utils.set_config('ctf_theme', 'core')\n\n update_check(force=True)\n\n init_request_processors(app)\n init_template_filters(app)\n init_template_globals(app)\n\n # Importing here allows tests to use sensible names (e.g. api instead of api_bp)\n from CTFd.views import views\n from CTFd.teams import teams\n from CTFd.users import users\n from CTFd.challenges import challenges\n from CTFd.scoreboard import scoreboard\n from CTFd.auth import auth\n from CTFd.admin import admin\n from CTFd.api import api\n from CTFd.events import events\n from CTFd.errors import page_not_found, forbidden, general_error, gateway_error\n\n app.register_blueprint(views)\n app.register_blueprint(teams)\n app.register_blueprint(users)\n app.register_blueprint(challenges)\n app.register_blueprint(scoreboard)\n app.register_blueprint(auth)\n app.register_blueprint(api)\n app.register_blueprint(events)\n\n app.register_blueprint(admin)\n\n app.register_error_handler(404, page_not_found)\n app.register_error_handler(403, forbidden)\n app.register_error_handler(500, general_error)\n app.register_error_handler(502, gateway_error)\n\n init_logs(app)\n init_plugins(app)\n\n return app\n",
"path": "CTFd/__init__.py"
}
] | [
{
"content": "import sys\nimport os\n\nfrom distutils.version import StrictVersion\nfrom flask import Flask, Request\nfrom werkzeug.utils import cached_property\nfrom werkzeug.contrib.fixers import ProxyFix\nfrom jinja2 import FileSystemLoader\nfrom jinja2.sandbox import SandboxedEnvironment\nfrom six.moves import input\n\nfrom CTFd import utils\nfrom CTFd.utils.migrations import migrations, migrate, upgrade, stamp, create_database\nfrom CTFd.utils.sessions import CachingSessionInterface\nfrom CTFd.utils.updates import update_check\nfrom CTFd.utils.initialization import init_request_processors, init_template_filters, init_template_globals, init_logs\nfrom CTFd.utils.events import socketio\nfrom CTFd.plugins import init_plugins\n\n# Hack to support Unicode in Python 2 properly\nif sys.version_info[0] < 3:\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n\n__version__ = '2.0.4'\n\n\nclass CTFdRequest(Request):\n @cached_property\n def path(self):\n \"\"\"\n Hijack the original Flask request path because it does not account for subdirectory deployments in an intuitive\n manner. We append script_root so that the path always points to the full path as seen in the browser.\n e.g. /subdirectory/path/route vs /path/route\n\n :return: string\n \"\"\"\n return self.script_root + super(CTFdRequest, self).path\n\n\nclass CTFdFlask(Flask):\n def __init__(self, *args, **kwargs):\n \"\"\"Overriden Jinja constructor setting a custom jinja_environment\"\"\"\n self.jinja_environment = SandboxedBaseEnvironment\n self.session_interface = CachingSessionInterface(key_prefix='session')\n self.request_class = CTFdRequest\n Flask.__init__(self, *args, **kwargs)\n\n def create_jinja_environment(self):\n \"\"\"Overridden jinja environment constructor\"\"\"\n return super(CTFdFlask, self).create_jinja_environment()\n\n\nclass SandboxedBaseEnvironment(SandboxedEnvironment):\n \"\"\"SandboxEnvironment that mimics the Flask BaseEnvironment\"\"\"\n def __init__(self, app, **options):\n if 'loader' not in options:\n options['loader'] = app.create_global_jinja_loader()\n # Disable cache entirely so that themes can be switched (#662)\n # If the cache is enabled, switching themes will cause odd rendering errors\n SandboxedEnvironment.__init__(self, cache_size=0, **options)\n self.app = app\n\n\nclass ThemeLoader(FileSystemLoader):\n \"\"\"Custom FileSystemLoader that switches themes based on the configuration value\"\"\"\n def __init__(self, searchpath, encoding='utf-8', followlinks=False):\n super(ThemeLoader, self).__init__(searchpath, encoding, followlinks)\n self.overriden_templates = {}\n\n def get_source(self, environment, template):\n # Check if the template has been overriden\n if template in self.overriden_templates:\n return self.overriden_templates[template], template, True\n\n # Check if the template requested is for the admin panel\n if template.startswith('admin/'):\n template = template[6:] # Strip out admin/\n template = \"/\".join(['admin', 'templates', template])\n return super(ThemeLoader, self).get_source(environment, template)\n\n # Load regular theme data\n theme = utils.get_config('ctf_theme')\n template = \"/\".join([theme, 'templates', template])\n return super(ThemeLoader, self).get_source(environment, template)\n\n\ndef confirm_upgrade():\n if sys.stdin.isatty():\n print(\"/*\\\\ CTFd has updated and must update the database! /*\\\\\")\n print(\"/*\\\\ Please backup your database before proceeding! /*\\\\\")\n print(\"/*\\\\ CTFd maintainers are not responsible for any data loss! 
/*\\\\\")\n if input('Run database migrations (Y/N)').lower().strip() == 'y':\n return True\n else:\n print('/*\\\\ Ignored database migrations... /*\\\\')\n return False\n else:\n return True\n\n\ndef run_upgrade():\n upgrade()\n utils.set_config('ctf_version', __version__)\n\n\ndef create_app(config='CTFd.config.Config'):\n app = CTFdFlask(__name__)\n with app.app_context():\n app.config.from_object(config)\n\n theme_loader = ThemeLoader(os.path.join(app.root_path, 'themes'), followlinks=True)\n app.jinja_loader = theme_loader\n\n from CTFd.models import db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking\n\n url = create_database()\n\n # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in\n # This is mostly so we can force MySQL's charset\n app.config['SQLALCHEMY_DATABASE_URI'] = str(url)\n\n # Register database\n db.init_app(app)\n\n # Register Flask-Migrate\n migrations.init_app(app, db)\n\n # Alembic sqlite support is lacking so we should just create_all anyway\n if url.drivername.startswith('sqlite'):\n db.create_all()\n stamp()\n else:\n # This creates tables instead of db.create_all()\n # Allows migrations to happen properly\n upgrade()\n\n from CTFd.models import ma\n\n ma.init_app(app)\n\n app.db = db\n app.VERSION = __version__\n\n from CTFd.cache import cache\n\n cache.init_app(app)\n app.cache = cache\n\n # If you have multiple workers you must have a shared cache\n socketio.init_app(\n app,\n async_mode=app.config.get('SOCKETIO_ASYNC_MODE'),\n message_queue=app.config.get('CACHE_REDIS_URL')\n )\n\n if app.config.get('REVERSE_PROXY'):\n app.wsgi_app = ProxyFix(app.wsgi_app)\n\n version = utils.get_config('ctf_version')\n\n # Upgrading from an older version of CTFd\n if version and (StrictVersion(version) < StrictVersion(__version__)):\n if confirm_upgrade():\n run_upgrade()\n else:\n exit()\n\n if not version:\n utils.set_config('ctf_version', __version__)\n\n if not utils.get_config('ctf_theme'):\n utils.set_config('ctf_theme', 'core')\n\n update_check(force=True)\n\n init_request_processors(app)\n init_template_filters(app)\n init_template_globals(app)\n\n # Importing here allows tests to use sensible names (e.g. api instead of api_bp)\n from CTFd.views import views\n from CTFd.teams import teams\n from CTFd.users import users\n from CTFd.challenges import challenges\n from CTFd.scoreboard import scoreboard\n from CTFd.auth import auth\n from CTFd.admin import admin\n from CTFd.api import api\n from CTFd.events import events\n from CTFd.errors import page_not_found, forbidden, general_error, gateway_error\n\n app.register_blueprint(views)\n app.register_blueprint(teams)\n app.register_blueprint(users)\n app.register_blueprint(challenges)\n app.register_blueprint(scoreboard)\n app.register_blueprint(auth)\n app.register_blueprint(api)\n app.register_blueprint(events)\n\n app.register_blueprint(admin)\n\n app.register_error_handler(404, page_not_found)\n app.register_error_handler(403, forbidden)\n app.register_error_handler(500, general_error)\n app.register_error_handler(502, gateway_error)\n\n init_logs(app)\n init_plugins(app)\n\n return app\n",
"path": "CTFd/__init__.py"
}
] | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5f8fc51fb..b9edd2324 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,31 @@
+2.0.4 / 2019-01-30
+==================
+
+**General**
+* Block user & team name changes if name changes are disabled (Closes #835)
+* Set accounts to unconfirmed if email is changed while `verify_emails` is enabled
+* Only allow users to change their email to emails with domains in the whitelist.
+* Add `email.check_email_is_whitelisted()` to verify that a user's email is whitelisted.
+* Create a `get_config` wrapper around the internal `_get_config` to let us set a default config value (Closes #659)
+* Remove `utils.get_app_config()` from memoization and also give it a `default` parameter
+* Move `utils.logging.init_logs()` into `utils.initialization` and properly call `init_logs()` to save logs to the logs folder
+* Block the creation of users/teams from MLC if registration_visibility is private
+* Fix showing incorrect 'CTF has ended' error if `view_after_ctf` is set.
+* Fix creating users from the admin panel while name changes are disabled.
+
+**API**
+* `/api/v1/teams/<team_id>` now coerced to an int (i.e. `/api/v1/teams/<int:team_id>`)
+
+**Deployment**
+* Re-add the `LOG_FOLDER` envvar to docker-compose so we don't try to write to the read-only host
+* Stop gunicorn from logging to `LOG_FOLDER` in docker without explicit opt-in
+* Add `ACCESS_LOG` and `ERROR_LOG` envvars to docker to specify where gunicorn will log to
+* Allow `DATABASE_URL` to contain custom MySQL ports for `docker-entrypoint.sh`
+* Drop `WORKERS` count to 1 to avoid dealing with Flask-SocketIO sticky sessions'
+* Install `gevent-websocket` and use it by default until we have a better solution
+* NOTE: In future releases, websockets functionality will likely be removed. (#852)
+
+
2.0.3 / 2019-01-12
==================
diff --git a/CTFd/__init__.py b/CTFd/__init__.py
index 38e019ba8..7fbbbdd64 100644
--- a/CTFd/__init__.py
+++ b/CTFd/__init__.py
@@ -22,7 +22,7 @@
reload(sys)
sys.setdefaultencoding("utf-8")
-__version__ = '2.0.3'
+__version__ = '2.0.4'
class CTFdRequest(Request):
|
pydantic__pydantic-2139 | `underscore_attrs_are_private` breaks generics
### Checks
* [x] I added a descriptive title to this issue
* [x] I have searched (google, github) for similar issues and couldn't find anything
* [x] I have read and followed [the docs](https://pydantic-docs.helpmanual.io/) and still think this is a bug
# Bug
Output of `python -c "import pydantic.utils; print(pydantic.utils.version_info())"`:
```
pydantic version: 1.7.2
pydantic compiled: False
install path: /nix/store/4snc9a6ywd1m75z7k5v863h9kl3s38dy-python3.7-pydantic-1.7.2/lib/python3.7/site-packages/pydantic
python version: 3.7.7 (default, Mar 10 2020, 06:34:06) [GCC 9.3.0]
platform: Linux-4.15.0-123-generic-x86_64-with-debian-buster-sid
optional deps. installed: ['typing-extensions', 'email-validator']
```
----
The `underscore_attrs_are_private` config option seems to break generics. In particular, it seems to be clobbering the model's `__orig_bases__`, which ends up causing a `TypeError` in `typing.Generic`. Unfortunately, I'm not familiar enough with Pydantic's code to pinpoint the exact root of the issue.
To reproduce:
```py
from pydantic.generics import GenericModel
from typing import TypeVar, Generic
T = TypeVar('T')
class Model(GenericModel, Generic[T]):
    class Config:
        underscore_attrs_are_private = True

    value: T
```
Output:
```python
TypeError Traceback (most recent call last)
<ipython-input-17-86d3af5f0365> in <module>
----> 1 class Model(GenericModel, Generic[T]):
2 class Config:
3 underscore_attrs_are_private = True
4 id: T
5
/nix/store/5mlyrz5jm75dbjd92wsq89b9lsd0bhww-python3-3.7.7-env/lib/python3.7/site-packages/pydantic/main.py in __new__(mcs, name, bases, namespace, **kwargs)
322 }
323
--> 324 cls = super().__new__(mcs, name, bases, new_namespace, **kwargs)
325 # set __signature__ attr only for model class, but not for its instances
326 cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config))
/nix/store/k2w1idz2vdag50xl88113845mr74z823-python3-3.7.7/lib/python3.7/abc.py in __new__(mcls, name, bases, namespace, **kwargs)
124 """
125 def __new__(mcls, name, bases, namespace, **kwargs):
--> 126 cls = super().__new__(mcls, name, bases, namespace, **kwargs)
127 _abc_init(cls)
128 return cls
/nix/store/k2w1idz2vdag50xl88113845mr74z823-python3-3.7.7/lib/python3.7/typing.py in __init_subclass__(cls, *args, **kwargs)
848 tvars = []
849 if '__orig_bases__' in cls.__dict__:
--> 850 error = Generic in cls.__orig_bases__
851 else:
852 error = Generic in cls.__bases__ and cls.__name__ != '_Protocol'
TypeError: argument of type 'member_descriptor' is not iterable
```
----
Removing `underscore_attrs_are_private` or setting it to `False` makes it work as expected. Using `PrivateAttr` instead of the config option works well too.
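For reference, a minimal sketch of the `PrivateAttr` workaround mentioned above (the `_private_note` attribute is just an illustrative name):

```python
from typing import Generic, TypeVar

from pydantic import PrivateAttr
from pydantic.generics import GenericModel

T = TypeVar('T')


class Model(GenericModel, Generic[T]):
    # Declaring the private attribute explicitly avoids the
    # `underscore_attrs_are_private` config option entirely.
    _private_note: str = PrivateAttr(default='')

    value: T


m = Model[int](value=1)
print(m.value)  # -> 1
```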
| [
{
"content": "import warnings\nimport weakref\nfrom collections import OrderedDict, defaultdict, deque\nfrom copy import deepcopy\nfrom itertools import islice\nfrom types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType\nfrom typing import (\n TYPE_CHECKING,\n AbstractSet,\n Any,\n Callable,\n Dict,\n Generator,\n Iterator,\n List,\n Mapping,\n Optional,\n Set,\n Tuple,\n Type,\n TypeVar,\n Union,\n no_type_check,\n)\n\nfrom .typing import NoneType, display_as_type\nfrom .version import version_info\n\nif TYPE_CHECKING:\n from inspect import Signature\n from pathlib import Path\n\n from .dataclasses import Dataclass # noqa: F401\n from .fields import ModelField # noqa: F401\n from .main import BaseConfig, BaseModel # noqa: F401\n from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs # noqa: F401\n\n__all__ = (\n 'import_string',\n 'sequence_like',\n 'validate_field_name',\n 'lenient_issubclass',\n 'in_ipython',\n 'deep_update',\n 'update_not_none',\n 'almost_equal_floats',\n 'get_model',\n 'to_camel',\n 'is_valid_field',\n 'smart_deepcopy',\n 'PyObjectStr',\n 'Representation',\n 'GetterDict',\n 'ValueItems',\n 'version_info', # required here to match behaviour in v1.3\n 'ClassAttribute',\n 'path_type',\n 'ROOT_KEY',\n)\n\nROOT_KEY = '__root__'\n# these are types that are returned unchanged by deepcopy\nIMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = {\n int,\n float,\n complex,\n str,\n bool,\n bytes,\n type,\n NoneType,\n FunctionType,\n BuiltinFunctionType,\n LambdaType,\n weakref.ref,\n CodeType,\n # note: including ModuleType will differ from behaviour of deepcopy by not producing error.\n # It might be not a good idea in general, but considering that this function used only internally\n # against default values of fields, this will allow to actually have a field with module as default value\n ModuleType,\n NotImplemented.__class__,\n Ellipsis.__class__,\n}\n\n# these are types that if empty, might be copied with simple copy() instead of deepcopy()\nBUILTIN_COLLECTIONS: Set[Type[Any]] = {\n list,\n set,\n tuple,\n frozenset,\n dict,\n OrderedDict,\n defaultdict,\n deque,\n}\n\n\ndef import_string(dotted_path: str) -> Any:\n \"\"\"\n Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the\n last name in the path. 
Raise ImportError if the import fails.\n \"\"\"\n from importlib import import_module\n\n try:\n module_path, class_name = dotted_path.strip(' ').rsplit('.', 1)\n except ValueError as e:\n raise ImportError(f'\"{dotted_path}\" doesn\\'t look like a module path') from e\n\n module = import_module(module_path)\n try:\n return getattr(module, class_name)\n except AttributeError as e:\n raise ImportError(f'Module \"{module_path}\" does not define a \"{class_name}\" attribute') from e\n\n\ndef truncate(v: Union[str], *, max_len: int = 80) -> str:\n \"\"\"\n Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long\n \"\"\"\n warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning)\n if isinstance(v, str) and len(v) > (max_len - 2):\n # -3 so quote + string + … + quote has correct length\n return (v[: (max_len - 3)] + '…').__repr__()\n try:\n v = v.__repr__()\n except TypeError:\n v = v.__class__.__repr__(v) # in case v is a type\n if len(v) > max_len:\n v = v[: max_len - 1] + '…'\n return v\n\n\ndef sequence_like(v: Type[Any]) -> bool:\n return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))\n\n\ndef validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None:\n \"\"\"\n Ensure that the field's name does not shadow an existing attribute of the model.\n \"\"\"\n for base in bases:\n if getattr(base, field_name, None):\n raise NameError(\n f'Field name \"{field_name}\" shadows a BaseModel attribute; '\n f'use a different field name with \"alias=\\'{field_name}\\'\".'\n )\n\n\ndef lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...]]) -> bool:\n return isinstance(cls, type) and issubclass(cls, class_or_tuple)\n\n\ndef in_ipython() -> bool:\n \"\"\"\n Check whether we're in an ipython environment, including jupyter notebooks.\n \"\"\"\n try:\n eval('__IPYTHON__')\n except NameError:\n return False\n else: # pragma: no cover\n return True\n\n\nKeyType = TypeVar('KeyType')\n\n\ndef deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]:\n updated_mapping = mapping.copy()\n for updating_mapping in updating_mappings:\n for k, v in updating_mapping.items():\n if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):\n updated_mapping[k] = deep_update(updated_mapping[k], v)\n else:\n updated_mapping[k] = v\n return updated_mapping\n\n\ndef update_not_none(mapping: Dict[Any, Any], **update: Any) -> None:\n mapping.update({k: v for k, v in update.items() if v is not None})\n\n\ndef almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool:\n \"\"\"\n Return True if two floats are almost equal\n \"\"\"\n return abs(value_1 - value_2) <= delta\n\n\ndef generate_model_signature(\n init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig']\n) -> 'Signature':\n \"\"\"\n Generate signature for model based on its fields\n \"\"\"\n from inspect import Parameter, Signature, signature\n\n present_params = signature(init).parameters.values()\n merged_params: Dict[str, Parameter] = {}\n var_kw = None\n use_var_kw = False\n\n for param in islice(present_params, 1, None): # skip self arg\n if param.kind is param.VAR_KEYWORD:\n var_kw = param\n continue\n merged_params[param.name] = param\n\n if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through\n allow_names = config.allow_population_by_field_name\n for 
field_name, field in fields.items():\n param_name = field.alias\n if field_name in merged_params or param_name in merged_params:\n continue\n elif not param_name.isidentifier():\n if allow_names and field_name.isidentifier():\n param_name = field_name\n else:\n use_var_kw = True\n continue\n\n # TODO: replace annotation with actual expected types once #1055 solved\n kwargs = {'default': field.default} if not field.required else {}\n merged_params[param_name] = Parameter(\n param_name, Parameter.KEYWORD_ONLY, annotation=field.outer_type_, **kwargs\n )\n\n if config.extra is config.extra.allow:\n use_var_kw = True\n\n if var_kw and use_var_kw:\n # Make sure the parameter for extra kwargs\n # does not have the same name as a field\n default_model_signature = [\n ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD),\n ('data', Parameter.VAR_KEYWORD),\n ]\n if [(p.name, p.kind) for p in present_params] == default_model_signature:\n # if this is the standard model signature, use extra_data as the extra args name\n var_kw_name = 'extra_data'\n else:\n # else start from var_kw\n var_kw_name = var_kw.name\n\n # generate a name that's definitely unique\n while var_kw_name in fields:\n var_kw_name += '_'\n merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)\n\n return Signature(parameters=list(merged_params.values()), return_annotation=None)\n\n\ndef get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']:\n from .main import BaseModel # noqa: F811\n\n try:\n model_cls = obj.__pydantic_model__ # type: ignore\n except AttributeError:\n model_cls = obj\n\n if not issubclass(model_cls, BaseModel):\n raise TypeError('Unsupported type, must be either BaseModel or dataclass')\n return model_cls\n\n\ndef to_camel(string: str) -> str:\n return ''.join(word.capitalize() for word in string.split('_'))\n\n\nT = TypeVar('T')\n\n\ndef unique_list(input_list: Union[List[T], Tuple[T, ...]]) -> List[T]:\n \"\"\"\n Make a list unique while maintaining order.\n \"\"\"\n result = []\n unique_set = set()\n for v in input_list:\n if v not in unique_set:\n unique_set.add(v)\n result.append(v)\n\n return result\n\n\ndef update_normalized_all(\n item: Union['AbstractSetIntStr', 'MappingIntStrAny'],\n all_items: Union['AbstractSetIntStr', 'MappingIntStrAny'],\n) -> Union['AbstractSetIntStr', 'MappingIntStrAny']:\n \"\"\"\n Update item based on what all items contains.\n\n The update is done based on these cases:\n\n - if both arguments are dicts then each key-value pair existing in ``all_items`` is merged into ``item``,\n while the rest of the key-value pairs are updated recursively with this function.\n - if both arguments are sets then they are just merged.\n - if ``item`` is a dictionary and ``all_items`` is a set then all values of it are added to ``item`` as\n ``key: ...``.\n - if ``item`` is set and ``all_items`` is a dictionary, then ``item`` is converted to a dictionary and then the\n key-value pairs of ``all_items`` are merged in it.\n\n During recursive calls, there is a case where ``all_items`` can be an Ellipsis, in which case the ``item`` is\n returned as is.\n \"\"\"\n if not item:\n return all_items\n if isinstance(item, dict) and isinstance(all_items, dict):\n item = dict(item)\n item.update({k: update_normalized_all(item[k], v) for k, v in all_items.items() if k in item})\n item.update({k: v for k, v in all_items.items() if k not in item})\n return item\n if isinstance(item, set) and isinstance(all_items, set):\n item = set(item)\n item.update(all_items)\n return item\n 
if isinstance(item, dict) and isinstance(all_items, set):\n item = dict(item)\n item.update({k: ... for k in all_items if k not in item})\n return item\n if isinstance(item, set) and isinstance(all_items, dict):\n item = {k: ... for k in item}\n item.update({k: v for k, v in all_items.items() if k not in item})\n return item\n # Case when item or all_items is ... (in recursive calls).\n return item\n\n\nclass PyObjectStr(str):\n \"\"\"\n String class where repr doesn't include quotes. Useful with Representation when you want to return a string\n representation of something that valid (or pseudo-valid) python.\n \"\"\"\n\n def __repr__(self) -> str:\n return str(self)\n\n\nclass Representation:\n \"\"\"\n Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details.\n\n __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations\n of objects.\n \"\"\"\n\n __slots__: Tuple[str, ...] = tuple()\n\n def __repr_args__(self) -> 'ReprArgs':\n \"\"\"\n Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.\n\n Can either return:\n * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`\n * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`\n \"\"\"\n attrs = ((s, getattr(self, s)) for s in self.__slots__)\n return [(a, v) for a, v in attrs if v is not None]\n\n def __repr_name__(self) -> str:\n \"\"\"\n Name of the instance's class, used in __repr__.\n \"\"\"\n return self.__class__.__name__\n\n def __repr_str__(self, join_str: str) -> str:\n return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())\n\n def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]:\n \"\"\"\n Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects\n \"\"\"\n yield self.__repr_name__() + '('\n yield 1\n for name, value in self.__repr_args__():\n if name is not None:\n yield name + '='\n yield fmt(value)\n yield ','\n yield 0\n yield -1\n yield ')'\n\n def __str__(self) -> str:\n return self.__repr_str__(' ')\n\n def __repr__(self) -> str:\n return f'{self.__repr_name__()}({self.__repr_str__(\", \")})'\n\n\nclass GetterDict(Representation):\n \"\"\"\n Hack to make object's smell just enough like dicts for validate_model.\n\n We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves.\n \"\"\"\n\n __slots__ = ('_obj',)\n\n def __init__(self, obj: Any):\n self._obj = obj\n\n def __getitem__(self, key: str) -> Any:\n try:\n return getattr(self._obj, key)\n except AttributeError as e:\n raise KeyError(key) from e\n\n def get(self, key: Any, default: Any = None) -> Any:\n return getattr(self._obj, key, default)\n\n def extra_keys(self) -> Set[Any]:\n \"\"\"\n We don't want to get any other attributes of obj if the model didn't explicitly ask for them\n \"\"\"\n return set()\n\n def keys(self) -> List[Any]:\n \"\"\"\n Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python\n dictionaries.\n \"\"\"\n return list(self)\n\n def values(self) -> List[Any]:\n return [self[k] for k in self]\n\n def items(self) -> Iterator[Tuple[str, Any]]:\n for k in self:\n yield k, self.get(k)\n\n def __iter__(self) -> Iterator[str]:\n for name in dir(self._obj):\n if not name.startswith('_'):\n yield name\n\n def __len__(self) -> int:\n return sum(1 for _ in 
self)\n\n def __contains__(self, item: Any) -> bool:\n return item in self.keys()\n\n def __eq__(self, other: Any) -> bool:\n return dict(self) == dict(other.items())\n\n def __repr_args__(self) -> 'ReprArgs':\n return [(None, dict(self))]\n\n def __repr_name__(self) -> str:\n return f'GetterDict[{display_as_type(self._obj)}]'\n\n\nclass ValueItems(Representation):\n \"\"\"\n Class for more convenient calculation of excluded or included fields on values.\n \"\"\"\n\n __slots__ = ('_items', '_type')\n\n def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None:\n if TYPE_CHECKING:\n self._items: Union['AbstractSetIntStr', 'MappingIntStrAny']\n self._type: Type[Union[set, dict]] # type: ignore\n\n # For further type checks speed-up\n if isinstance(items, Mapping):\n self._type = dict\n elif isinstance(items, AbstractSet):\n self._type = set\n else:\n raise TypeError(f'Unexpected type of exclude value {items.__class__}')\n\n if isinstance(value, (list, tuple)):\n items = self._normalize_indexes(items, len(value))\n\n self._items = items\n\n @no_type_check\n def is_excluded(self, item: Any) -> bool:\n \"\"\"\n Check if item is fully excluded\n (value considered excluded if self._type is set and item contained in self._items\n or self._type is dict and self._items.get(item) is ...\n\n :param item: key or index of a value\n \"\"\"\n if self._type is set:\n return item in self._items\n return self._items.get(item) is ...\n\n @no_type_check\n def is_included(self, item: Any) -> bool:\n \"\"\"\n Check if value is contained in self._items\n\n :param item: key or index of value\n \"\"\"\n return item in self._items\n\n @no_type_check\n def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]:\n \"\"\"\n :param e: key or index of element on value\n :return: raw values for elemet if self._items is dict and contain needed element\n \"\"\"\n\n if self._type is dict:\n item = self._items.get(e)\n return item if item is not ... 
else None\n return None\n\n @no_type_check\n def _normalize_indexes(\n self, items: Union['AbstractSetIntStr', 'MappingIntStrAny'], v_length: int\n ) -> Union['AbstractSetIntStr', 'DictIntStrAny']:\n \"\"\"\n :param items: dict or set of indexes which will be normalized\n :param v_length: length of sequence indexes of which will be\n\n >>> self._normalize_indexes({0, -2, -1}, 4)\n {0, 2, 3}\n >>> self._normalize_indexes({'__all__'}, 4)\n {0, 1, 2, 3}\n \"\"\"\n if any(not isinstance(i, int) and i != '__all__' for i in items):\n raise TypeError(\n 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '\n 'expected integer keys or keyword \"__all__\"'\n )\n if self._type is set:\n if '__all__' in items:\n if items != {'__all__'}:\n raise ValueError('set with keyword \"__all__\" must not contain other elements')\n return {i for i in range(v_length)}\n return {v_length + i if i < 0 else i for i in items}\n else:\n all_items = items.get('__all__')\n for i, v in items.items():\n if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or v is ...):\n raise TypeError(f'Unexpected type of exclude value for index \"{i}\" {v.__class__}')\n normalized_items = {v_length + i if i < 0 else i: v for i, v in items.items() if i != '__all__'}\n if all_items:\n default: Type[Union[Set[Any], Dict[Any, Any]]]\n if isinstance(all_items, Mapping):\n default = dict\n elif isinstance(all_items, AbstractSet):\n default = set\n else:\n for i in range(v_length):\n normalized_items.setdefault(i, ...)\n return normalized_items\n for i in range(v_length):\n normalized_item = normalized_items.setdefault(i, default())\n if normalized_item is not ...:\n normalized_items[i] = update_normalized_all(normalized_item, all_items)\n return normalized_items\n\n def __repr_args__(self) -> 'ReprArgs':\n return [(None, self._items)]\n\n\nclass ClassAttribute:\n \"\"\"\n Hide class attribute from its instances\n \"\"\"\n\n __slots__ = (\n 'name',\n 'value',\n )\n\n def __init__(self, name: str, value: Any) -> None:\n self.name = name\n self.value = value\n\n def __get__(self, instance: Any, owner: Type[Any]) -> None:\n if instance is None:\n return self.value\n raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')\n\n\npath_types = {\n 'is_dir': 'directory',\n 'is_file': 'file',\n 'is_mount': 'mount point',\n 'is_symlink': 'symlink',\n 'is_block_device': 'block device',\n 'is_char_device': 'char device',\n 'is_fifo': 'FIFO',\n 'is_socket': 'socket',\n}\n\n\ndef path_type(p: 'Path') -> str:\n \"\"\"\n Find out what sort of thing a path is.\n \"\"\"\n assert p.exists(), 'path does not exist'\n for method, name in path_types.items():\n if getattr(p, method)():\n return name\n\n return 'unknown'\n\n\nObj = TypeVar('Obj')\n\n\ndef smart_deepcopy(obj: Obj) -> Obj:\n \"\"\"\n Return type as is for immutable built-in types\n Use obj.copy() for built-in empty collections\n Use copy.deepcopy() for non-empty collections and unknown objects\n \"\"\"\n\n obj_type = obj.__class__\n if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:\n return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway\n elif not obj and obj_type in BUILTIN_COLLECTIONS:\n # faster way for empty collections, no need to copy its members\n return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method\n return deepcopy(obj) # slowest way when we actually might need a deepcopy\n\n\ndef is_valid_field(name: str) -> bool:\n if not 
name.startswith('_'):\n return True\n return ROOT_KEY == name\n\n\ndef is_valid_private_name(name: str) -> bool:\n return not is_valid_field(name) and name not in {\n '__annotations__',\n '__classcell__',\n '__doc__',\n '__module__',\n '__qualname__',\n }\n",
"path": "pydantic/utils.py"
}
] | [
{
"content": "import warnings\nimport weakref\nfrom collections import OrderedDict, defaultdict, deque\nfrom copy import deepcopy\nfrom itertools import islice\nfrom types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType\nfrom typing import (\n TYPE_CHECKING,\n AbstractSet,\n Any,\n Callable,\n Dict,\n Generator,\n Iterator,\n List,\n Mapping,\n Optional,\n Set,\n Tuple,\n Type,\n TypeVar,\n Union,\n no_type_check,\n)\n\nfrom .typing import NoneType, display_as_type\nfrom .version import version_info\n\nif TYPE_CHECKING:\n from inspect import Signature\n from pathlib import Path\n\n from .dataclasses import Dataclass # noqa: F401\n from .fields import ModelField # noqa: F401\n from .main import BaseConfig, BaseModel # noqa: F401\n from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs # noqa: F401\n\n__all__ = (\n 'import_string',\n 'sequence_like',\n 'validate_field_name',\n 'lenient_issubclass',\n 'in_ipython',\n 'deep_update',\n 'update_not_none',\n 'almost_equal_floats',\n 'get_model',\n 'to_camel',\n 'is_valid_field',\n 'smart_deepcopy',\n 'PyObjectStr',\n 'Representation',\n 'GetterDict',\n 'ValueItems',\n 'version_info', # required here to match behaviour in v1.3\n 'ClassAttribute',\n 'path_type',\n 'ROOT_KEY',\n)\n\nROOT_KEY = '__root__'\n# these are types that are returned unchanged by deepcopy\nIMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = {\n int,\n float,\n complex,\n str,\n bool,\n bytes,\n type,\n NoneType,\n FunctionType,\n BuiltinFunctionType,\n LambdaType,\n weakref.ref,\n CodeType,\n # note: including ModuleType will differ from behaviour of deepcopy by not producing error.\n # It might be not a good idea in general, but considering that this function used only internally\n # against default values of fields, this will allow to actually have a field with module as default value\n ModuleType,\n NotImplemented.__class__,\n Ellipsis.__class__,\n}\n\n# these are types that if empty, might be copied with simple copy() instead of deepcopy()\nBUILTIN_COLLECTIONS: Set[Type[Any]] = {\n list,\n set,\n tuple,\n frozenset,\n dict,\n OrderedDict,\n defaultdict,\n deque,\n}\n\n\ndef import_string(dotted_path: str) -> Any:\n \"\"\"\n Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the\n last name in the path. 
Raise ImportError if the import fails.\n \"\"\"\n from importlib import import_module\n\n try:\n module_path, class_name = dotted_path.strip(' ').rsplit('.', 1)\n except ValueError as e:\n raise ImportError(f'\"{dotted_path}\" doesn\\'t look like a module path') from e\n\n module = import_module(module_path)\n try:\n return getattr(module, class_name)\n except AttributeError as e:\n raise ImportError(f'Module \"{module_path}\" does not define a \"{class_name}\" attribute') from e\n\n\ndef truncate(v: Union[str], *, max_len: int = 80) -> str:\n \"\"\"\n Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long\n \"\"\"\n warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning)\n if isinstance(v, str) and len(v) > (max_len - 2):\n # -3 so quote + string + … + quote has correct length\n return (v[: (max_len - 3)] + '…').__repr__()\n try:\n v = v.__repr__()\n except TypeError:\n v = v.__class__.__repr__(v) # in case v is a type\n if len(v) > max_len:\n v = v[: max_len - 1] + '…'\n return v\n\n\ndef sequence_like(v: Type[Any]) -> bool:\n return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))\n\n\ndef validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None:\n \"\"\"\n Ensure that the field's name does not shadow an existing attribute of the model.\n \"\"\"\n for base in bases:\n if getattr(base, field_name, None):\n raise NameError(\n f'Field name \"{field_name}\" shadows a BaseModel attribute; '\n f'use a different field name with \"alias=\\'{field_name}\\'\".'\n )\n\n\ndef lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...]]) -> bool:\n return isinstance(cls, type) and issubclass(cls, class_or_tuple)\n\n\ndef in_ipython() -> bool:\n \"\"\"\n Check whether we're in an ipython environment, including jupyter notebooks.\n \"\"\"\n try:\n eval('__IPYTHON__')\n except NameError:\n return False\n else: # pragma: no cover\n return True\n\n\nKeyType = TypeVar('KeyType')\n\n\ndef deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]:\n updated_mapping = mapping.copy()\n for updating_mapping in updating_mappings:\n for k, v in updating_mapping.items():\n if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):\n updated_mapping[k] = deep_update(updated_mapping[k], v)\n else:\n updated_mapping[k] = v\n return updated_mapping\n\n\ndef update_not_none(mapping: Dict[Any, Any], **update: Any) -> None:\n mapping.update({k: v for k, v in update.items() if v is not None})\n\n\ndef almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool:\n \"\"\"\n Return True if two floats are almost equal\n \"\"\"\n return abs(value_1 - value_2) <= delta\n\n\ndef generate_model_signature(\n init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig']\n) -> 'Signature':\n \"\"\"\n Generate signature for model based on its fields\n \"\"\"\n from inspect import Parameter, Signature, signature\n\n present_params = signature(init).parameters.values()\n merged_params: Dict[str, Parameter] = {}\n var_kw = None\n use_var_kw = False\n\n for param in islice(present_params, 1, None): # skip self arg\n if param.kind is param.VAR_KEYWORD:\n var_kw = param\n continue\n merged_params[param.name] = param\n\n if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through\n allow_names = config.allow_population_by_field_name\n for 
field_name, field in fields.items():\n param_name = field.alias\n if field_name in merged_params or param_name in merged_params:\n continue\n elif not param_name.isidentifier():\n if allow_names and field_name.isidentifier():\n param_name = field_name\n else:\n use_var_kw = True\n continue\n\n # TODO: replace annotation with actual expected types once #1055 solved\n kwargs = {'default': field.default} if not field.required else {}\n merged_params[param_name] = Parameter(\n param_name, Parameter.KEYWORD_ONLY, annotation=field.outer_type_, **kwargs\n )\n\n if config.extra is config.extra.allow:\n use_var_kw = True\n\n if var_kw and use_var_kw:\n # Make sure the parameter for extra kwargs\n # does not have the same name as a field\n default_model_signature = [\n ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD),\n ('data', Parameter.VAR_KEYWORD),\n ]\n if [(p.name, p.kind) for p in present_params] == default_model_signature:\n # if this is the standard model signature, use extra_data as the extra args name\n var_kw_name = 'extra_data'\n else:\n # else start from var_kw\n var_kw_name = var_kw.name\n\n # generate a name that's definitely unique\n while var_kw_name in fields:\n var_kw_name += '_'\n merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)\n\n return Signature(parameters=list(merged_params.values()), return_annotation=None)\n\n\ndef get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']:\n from .main import BaseModel # noqa: F811\n\n try:\n model_cls = obj.__pydantic_model__ # type: ignore\n except AttributeError:\n model_cls = obj\n\n if not issubclass(model_cls, BaseModel):\n raise TypeError('Unsupported type, must be either BaseModel or dataclass')\n return model_cls\n\n\ndef to_camel(string: str) -> str:\n return ''.join(word.capitalize() for word in string.split('_'))\n\n\nT = TypeVar('T')\n\n\ndef unique_list(input_list: Union[List[T], Tuple[T, ...]]) -> List[T]:\n \"\"\"\n Make a list unique while maintaining order.\n \"\"\"\n result = []\n unique_set = set()\n for v in input_list:\n if v not in unique_set:\n unique_set.add(v)\n result.append(v)\n\n return result\n\n\ndef update_normalized_all(\n item: Union['AbstractSetIntStr', 'MappingIntStrAny'],\n all_items: Union['AbstractSetIntStr', 'MappingIntStrAny'],\n) -> Union['AbstractSetIntStr', 'MappingIntStrAny']:\n \"\"\"\n Update item based on what all items contains.\n\n The update is done based on these cases:\n\n - if both arguments are dicts then each key-value pair existing in ``all_items`` is merged into ``item``,\n while the rest of the key-value pairs are updated recursively with this function.\n - if both arguments are sets then they are just merged.\n - if ``item`` is a dictionary and ``all_items`` is a set then all values of it are added to ``item`` as\n ``key: ...``.\n - if ``item`` is set and ``all_items`` is a dictionary, then ``item`` is converted to a dictionary and then the\n key-value pairs of ``all_items`` are merged in it.\n\n During recursive calls, there is a case where ``all_items`` can be an Ellipsis, in which case the ``item`` is\n returned as is.\n \"\"\"\n if not item:\n return all_items\n if isinstance(item, dict) and isinstance(all_items, dict):\n item = dict(item)\n item.update({k: update_normalized_all(item[k], v) for k, v in all_items.items() if k in item})\n item.update({k: v for k, v in all_items.items() if k not in item})\n return item\n if isinstance(item, set) and isinstance(all_items, set):\n item = set(item)\n item.update(all_items)\n return item\n 
if isinstance(item, dict) and isinstance(all_items, set):\n item = dict(item)\n item.update({k: ... for k in all_items if k not in item})\n return item\n if isinstance(item, set) and isinstance(all_items, dict):\n item = {k: ... for k in item}\n item.update({k: v for k, v in all_items.items() if k not in item})\n return item\n # Case when item or all_items is ... (in recursive calls).\n return item\n\n\nclass PyObjectStr(str):\n \"\"\"\n String class where repr doesn't include quotes. Useful with Representation when you want to return a string\n representation of something that valid (or pseudo-valid) python.\n \"\"\"\n\n def __repr__(self) -> str:\n return str(self)\n\n\nclass Representation:\n \"\"\"\n Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details.\n\n __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations\n of objects.\n \"\"\"\n\n __slots__: Tuple[str, ...] = tuple()\n\n def __repr_args__(self) -> 'ReprArgs':\n \"\"\"\n Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.\n\n Can either return:\n * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`\n * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`\n \"\"\"\n attrs = ((s, getattr(self, s)) for s in self.__slots__)\n return [(a, v) for a, v in attrs if v is not None]\n\n def __repr_name__(self) -> str:\n \"\"\"\n Name of the instance's class, used in __repr__.\n \"\"\"\n return self.__class__.__name__\n\n def __repr_str__(self, join_str: str) -> str:\n return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())\n\n def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]:\n \"\"\"\n Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects\n \"\"\"\n yield self.__repr_name__() + '('\n yield 1\n for name, value in self.__repr_args__():\n if name is not None:\n yield name + '='\n yield fmt(value)\n yield ','\n yield 0\n yield -1\n yield ')'\n\n def __str__(self) -> str:\n return self.__repr_str__(' ')\n\n def __repr__(self) -> str:\n return f'{self.__repr_name__()}({self.__repr_str__(\", \")})'\n\n\nclass GetterDict(Representation):\n \"\"\"\n Hack to make object's smell just enough like dicts for validate_model.\n\n We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves.\n \"\"\"\n\n __slots__ = ('_obj',)\n\n def __init__(self, obj: Any):\n self._obj = obj\n\n def __getitem__(self, key: str) -> Any:\n try:\n return getattr(self._obj, key)\n except AttributeError as e:\n raise KeyError(key) from e\n\n def get(self, key: Any, default: Any = None) -> Any:\n return getattr(self._obj, key, default)\n\n def extra_keys(self) -> Set[Any]:\n \"\"\"\n We don't want to get any other attributes of obj if the model didn't explicitly ask for them\n \"\"\"\n return set()\n\n def keys(self) -> List[Any]:\n \"\"\"\n Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python\n dictionaries.\n \"\"\"\n return list(self)\n\n def values(self) -> List[Any]:\n return [self[k] for k in self]\n\n def items(self) -> Iterator[Tuple[str, Any]]:\n for k in self:\n yield k, self.get(k)\n\n def __iter__(self) -> Iterator[str]:\n for name in dir(self._obj):\n if not name.startswith('_'):\n yield name\n\n def __len__(self) -> int:\n return sum(1 for _ in 
self)\n\n def __contains__(self, item: Any) -> bool:\n return item in self.keys()\n\n def __eq__(self, other: Any) -> bool:\n return dict(self) == dict(other.items())\n\n def __repr_args__(self) -> 'ReprArgs':\n return [(None, dict(self))]\n\n def __repr_name__(self) -> str:\n return f'GetterDict[{display_as_type(self._obj)}]'\n\n\nclass ValueItems(Representation):\n \"\"\"\n Class for more convenient calculation of excluded or included fields on values.\n \"\"\"\n\n __slots__ = ('_items', '_type')\n\n def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None:\n if TYPE_CHECKING:\n self._items: Union['AbstractSetIntStr', 'MappingIntStrAny']\n self._type: Type[Union[set, dict]] # type: ignore\n\n # For further type checks speed-up\n if isinstance(items, Mapping):\n self._type = dict\n elif isinstance(items, AbstractSet):\n self._type = set\n else:\n raise TypeError(f'Unexpected type of exclude value {items.__class__}')\n\n if isinstance(value, (list, tuple)):\n items = self._normalize_indexes(items, len(value))\n\n self._items = items\n\n @no_type_check\n def is_excluded(self, item: Any) -> bool:\n \"\"\"\n Check if item is fully excluded\n (value considered excluded if self._type is set and item contained in self._items\n or self._type is dict and self._items.get(item) is ...\n\n :param item: key or index of a value\n \"\"\"\n if self._type is set:\n return item in self._items\n return self._items.get(item) is ...\n\n @no_type_check\n def is_included(self, item: Any) -> bool:\n \"\"\"\n Check if value is contained in self._items\n\n :param item: key or index of value\n \"\"\"\n return item in self._items\n\n @no_type_check\n def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]:\n \"\"\"\n :param e: key or index of element on value\n :return: raw values for elemet if self._items is dict and contain needed element\n \"\"\"\n\n if self._type is dict:\n item = self._items.get(e)\n return item if item is not ... 
else None\n return None\n\n @no_type_check\n def _normalize_indexes(\n self, items: Union['AbstractSetIntStr', 'MappingIntStrAny'], v_length: int\n ) -> Union['AbstractSetIntStr', 'DictIntStrAny']:\n \"\"\"\n :param items: dict or set of indexes which will be normalized\n :param v_length: length of sequence indexes of which will be\n\n >>> self._normalize_indexes({0, -2, -1}, 4)\n {0, 2, 3}\n >>> self._normalize_indexes({'__all__'}, 4)\n {0, 1, 2, 3}\n \"\"\"\n if any(not isinstance(i, int) and i != '__all__' for i in items):\n raise TypeError(\n 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '\n 'expected integer keys or keyword \"__all__\"'\n )\n if self._type is set:\n if '__all__' in items:\n if items != {'__all__'}:\n raise ValueError('set with keyword \"__all__\" must not contain other elements')\n return {i for i in range(v_length)}\n return {v_length + i if i < 0 else i for i in items}\n else:\n all_items = items.get('__all__')\n for i, v in items.items():\n if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or v is ...):\n raise TypeError(f'Unexpected type of exclude value for index \"{i}\" {v.__class__}')\n normalized_items = {v_length + i if i < 0 else i: v for i, v in items.items() if i != '__all__'}\n if all_items:\n default: Type[Union[Set[Any], Dict[Any, Any]]]\n if isinstance(all_items, Mapping):\n default = dict\n elif isinstance(all_items, AbstractSet):\n default = set\n else:\n for i in range(v_length):\n normalized_items.setdefault(i, ...)\n return normalized_items\n for i in range(v_length):\n normalized_item = normalized_items.setdefault(i, default())\n if normalized_item is not ...:\n normalized_items[i] = update_normalized_all(normalized_item, all_items)\n return normalized_items\n\n def __repr_args__(self) -> 'ReprArgs':\n return [(None, self._items)]\n\n\nclass ClassAttribute:\n \"\"\"\n Hide class attribute from its instances\n \"\"\"\n\n __slots__ = (\n 'name',\n 'value',\n )\n\n def __init__(self, name: str, value: Any) -> None:\n self.name = name\n self.value = value\n\n def __get__(self, instance: Any, owner: Type[Any]) -> None:\n if instance is None:\n return self.value\n raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')\n\n\npath_types = {\n 'is_dir': 'directory',\n 'is_file': 'file',\n 'is_mount': 'mount point',\n 'is_symlink': 'symlink',\n 'is_block_device': 'block device',\n 'is_char_device': 'char device',\n 'is_fifo': 'FIFO',\n 'is_socket': 'socket',\n}\n\n\ndef path_type(p: 'Path') -> str:\n \"\"\"\n Find out what sort of thing a path is.\n \"\"\"\n assert p.exists(), 'path does not exist'\n for method, name in path_types.items():\n if getattr(p, method)():\n return name\n\n return 'unknown'\n\n\nObj = TypeVar('Obj')\n\n\ndef smart_deepcopy(obj: Obj) -> Obj:\n \"\"\"\n Return type as is for immutable built-in types\n Use obj.copy() for built-in empty collections\n Use copy.deepcopy() for non-empty collections and unknown objects\n \"\"\"\n\n obj_type = obj.__class__\n if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:\n return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway\n elif not obj and obj_type in BUILTIN_COLLECTIONS:\n # faster way for empty collections, no need to copy its members\n return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method\n return deepcopy(obj) # slowest way when we actually might need a deepcopy\n\n\ndef is_valid_field(name: str) -> bool:\n if not 
name.startswith('_'):\n return True\n return ROOT_KEY == name\n\n\ndef is_valid_private_name(name: str) -> bool:\n return not is_valid_field(name) and name not in {\n '__annotations__',\n '__classcell__',\n '__doc__',\n '__module__',\n '__orig_bases__',\n '__qualname__',\n }\n",
"path": "pydantic/utils.py"
}
] | diff --git a/changes/2138-PrettyWood.md b/changes/2138-PrettyWood.md
new file mode 100644
index 00000000000..e58d0ed169b
--- /dev/null
+++ b/changes/2138-PrettyWood.md
@@ -0,0 +1 @@
+fix: support `underscore_attrs_are_private` with generic models
\ No newline at end of file
diff --git a/pydantic/utils.py b/pydantic/utils.py
index e3e613ca392..c75f4e1cfc8 100644
--- a/pydantic/utils.py
+++ b/pydantic/utils.py
@@ -636,5 +636,6 @@ def is_valid_private_name(name: str) -> bool:
'__classcell__',
'__doc__',
'__module__',
+ '__orig_bases__',
'__qualname__',
}
diff --git a/tests/test_private_attributes.py b/tests/test_private_attributes.py
index 21f52090be3..d87e572abf9 100644
--- a/tests/test_private_attributes.py
+++ b/tests/test_private_attributes.py
@@ -1,9 +1,13 @@
-from typing import ClassVar
+import sys
+from typing import ClassVar, Generic, TypeVar
import pytest
from pydantic import BaseModel, Extra, PrivateAttr
from pydantic.fields import Undefined
+from pydantic.generics import GenericModel
+
+skip_36 = pytest.mark.skipif(sys.version_info < (3, 7), reason='generics only supported for python 3.7 and above')
def test_private_attribute():
@@ -180,3 +184,19 @@ class Config:
m = MyModel(x='hello')
assert m.dict() == {'x': 'hello'}
assert m._private_attr == 123
+
+
+@skip_36
+def test_generic_private_attribute():
+ T = TypeVar('T')
+
+ class Model(GenericModel, Generic[T]):
+ value: T
+ _private_value: T
+
+ class Config:
+ underscore_attrs_are_private = True
+
+ m = Model[int](value=1, _private_attr=3)
+ m._private_value = 3
+ assert m.dict() == {'value': 1}
|
qtile__qtile-1837 | 0.16.0: impossible to build from github sources (to run tests)
<!--
Please do not ask general questions here! There are [community
contact](https://github.com/qtile/qtile#community) options for that.
-->
# Issue description
Hi! I package qtile for Arch Linux. I'm currently trying to build 0.16.0.
Usually I also run the test suite against the release (although there are still problems: #1352 and #1130) to be able to at least ensure some kind of compatibility with the Arch Linux-provided python3 ecosystem.
However, running tests is only possible with the GitHub source tarballs (because the test files are included), which unfortunately is not the case for the PyPI tarballs.
When running `python setup.py build` for 0.16.0 I am now getting this:
```
Traceback (most recent call last):
File "setup.py", line 91, in <module>
setup(
File "/usr/lib/python3.8/site-packages/setuptools/__init__.py", line 165, in setup
return distutils.core.setup(**attrs)
File "/usr/lib/python3.8/distutils/core.py", line 108, in setup
_setup_distribution = dist = klass(attrs)
File "/usr/lib/python3.8/site-packages/setuptools/dist.py", line 429, in __init__
_Distribution.__init__(self, {
File "/usr/lib/python3.8/distutils/dist.py", line 292, in __init__
self.finalize_options()
File "/usr/lib/python3.8/site-packages/setuptools/dist.py", line 721, in finalize_options
ep(self)
File "/usr/lib/python3.8/site-packages/setuptools/dist.py", line 728, in _finalize_setup_keywords
ep.load()(self, ep.name, value)
File "/usr/lib/python3.8/site-packages/setuptools_scm/integration.py", line 17, in version_keyword
dist.metadata.version = _get_version(config)
File "/usr/lib/python3.8/site-packages/setuptools_scm/__init__.py", line 148, in _get_version
parsed_version = _do_parse(config)
File "/usr/lib/python3.8/site-packages/setuptools_scm/__init__.py", line 110, in _do_parse
raise LookupError(
LookupError: setuptools-scm was unable to detect version for '/build/qtile/src/qtile-0.16.0'.
Make sure you're either building from a fully intact git repository or PyPI tarballs. Most other sources (such as GitHub's tarballs, a git checkout without the .git folder) don't contain the necessary metadata and will not work.
For example, if you're using pip, instead of https://github.com/user/proj/archive/master.zip use git+https://github.com/user/proj.git#egg=proj
```
It seems that setuptools_scm has been introduced. Unfortunately, this breaks the build for me.
It would be great to either include the tests in the pypi sdist tarballs or to start using [signed tags](https://github.com/qtile/qtile/tags) again, as then I can rely upon signed tags and a git repository (note: the latter might not help other distributions, as they have different policies).
If you choose the latter (both would be great too), please make sure to have @flacjacket sign the key of @tych0 so that a clear chain of trust can be established.
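As a packaging-side workaround sketch (not something qtile's current setup.py does): setuptools_scm can be given a version explicitly when no git metadata is available, either via the `SETUPTOOLS_SCM_PRETEND_VERSION` environment variable or a `fallback_version` entry in `use_scm_version`. A minimal sketch, assuming those setuptools_scm options:
```
# Sketch only; qtile's real setup.py passes use_scm_version=True and relies on
# git metadata being present. fallback_version is used when that metadata is missing.
from setuptools import setup

setup(
    use_scm_version={"fallback_version": "0.16.0"},
    setup_requires=["setuptools_scm"],
)
```
Exporting `SETUPTOOLS_SCM_PRETEND_VERSION=0.16.0` before running `python setup.py build` should have the same effect without patching setup.py.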
# Qtile version
0.16.0
# Stack traces
n/a
# Configuration
n/a
| [
{
"content": "#!/usr/bin/env python3\n\n# Copyright (c) 2008 Aldo Cortesi\n# Copyright (c) 2011 Mounier Florian\n# Copyright (c) 2012 dmpayton\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 roger\n# Copyright (c) 2014 Pedro Algarvio\n# Copyright (c) 2014-2015 Tycho Andersen\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport sys\nimport textwrap\n\nfrom setuptools import setup\nfrom setuptools.command.install import install\n\n\nclass CheckCairoXcb(install):\n def cairo_xcb_check(self):\n try:\n from cairocffi import cairo\n cairo.cairo_xcb_surface_create\n return True\n except AttributeError:\n return False\n\n def finalize_options(self):\n if not self.cairo_xcb_check():\n\n print(textwrap.dedent(\"\"\"\n\n It looks like your cairocffi was not built with xcffib support. To fix this:\n\n - Ensure a recent xcffib is installed (pip install 'xcffib>=0.5.0')\n - The pip cache is cleared (remove ~/.cache/pip, if it exists)\n - Reinstall cairocffi, either:\n\n pip install --no-deps --ignore-installed cairocffi\n\n or\n\n pip uninstall cairocffi && pip install cairocffi\n \"\"\"))\n\n sys.exit(1)\n install.finalize_options(self)\n\n\ndef get_cffi_modules():\n cffi_modules = [\n 'libqtile/pango_ffi_build.py:pango_ffi',\n 'libqtile/backend/x11/xcursors_ffi_build.py:xcursors_ffi',\n ]\n try:\n from cffi.error import PkgConfigError\n from cffi.pkgconfig import call\n except ImportError:\n # technically all ffi defined above wont be built\n print('CFFI package is missing')\n else:\n try:\n call('libpulse', '--libs')\n except PkgConfigError:\n print('Failed to find pulseaudio headers. '\n 'PulseVolume widget will be unavailable')\n else:\n cffi_modules.append(\n 'libqtile/widget/pulseaudio_ffi.py:pulseaudio_ffi'\n )\n return cffi_modules\n\n\nsetup(\n cmdclass={'install': CheckCairoXcb},\n use_scm_version=True,\n cffi_modules=get_cffi_modules(),\n install_requires=[\"cffi>=1.0.0\"],\n)\n",
"path": "setup.py"
}
] | [
{
"content": "#!/usr/bin/env python3\n\n# Copyright (c) 2008 Aldo Cortesi\n# Copyright (c) 2011 Mounier Florian\n# Copyright (c) 2012 dmpayton\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 roger\n# Copyright (c) 2014 Pedro Algarvio\n# Copyright (c) 2014-2015 Tycho Andersen\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport sys\nimport textwrap\n\nfrom setuptools import setup\nfrom setuptools.command.install import install\n\n\nclass CheckCairoXcb(install):\n def cairo_xcb_check(self):\n try:\n from cairocffi import cairo\n cairo.cairo_xcb_surface_create\n return True\n except AttributeError:\n return False\n\n def finalize_options(self):\n if not self.cairo_xcb_check():\n\n print(textwrap.dedent(\"\"\"\n\n It looks like your cairocffi was not built with xcffib support. To fix this:\n\n - Ensure a recent xcffib is installed (pip install 'xcffib>=0.5.0')\n - The pip cache is cleared (remove ~/.cache/pip, if it exists)\n - Reinstall cairocffi, either:\n\n pip install --no-deps --ignore-installed cairocffi\n\n or\n\n pip uninstall cairocffi && pip install cairocffi\n \"\"\"))\n\n sys.exit(1)\n install.finalize_options(self)\n\n\ndef get_cffi_modules():\n cffi_modules = [\n 'libqtile/pango_ffi_build.py:pango_ffi',\n 'libqtile/backend/x11/xcursors_ffi_build.py:xcursors_ffi',\n ]\n try:\n from cffi.error import PkgConfigError\n from cffi.pkgconfig import call\n except ImportError:\n # technically all ffi defined above wont be built\n print('CFFI package is missing')\n else:\n try:\n call('libpulse', '--libs')\n except PkgConfigError:\n print('Failed to find pulseaudio headers. '\n 'PulseVolume widget will be unavailable')\n else:\n cffi_modules.append(\n 'libqtile/widget/pulseaudio_ffi.py:pulseaudio_ffi'\n )\n return cffi_modules\n\n\nsetup(\n cmdclass={'install': CheckCairoXcb},\n use_scm_version=True,\n cffi_modules=get_cffi_modules(),\n install_requires=[\"cffi>=1.0.0\"],\n include_package_data=True,\n)\n",
"path": "setup.py"
}
] | diff --git a/MANIFEST.in b/MANIFEST.in
index 08975f6dc1..0f66062037 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,11 +16,11 @@ exclude logo.png
graft libqtile/resources
graft resources
+graft test
prune bin
prune docs
prune scripts
-prune test
prune rpm
include bin/dqtile-cmd
include bin/iqshell
diff --git a/setup.py b/setup.py
index 12c7735fae..b82201c8f3 100755
--- a/setup.py
+++ b/setup.py
@@ -93,4 +93,5 @@ def get_cffi_modules():
use_scm_version=True,
cffi_modules=get_cffi_modules(),
install_requires=["cffi>=1.0.0"],
+ include_package_data=True,
)
|
facebookresearch__fairseq-62 | installation from source requires installing cffi
This is a very minor documentation issue.
Note: I'm using python3/pip3, as there is a comment about requiring Python 3 for fairseq-py.
I'm not using anaconda; I have had issues with package consistency, so I avoid it.
fairseq-py was installed with:
git clone https://github.com/facebookresearch/fairseq-py.git
sudo pip3 install -r requirements.txt
levinth@zt-gpu-lin-1:~/fairseq-py$ sudo python3 setup.py build
Traceback (most recent call last):
File "/usr/local/lib/python3.5/dist-packages/torch/utils/ffi/__init__.py", line 12, in <module>
import cffi
ImportError: No module named 'cffi'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "setup.py", line 13, in <module>
from torch.utils.ffi import create_extension
File "/usr/local/lib/python3.5/dist-packages/torch/utils/ffi/__init__.py", line 14, in <module>
raise ImportError("torch.utils.ffi requires the cffi package")
ImportError: torch.utils.ffi requires the cffi package
levinth@zt-gpu-lin-1:~/fairseq-py$ pip3 install cffi
and then the build worked.
This can likely be fixed by adding cffi to requirements.txt.
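For context, the guard that raises this error lives in torch itself, not fairseq; a minimal sketch of the import-guard pattern visible in the traceback above (torch/utils/ffi/__init__.py):
```
# Minimal sketch of the guard shown in the traceback: the cffi import is attempted
# at module load time and re-raised with a clearer message if it is missing.
try:
    import cffi  # noqa: F401
except ImportError:
    raise ImportError("torch.utils.ffi requires the cffi package")
```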
| [
{
"content": "# Copyright (c) 2017-present, Facebook, Inc.\n# All rights reserved.\n#\n# This source code is licensed under the license found in the LICENSE file in\n# the root directory of this source tree. An additional grant of patent rights\n# can be found in the PATENTS file in the same directory.\n#\n\n\"\"\"\nWrapper around various loggers and progress bars (e.g., tqdm).\n\"\"\"\n\nfrom collections import OrderedDict\nimport json\nfrom numbers import Number\nimport sys\n\nfrom tqdm import tqdm\n\nfrom fairseq.meters import AverageMeter\n\n\nclass progress_bar(object):\n \"\"\"Abstract class for progress bars.\"\"\"\n def __init__(self, iterable, epoch=None, prefix=None):\n self.iterable = iterable\n self.epoch = epoch\n self.prefix = ''\n if epoch is not None:\n self.prefix += '| epoch {:03d}'.format(epoch)\n if prefix is not None:\n self.prefix += ' | {}'.format(prefix)\n\n def __enter__(self):\n return self\n\n def __exit__(self, *exc):\n return False\n\n def __iter__(self):\n raise NotImplementedError\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n raise NotImplementedError\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n raise NotImplementedError\n\n def _str_commas(self, stats):\n return ', '.join(key + '=' + stats[key].strip()\n for key in stats.keys())\n\n def _str_pipes(self, stats):\n return ' | '.join(key + ' ' + stats[key].strip()\n for key in stats.keys())\n\n def _format_stats(self, stats):\n postfix = OrderedDict(stats)\n # Preprocess stats according to datatype\n for key in postfix.keys():\n # Number: limit the length of the string\n if isinstance(postfix[key], Number):\n postfix[key] = '{:g}'.format(postfix[key])\n # Meter: display both current and average value\n elif isinstance(postfix[key], AverageMeter):\n postfix[key] = '{:.2f} ({:.2f})'.format(\n postfix[key].val, postfix[key].avg)\n # Else for any other type, try to get the string conversion\n elif not isinstance(postfix[key], str):\n postfix[key] = str(postfix[key])\n # Else if it's a string, don't need to preprocess anything\n return postfix\n\n\nclass json_progress_bar(progress_bar):\n \"\"\"Log output in JSON format.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None, log_interval=1000):\n super().__init__(iterable, epoch, prefix)\n self.log_interval = log_interval\n self.stats = None\n\n def __iter__(self):\n size = float(len(self.iterable))\n for i, obj in enumerate(self.iterable):\n yield obj\n if self.stats is not None and i > 0 and \\\n self.log_interval is not None and i % self.log_interval == 0:\n update = self.epoch + float(i / size) if self.epoch is not None else None\n stats = self._format_stats(self.stats, epoch=self.epoch, update=update)\n print('sweep_log: ' + json.dumps(stats), flush=True)\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n self.stats = stats\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n stats = self._format_stats(self.stats, epoch=self.epoch)\n print(\"sweep_log: \" + json.dumps(stats), flush=True)\n\n def _format_stats(self, stats, epoch=None, update=None):\n postfix = OrderedDict()\n if epoch is not None:\n postfix['epoch'] = epoch\n if update is not None:\n postfix['update'] = update\n # Preprocess stats according to datatype\n for key in stats.keys():\n # Meter: display both current and average value\n if isinstance(stats[key], AverageMeter):\n postfix[key] = stats[key].val\n postfix[key + '_avg'] = stats[key].avg\n else:\n 
postfix[key] = stats[key]\n return postfix\n\n\nclass noop_progress_bar(progress_bar):\n \"\"\"No logging.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None):\n super().__init__(iterable, epoch, prefix)\n\n def __iter__(self):\n for obj in self.iterable:\n yield obj\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n pass\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n pass\n\n\nclass simple_progress_bar(progress_bar):\n \"\"\"A minimal logger for non-TTY environments.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None, log_interval=1000):\n super().__init__(iterable, epoch, prefix)\n self.log_interval = log_interval\n self.stats = None\n\n def __iter__(self):\n size = len(self.iterable)\n for i, obj in enumerate(self.iterable):\n yield obj\n if self.stats is not None and i > 0 and \\\n self.log_interval is not None and i % self.log_interval == 0:\n postfix = self._str_commas(self.stats)\n print('{}: {:5d} / {:d} {}'.format(self.prefix, i, size, postfix),\n flush=True)\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n self.stats = self._format_stats(stats)\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n postfix = self._str_pipes(self._format_stats(stats))\n print('{} | {}'.format(self.prefix, postfix), flush=True)\n\n\nclass tqdm_progress_bar(progress_bar):\n \"\"\"Log to tqdm.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None):\n super().__init__(iterable, epoch, prefix)\n self.tqdm = tqdm(iterable, self.prefix, leave=False)\n\n def __iter__(self):\n return iter(self.tqdm)\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n self.tqdm.set_postfix(self._format_stats(stats), refresh=False)\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n postfix = self._str_pipes(self._format_stats(stats))\n self.tqdm.write('{} | {}'.format(self.tqdm.desc, postfix))\n",
"path": "fairseq/progress_bar.py"
}
] | [
{
"content": "# Copyright (c) 2017-present, Facebook, Inc.\n# All rights reserved.\n#\n# This source code is licensed under the license found in the LICENSE file in\n# the root directory of this source tree. An additional grant of patent rights\n# can be found in the PATENTS file in the same directory.\n#\n\n\"\"\"\nWrapper around various loggers and progress bars (e.g., tqdm).\n\"\"\"\n\nfrom collections import OrderedDict\nimport json\nfrom numbers import Number\n\nfrom tqdm import tqdm\n\nfrom fairseq.meters import AverageMeter\n\n\nclass progress_bar(object):\n \"\"\"Abstract class for progress bars.\"\"\"\n def __init__(self, iterable, epoch=None, prefix=None):\n self.iterable = iterable\n self.epoch = epoch\n self.prefix = ''\n if epoch is not None:\n self.prefix += '| epoch {:03d}'.format(epoch)\n if prefix is not None:\n self.prefix += ' | {}'.format(prefix)\n\n def __enter__(self):\n return self\n\n def __exit__(self, *exc):\n return False\n\n def __iter__(self):\n raise NotImplementedError\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n raise NotImplementedError\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n raise NotImplementedError\n\n def _str_commas(self, stats):\n return ', '.join(key + '=' + stats[key].strip()\n for key in stats.keys())\n\n def _str_pipes(self, stats):\n return ' | '.join(key + ' ' + stats[key].strip()\n for key in stats.keys())\n\n def _format_stats(self, stats):\n postfix = OrderedDict(stats)\n # Preprocess stats according to datatype\n for key in postfix.keys():\n # Number: limit the length of the string\n if isinstance(postfix[key], Number):\n postfix[key] = '{:g}'.format(postfix[key])\n # Meter: display both current and average value\n elif isinstance(postfix[key], AverageMeter):\n postfix[key] = '{:.2f} ({:.2f})'.format(\n postfix[key].val, postfix[key].avg)\n # Else for any other type, try to get the string conversion\n elif not isinstance(postfix[key], str):\n postfix[key] = str(postfix[key])\n # Else if it's a string, don't need to preprocess anything\n return postfix\n\n\nclass json_progress_bar(progress_bar):\n \"\"\"Log output in JSON format.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None, log_interval=1000):\n super().__init__(iterable, epoch, prefix)\n self.log_interval = log_interval\n self.stats = None\n\n def __iter__(self):\n size = float(len(self.iterable))\n for i, obj in enumerate(self.iterable):\n yield obj\n if self.stats is not None and i > 0 and \\\n self.log_interval is not None and i % self.log_interval == 0:\n update = self.epoch + float(i / size) if self.epoch is not None else None\n stats = self._format_stats(self.stats, epoch=self.epoch, update=update)\n print('sweep_log: ' + json.dumps(stats), flush=True)\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n self.stats = stats\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n stats = self._format_stats(self.stats, epoch=self.epoch)\n print(\"sweep_log: \" + json.dumps(stats), flush=True)\n\n def _format_stats(self, stats, epoch=None, update=None):\n postfix = OrderedDict()\n if epoch is not None:\n postfix['epoch'] = epoch\n if update is not None:\n postfix['update'] = update\n # Preprocess stats according to datatype\n for key in stats.keys():\n # Meter: display both current and average value\n if isinstance(stats[key], AverageMeter):\n postfix[key] = stats[key].val\n postfix[key + '_avg'] = stats[key].avg\n else:\n postfix[key] = 
stats[key]\n return postfix\n\n\nclass noop_progress_bar(progress_bar):\n \"\"\"No logging.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None):\n super().__init__(iterable, epoch, prefix)\n\n def __iter__(self):\n for obj in self.iterable:\n yield obj\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n pass\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n pass\n\n\nclass simple_progress_bar(progress_bar):\n \"\"\"A minimal logger for non-TTY environments.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None, log_interval=1000):\n super().__init__(iterable, epoch, prefix)\n self.log_interval = log_interval\n self.stats = None\n\n def __iter__(self):\n size = len(self.iterable)\n for i, obj in enumerate(self.iterable):\n yield obj\n if self.stats is not None and i > 0 and \\\n self.log_interval is not None and i % self.log_interval == 0:\n postfix = self._str_commas(self.stats)\n print('{}: {:5d} / {:d} {}'.format(self.prefix, i, size, postfix),\n flush=True)\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n self.stats = self._format_stats(stats)\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n postfix = self._str_pipes(self._format_stats(stats))\n print('{} | {}'.format(self.prefix, postfix), flush=True)\n\n\nclass tqdm_progress_bar(progress_bar):\n \"\"\"Log to tqdm.\"\"\"\n\n def __init__(self, iterable, epoch=None, prefix=None):\n super().__init__(iterable, epoch, prefix)\n self.tqdm = tqdm(iterable, self.prefix, leave=False)\n\n def __iter__(self):\n return iter(self.tqdm)\n\n def log(self, stats):\n \"\"\"Log intermediate stats according to log_interval.\"\"\"\n self.tqdm.set_postfix(self._format_stats(stats), refresh=False)\n\n def print(self, stats):\n \"\"\"Print end-of-epoch stats.\"\"\"\n postfix = self._str_pipes(self._format_stats(stats))\n self.tqdm.write('{} | {}'.format(self.tqdm.desc, postfix))\n",
"path": "fairseq/progress_bar.py"
}
] | diff --git a/fairseq/progress_bar.py b/fairseq/progress_bar.py
index 713f73b7b2..2c4acf833f 100644
--- a/fairseq/progress_bar.py
+++ b/fairseq/progress_bar.py
@@ -13,7 +13,6 @@
from collections import OrderedDict
import json
from numbers import Number
-import sys
from tqdm import tqdm
diff --git a/requirements.txt b/requirements.txt
index 3b55e1b4d3..3327ee454c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
+cffi
numpy
torch
tqdm
|
pex-tool__pex-795 | pex --index-url=... fails in 2.0.0
Hello,
My team has been having an issue since this morning with the new version of PEX.
This is a big Django project, but with a simple configuration.
Here is an extract of the setup.py
```
setuptools.setup(
name='rackguru-api',
version=find_version(),
install_requires=_INSTALL_REQUIRES,
author='Criteo',
author_email='[email protected]',
description='Criteo datacenter assets manager',
packages=setuptools.find_packages(),
entry_points={
'console_scripts': [
'rackguru-run = marathon.run:main',
],
},
classifiers=CLASSIFIERS,
include_package_data=True,
)
```
In the tox.ini:
```
# Bundle environment
[testenv:bundle]
deps = pex
setenv =
LANG=en_US.UTF-8
commands =
# Collect the statics to be embedded in the sdist and PEX file (via the MANIFEST)
{envpython} manage.py collectstatic --noinput --clear
# Creates a source archive in sdist/
{envpython} setup.py sdist --dist-dir=sdist --format=gztar
# Build exec file and save it in dist/
{envpython} setup.py bdist_pex --bdist-dir=dist --pex-args='--disable-cache --not-zip-safe --index-url=http://build-nexus.crto.in/repository/pypi/simple' --bdist-all
```
And here is the build output:
```
bundle run-test: commands[2] | /tmp/.tox-rackguru-api-post-submit-3573/com.criteo.rackguru.rackguru-api/bundle/bin/python setup.py bdist_pex --bdist-dir=dist '--pex-args=--disable-cache --not-zip-safe --index-url=http://build-nexus.crto.in/repository/pypi/simple' --bdist-all
running bdist_pex
Writing rackguru-run to dist/rackguru-run
Failed to create pex via /tmp/.tox-rackguru-api-post-submit-3573/com.criteo.rackguru.rackguru-api/bundle/bin/python3.6 -s -m pex /home/jenkins/workspace/rackguru-api-post-submit --disable-cache --not-zip-safe --index-url=http://build-nexus.crto.in/repository/pypi/simple --output-file dist/rackguru-run --script rackguru-run:
Traceback (most recent call last):
File "/usr/lib64/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib64/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/tmp/.tox-rackguru-api-post-submit-3573/com.criteo.rackguru.rackguru-api/bundle/lib/python3.6/site-packages/pex/__main__.py", line 8, in <module>
__name__ == '__main__' and pex.main()
File "/tmp/.tox-rackguru-api-post-submit-3573/com.criteo.rackguru.rackguru-api/bundle/lib/python3.6/site-packages/pex/bin/pex.py", line 628, in main
pex_builder = build_pex(reqs, options)
File "/tmp/.tox-rackguru-api-post-submit-3573/com.criteo.rackguru.rackguru-api/bundle/lib/python3.6/site-packages/pex/bin/pex.py", line 540, in build_pex
indexes = [str(index) for index in options.indexes]
File "/tmp/.tox-rackguru-api-post-submit-3573/com.criteo.rackguru.rackguru-api/bundle/lib/python3.6/site-packages/pex/bin/pex.py", line 540, in <listcomp>
indexes = [str(index) for index in options.indexes]
TypeError: __str__ returned non-string (type NoneType)
```
Do you have any idea about the root issue?
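For what it's worth, that error message is what Python raises when a `__str__` method falls through without returning a string, which matches the shape of `PyPiSentinel.__str__` in `pex/bin/pex.py` (included below). A minimal sketch of that failure mode, with illustrative names:
```
# __str__ evaluates a string literal but never returns it, so it returns None:
class PyPiSentinel(object):
    def __str__(self):
        'https://pypi.org/simple'  # missing `return`

str(PyPiSentinel())  # TypeError: __str__ returned non-string (type NoneType)
```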
| [
{
"content": "# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n\"\"\"\nThe pex.bin.pex utility builds PEX environments and .pex files specified by\nsources, requirements and their dependencies.\n\"\"\"\n\nfrom __future__ import absolute_import, print_function\n\nimport os\nimport sys\nfrom optparse import OptionGroup, OptionParser\nfrom textwrap import TextWrapper\n\nfrom pex.common import die, safe_delete, safe_mkdtemp\nfrom pex.interpreter import PythonInterpreter\nfrom pex.interpreter_constraints import validate_constraints\nfrom pex.pex import PEX\nfrom pex.pex_bootstrapper import iter_compatible_interpreters\nfrom pex.pex_builder import PEXBuilder\nfrom pex.platforms import Platform\nfrom pex.resolver import Unsatisfiable, resolve_multi\nfrom pex.tracer import TRACER\nfrom pex.variables import ENV, Variables\nfrom pex.version import __version__\n\nCANNOT_SETUP_INTERPRETER = 102\nINVALID_OPTIONS = 103\n\n\nclass Logger(object):\n def _default_logger(self, msg, V):\n if V:\n print(msg, file=sys.stderr)\n\n _LOGGER = _default_logger\n\n def __call__(self, msg, V):\n self._LOGGER(msg, V)\n\n def set_logger(self, logger_callback):\n self._LOGGER = logger_callback\n\nlog = Logger()\n\n\ndef parse_bool(option, opt_str, _, parser):\n setattr(parser.values, option.dest, not opt_str.startswith('--no'))\n\n\ndef increment_verbosity(option, opt_str, _, parser):\n verbosity = getattr(parser.values, option.dest, 0)\n setattr(parser.values, option.dest, verbosity + 1)\n\n\ndef process_disable_cache(option, option_str, option_value, parser):\n setattr(parser.values, option.dest, None)\n\n\nclass PyPiSentinel(object):\n def __str__(self):\n 'https://pypi.org/simple'\n\n\n_PYPI = PyPiSentinel()\n\n\ndef process_pypi_option(option, option_str, option_value, parser):\n if option_str.startswith('--no'):\n setattr(parser.values, option.dest, [])\n else:\n indexes = getattr(parser.values, option.dest, [])\n if _PYPI not in indexes:\n indexes.append(_PYPI)\n setattr(parser.values, option.dest, indexes)\n\n\ndef process_find_links(option, option_str, option_value, parser):\n find_links = getattr(parser.values, option.dest, [])\n if option_value not in find_links:\n find_links.append(option_value)\n setattr(parser.values, option.dest, find_links)\n\n\ndef process_index_url(option, option_str, option_value, parser):\n indexes = getattr(parser.values, option.dest, [])\n if option_value not in indexes:\n indexes.append(option_value)\n setattr(parser.values, option.dest, indexes)\n\n\ndef process_transitive(option, option_str, option_value, parser):\n transitive = option_str == '--transitive'\n setattr(parser.values, option.dest, transitive)\n\n\ndef print_variable_help(option, option_str, option_value, parser):\n for variable_name, variable_type, variable_help in Variables.iter_help():\n print('\\n%s: %s\\n' % (variable_name, variable_type))\n for line in TextWrapper(initial_indent=' ' * 4, subsequent_indent=' ' * 4).wrap(variable_help):\n print(line)\n sys.exit(0)\n\n\ndef configure_clp_pex_resolution(parser):\n group = OptionGroup(\n parser,\n 'Resolver options',\n 'Tailor how to find, resolve and translate the packages that get put into the PEX '\n 'environment.')\n\n group.add_option(\n '--pypi', '--no-pypi', '--no-index',\n action='callback',\n dest='indexes',\n default=[_PYPI],\n callback=process_pypi_option,\n help='Whether to use pypi to resolve dependencies; Default: use pypi')\n\n group.add_option(\n '--pex-path',\n 
dest='pex_path',\n type=str,\n default=None,\n help='A colon separated list of other pex files to merge into the runtime environment.')\n\n group.add_option(\n '-f', '--find-links', '--repo',\n metavar='PATH/URL',\n action='callback',\n default=[],\n dest='find_links',\n callback=process_find_links,\n type=str,\n help='Additional repository path (directory or URL) to look for requirements.')\n\n group.add_option(\n '-i', '--index', '--index-url',\n metavar='URL',\n action='callback',\n dest='indexes',\n callback=process_index_url,\n type=str,\n help='Additional cheeseshop indices to use to satisfy requirements.')\n\n group.add_option(\n '--pre', '--no-pre',\n dest='allow_prereleases',\n default=False,\n action='callback',\n callback=parse_bool,\n help='Whether to include pre-release and development versions of requirements; '\n 'Default: only stable versions are used, unless explicitly requested')\n\n group.add_option(\n '--disable-cache',\n action='callback',\n dest='cache_dir',\n callback=process_disable_cache,\n help='Disable caching in the pex tool entirely.')\n\n group.add_option(\n '--cache-dir',\n dest='cache_dir',\n default='{pex_root}/build',\n help='The local cache directory to use for speeding up requirement '\n 'lookups. [Default: ~/.pex/build]')\n\n group.add_option(\n '--wheel', '--no-wheel', '--no-use-wheel',\n dest='use_wheel',\n default=True,\n action='callback',\n callback=parse_bool,\n help='Whether to allow wheel distributions; Default: allow wheels')\n\n group.add_option(\n '--build', '--no-build',\n dest='build',\n default=True,\n action='callback',\n callback=parse_bool,\n help='Whether to allow building of distributions from source; Default: allow builds')\n\n group.add_option(\n '--transitive', '--no-transitive', '--intransitive',\n dest='transitive',\n default=True,\n action='callback',\n callback=process_transitive,\n help='Whether to transitively resolve requirements. Default: True')\n\n parser.add_option_group(group)\n\n\ndef configure_clp_pex_options(parser):\n group = OptionGroup(\n parser,\n 'PEX output options',\n 'Tailor the behavior of the emitted .pex file if -o is specified.')\n\n group.add_option(\n '--zip-safe', '--not-zip-safe',\n dest='zip_safe',\n default=True,\n action='callback',\n callback=parse_bool,\n help='Whether or not the sources in the pex file are zip safe. If they are '\n 'not zip safe, they will be written to disk prior to execution; '\n 'Default: zip safe.')\n\n group.add_option(\n '--always-write-cache',\n dest='always_write_cache',\n default=False,\n action='store_true',\n help='Always write the internally cached distributions to disk prior to invoking '\n 'the pex source code. This can use less memory in RAM constrained '\n 'environments. [Default: %default]')\n\n group.add_option(\n '--ignore-errors',\n dest='ignore_errors',\n default=False,\n action='store_true',\n help='Ignore run-time requirement resolution errors when invoking the pex. '\n '[Default: %default]')\n\n group.add_option(\n '--inherit-path',\n dest='inherit_path',\n default='false',\n action='store',\n choices=['false', 'fallback', 'prefer'],\n help='Inherit the contents of sys.path (including site-packages, user site-packages and '\n 'PYTHONPATH) running the pex. Possible values: false (does not inherit sys.path), '\n 'fallback (inherits sys.path after packaged dependencies), prefer (inherits sys.path '\n 'before packaged dependencies), No value (alias for prefer, for backwards '\n 'compatibility). 
[Default: %default]')\n\n group.add_option(\n '--compile', '--no-compile',\n dest='compile',\n default=False,\n action='callback',\n callback=parse_bool,\n help='Compiling means that the built pex will include .pyc files, which will result in '\n 'slightly faster startup performance. However, compiling means that the generated pex '\n 'likely will not be reproducible, meaning that if you were to run `./pex -o` with the '\n 'same inputs then the new pex would not be byte-for-byte identical to the original.')\n\n group.add_option(\n '--use-system-time', '--no-use-system-time',\n dest='use_system_time',\n default=False,\n action='callback',\n callback=parse_bool,\n help='Use the current system time to generate timestamps for the new pex. Otherwise, Pex '\n 'will use midnight on January 1, 1980. By using system time, the generated pex '\n 'will not be reproducible, meaning that if you were to run `./pex -o` with the '\n 'same inputs then the new pex would not be byte-for-byte identical to the original.')\n\n parser.add_option_group(group)\n\n\ndef configure_clp_pex_environment(parser):\n group = OptionGroup(\n parser,\n 'PEX environment options',\n 'Tailor the interpreter and platform targets for the PEX environment.')\n\n group.add_option(\n '--python',\n dest='python',\n default=[],\n type='str',\n action='append',\n help='The Python interpreter to use to build the pex. Either specify an explicit '\n 'path to an interpreter, or specify a binary accessible on $PATH. This option '\n 'can be passed multiple times to create a multi-interpreter compatible pex. '\n 'Default: Use current interpreter.')\n\n group.add_option(\n '--interpreter-constraint',\n dest='interpreter_constraint',\n default=[],\n type='str',\n action='append',\n help='Constrain the selected Python interpreter. Specify with Requirement-style syntax, '\n 'e.g. \"CPython>=2.7,<3\" (A CPython interpreter with version >=2.7 AND version <3) '\n 'or \"PyPy\" (A pypy interpreter of any version). This argument may be repeated multiple '\n 'times to OR the constraints.')\n\n group.add_option(\n '--rcfile',\n dest='rc_file',\n default=None,\n help='An additional path to a pexrc file to read during configuration parsing. '\n 'Used primarily for testing.')\n\n group.add_option(\n '--python-shebang',\n dest='python_shebang',\n default=None,\n help='The exact shebang (#!...) line to add at the top of the PEX file minus the '\n '#!. This overrides the default behavior, which picks an environment python '\n 'interpreter compatible with the one used to build the PEX file.')\n\n group.add_option(\n '--platform',\n dest='platforms',\n default=[],\n type=str,\n action='append',\n help='The platform for which to build the PEX. This option can be passed multiple times '\n 'to create a multi-platform pex. To use wheels for specific interpreter/platform tags'\n ', you can append them to the platform with hyphens like: PLATFORM-IMPL-PYVER-ABI '\n '(e.g. \"linux_x86_64-cp-27-cp27mu\", \"macosx_10.12_x86_64-cp-36-cp36m\") PLATFORM is '\n 'the host platform e.g. \"linux-x86_64\", \"macosx-10.12-x86_64\", etc\". IMPL is the '\n 'python implementation abbreviation (e.g. \"cp\", \"pp\", \"jp\"). PYVER is a two-digit '\n 'string representing the python version (e.g. \"27\", \"36\"). ABI is the ABI tag '\n '(e.g. \"cp36m\", \"cp27mu\", \"abi3\", \"none\"). 
Default: current platform.')\n\n parser.add_option_group(group)\n\n\ndef configure_clp_pex_entry_points(parser):\n group = OptionGroup(\n parser,\n 'PEX entry point options',\n 'Specify what target/module the PEX should invoke if any.')\n\n group.add_option(\n '-m', '-e', '--entry-point',\n dest='entry_point',\n metavar='MODULE[:SYMBOL]',\n default=None,\n help='Set the entry point to module or module:symbol. If just specifying module, pex '\n 'behaves like python -m, e.g. python -m SimpleHTTPServer. If specifying '\n 'module:symbol, pex imports that symbol and invokes it as if it were main.')\n\n group.add_option(\n '-c', '--script', '--console-script',\n dest='script',\n default=None,\n metavar='SCRIPT_NAME',\n help='Set the entry point as to the script or console_script as defined by a any of the '\n 'distributions in the pex. For example: \"pex -c fab fabric\" or \"pex -c mturk boto\".')\n\n group.add_option(\n '--validate-entry-point',\n dest='validate_ep',\n default=False,\n action='store_true',\n help='Validate the entry point by importing it in separate process. Warning: this could have '\n 'side effects. For example, entry point `a.b.c:m` will translate to '\n '`from a.b.c import m` during validation. [Default: %default]')\n\n parser.add_option_group(group)\n\n\ndef configure_clp():\n usage = (\n '%prog [-o OUTPUT.PEX] [options] [-- arg1 arg2 ...]\\n\\n'\n '%prog builds a PEX (Python Executable) file based on the given specifications: '\n 'sources, requirements, their dependencies and other options.')\n\n parser = OptionParser(usage=usage, version='%prog {0}'.format(__version__))\n configure_clp_pex_resolution(parser)\n configure_clp_pex_options(parser)\n configure_clp_pex_environment(parser)\n configure_clp_pex_entry_points(parser)\n\n parser.add_option(\n '-o', '--output-file',\n dest='pex_name',\n default=None,\n help='The name of the generated .pex file: Omiting this will run PEX '\n 'immediately and not save it to a file.')\n\n parser.add_option(\n '-p', '--preamble-file',\n dest='preamble_file',\n metavar='FILE',\n default=None,\n type=str,\n help='The name of a file to be included as the preamble for the generated .pex file')\n\n parser.add_option(\n '-D', '--sources-directory',\n dest='sources_directory',\n metavar='DIR',\n default=[],\n type=str,\n action='append',\n help='Add sources directory to be packaged into the generated .pex file.'\n ' This option can be used multiple times.')\n\n parser.add_option(\n '-R', '--resources-directory',\n dest='resources_directory',\n metavar='DIR',\n default=[],\n type=str,\n action='append',\n help='Add resources directory to be packaged into the generated .pex file.'\n ' This option can be used multiple times.')\n\n parser.add_option(\n '-r', '--requirement',\n dest='requirement_files',\n metavar='FILE',\n default=[],\n type=str,\n action='append',\n help='Add requirements from the given requirements file. This option can be used multiple '\n 'times.')\n\n parser.add_option(\n '--constraints',\n dest='constraint_files',\n metavar='FILE',\n default=[],\n type=str,\n action='append',\n help='Add constraints from the given constraints file. 
This option can be used multiple '\n 'times.')\n\n parser.add_option(\n '-v',\n dest='verbosity',\n default=0,\n action='callback',\n callback=increment_verbosity,\n help='Turn on logging verbosity, may be specified multiple times.')\n\n parser.add_option(\n '--emit-warnings', '--no-emit-warnings',\n dest='emit_warnings',\n action='callback',\n callback=parse_bool,\n default=True,\n help='Emit runtime UserWarnings on stderr. If false, only emit them when PEX_VERBOSE is set.'\n 'Default: emit user warnings to stderr')\n\n parser.add_option(\n '--pex-root',\n dest='pex_root',\n default=None,\n help='Specify the pex root used in this invocation of pex. [Default: ~/.pex]'\n )\n\n parser.add_option(\n '--help-variables',\n action='callback',\n callback=print_variable_help,\n help='Print out help about the various environment variables used to change the behavior of '\n 'a running PEX file.')\n\n return parser\n\n\ndef _safe_link(src, dst):\n try:\n os.unlink(dst)\n except OSError:\n pass\n os.symlink(src, dst)\n\n\ndef build_pex(reqs, options):\n interpreters = None # Default to the current interpreter.\n\n # NB: options.python and interpreter constraints cannot be used together.\n if options.python:\n with TRACER.timed('Resolving interpreters', V=2):\n def to_python_interpreter(full_path_or_basename):\n if os.path.exists(full_path_or_basename):\n return PythonInterpreter.from_binary(full_path_or_basename)\n else:\n interpreter = PythonInterpreter.from_env(full_path_or_basename)\n if interpreter is None:\n die('Failed to find interpreter: %s' % full_path_or_basename)\n return interpreter\n\n interpreters = [to_python_interpreter(interp) for interp in options.python]\n elif options.interpreter_constraint:\n with TRACER.timed('Resolving interpreters', V=2):\n constraints = options.interpreter_constraint\n validate_constraints(constraints)\n if options.rc_file or not ENV.PEX_IGNORE_RCFILES:\n rc_variables = Variables.from_rc(rc=options.rc_file)\n pex_python_path = rc_variables.get('PEX_PYTHON_PATH', None)\n else:\n pex_python_path = None\n interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))\n if not interpreters:\n die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)\n\n try:\n with open(options.preamble_file) as preamble_fd:\n preamble = preamble_fd.read()\n except TypeError:\n # options.preamble_file is None\n preamble = None\n\n interpreter = min(interpreters) if interpreters else None\n\n pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)\n\n def walk_and_do(fn, src_dir):\n src_dir = os.path.normpath(src_dir)\n for root, dirs, files in os.walk(src_dir):\n for f in files:\n src_file_path = os.path.join(root, f)\n dst_path = os.path.relpath(src_file_path, src_dir)\n fn(src_file_path, dst_path)\n\n for directory in options.sources_directory:\n walk_and_do(pex_builder.add_source, directory)\n\n for directory in options.resources_directory:\n walk_and_do(pex_builder.add_resource, directory)\n\n pex_info = pex_builder.info\n pex_info.zip_safe = options.zip_safe\n pex_info.pex_path = options.pex_path\n pex_info.always_write_cache = options.always_write_cache\n pex_info.ignore_errors = options.ignore_errors\n pex_info.emit_warnings = options.emit_warnings\n pex_info.inherit_path = options.inherit_path\n if options.interpreter_constraint:\n for ic in options.interpreter_constraint:\n pex_builder.add_interpreter_constraint(ic)\n\n # NB: `None` means use the default (pypi) index, `[]` means use no indexes.\n indexes = 
None\n if options.indexes != [_PYPI] and options.indexes is not None:\n indexes = [str(index) for index in options.indexes]\n\n with TRACER.timed('Resolving distributions ({})'.format(reqs)):\n try:\n resolveds = resolve_multi(requirements=reqs,\n requirement_files=options.requirement_files,\n constraint_files=options.constraint_files,\n allow_prereleases=options.allow_prereleases,\n transitive=options.transitive,\n interpreters=interpreters,\n platforms=options.platforms,\n indexes=indexes,\n find_links=options.find_links,\n cache=options.cache_dir,\n build=options.build,\n use_wheel=options.use_wheel,\n compile=options.compile)\n\n for resolved_dist in resolveds:\n log(' %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),\n V=options.verbosity)\n pex_builder.add_distribution(resolved_dist.distribution)\n pex_builder.add_requirement(resolved_dist.requirement)\n except Unsatisfiable as e:\n die(e)\n\n if options.entry_point and options.script:\n die('Must specify at most one entry point or script.', INVALID_OPTIONS)\n\n if options.entry_point:\n pex_builder.set_entry_point(options.entry_point)\n elif options.script:\n pex_builder.set_script(options.script)\n\n if options.python_shebang:\n pex_builder.set_shebang(options.python_shebang)\n\n return pex_builder\n\n\ndef make_relative_to_root(path):\n \"\"\"Update options so that defaults are user relative to specified pex_root.\"\"\"\n return os.path.normpath(path.format(pex_root=ENV.PEX_ROOT))\n\n\ndef transform_legacy_arg(arg):\n # inherit-path used to be a boolean arg (so either was absent, or --inherit-path)\n # Now it takes a string argument, so --inherit-path is invalid.\n # Fix up the args we're about to parse to preserve backwards compatibility.\n if arg == '--inherit-path':\n return '--inherit-path=prefer'\n return arg\n\n\ndef _compatible_with_current_platform(platforms):\n return (\n not platforms or\n 'current' in platforms or\n str(Platform.current()) in platforms\n )\n\n\ndef main(args=None):\n args = args[:] if args else sys.argv[1:]\n args = [transform_legacy_arg(arg) for arg in args]\n parser = configure_clp()\n\n try:\n separator = args.index('--')\n args, cmdline = args[:separator], args[separator + 1:]\n except ValueError:\n args, cmdline = args, []\n\n options, reqs = parser.parse_args(args=args)\n if options.python and options.interpreter_constraint:\n die('The \"--python\" and \"--interpreter-constraint\" options cannot be used together.')\n\n if options.pex_root:\n ENV.set('PEX_ROOT', options.pex_root)\n else:\n options.pex_root = ENV.PEX_ROOT # If option not specified fallback to env variable.\n\n # Don't alter cache if it is disabled.\n if options.cache_dir:\n options.cache_dir = make_relative_to_root(options.cache_dir)\n\n with ENV.patch(PEX_VERBOSE=str(options.verbosity)):\n with TRACER.timed('Building pex'):\n pex_builder = build_pex(reqs, options)\n\n pex_builder.freeze(bytecode_compile=options.compile)\n pex = PEX(pex_builder.path(),\n interpreter=pex_builder.interpreter,\n verify_entry_point=options.validate_ep)\n\n if options.pex_name is not None:\n log('Saving PEX file to %s' % options.pex_name, V=options.verbosity)\n tmp_name = options.pex_name + '~'\n safe_delete(tmp_name)\n pex_builder.build(\n tmp_name,\n bytecode_compile=options.compile,\n deterministic_timestamp=not options.use_system_time\n )\n os.rename(tmp_name, options.pex_name)\n else:\n if not _compatible_with_current_platform(options.platforms):\n log('WARNING: attempting to run PEX with incompatible platforms!')\n\n log('Running 
PEX file at %s with args %s' % (pex_builder.path(), cmdline),\n V=options.verbosity)\n sys.exit(pex.run(args=list(cmdline)))\n\n\nif __name__ == '__main__':\n main()\n",
"path": "pex/bin/pex.py"
}
] | [
{
"content": "# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n\"\"\"\nThe pex.bin.pex utility builds PEX environments and .pex files specified by\nsources, requirements and their dependencies.\n\"\"\"\n\nfrom __future__ import absolute_import, print_function\n\nimport os\nimport sys\nfrom optparse import OptionGroup, OptionParser\nfrom textwrap import TextWrapper\n\nfrom pex.common import die, safe_delete, safe_mkdtemp\nfrom pex.interpreter import PythonInterpreter\nfrom pex.interpreter_constraints import validate_constraints\nfrom pex.pex import PEX\nfrom pex.pex_bootstrapper import iter_compatible_interpreters\nfrom pex.pex_builder import PEXBuilder\nfrom pex.platforms import Platform\nfrom pex.resolver import Unsatisfiable, resolve_multi\nfrom pex.tracer import TRACER\nfrom pex.variables import ENV, Variables\nfrom pex.version import __version__\n\nCANNOT_SETUP_INTERPRETER = 102\nINVALID_OPTIONS = 103\n\n\nclass Logger(object):\n def _default_logger(self, msg, V):\n if V:\n print(msg, file=sys.stderr)\n\n _LOGGER = _default_logger\n\n def __call__(self, msg, V):\n self._LOGGER(msg, V)\n\n def set_logger(self, logger_callback):\n self._LOGGER = logger_callback\n\nlog = Logger()\n\n\ndef parse_bool(option, opt_str, _, parser):\n setattr(parser.values, option.dest, not opt_str.startswith('--no'))\n\n\ndef increment_verbosity(option, opt_str, _, parser):\n verbosity = getattr(parser.values, option.dest, 0)\n setattr(parser.values, option.dest, verbosity + 1)\n\n\ndef process_disable_cache(option, option_str, option_value, parser):\n setattr(parser.values, option.dest, None)\n\n\nclass PyPiSentinel(object):\n def __str__(self):\n return 'https://pypi.org/simple'\n\n\n_PYPI = PyPiSentinel()\n\n\ndef process_pypi_option(option, option_str, option_value, parser):\n if option_str.startswith('--no'):\n setattr(parser.values, option.dest, [])\n else:\n indexes = getattr(parser.values, option.dest, [])\n if _PYPI not in indexes:\n indexes.append(_PYPI)\n setattr(parser.values, option.dest, indexes)\n\n\ndef process_find_links(option, option_str, option_value, parser):\n find_links = getattr(parser.values, option.dest, [])\n if option_value not in find_links:\n find_links.append(option_value)\n setattr(parser.values, option.dest, find_links)\n\n\ndef process_index_url(option, option_str, option_value, parser):\n indexes = getattr(parser.values, option.dest, [])\n if option_value not in indexes:\n indexes.append(option_value)\n setattr(parser.values, option.dest, indexes)\n\n\ndef process_transitive(option, option_str, option_value, parser):\n transitive = option_str == '--transitive'\n setattr(parser.values, option.dest, transitive)\n\n\ndef print_variable_help(option, option_str, option_value, parser):\n for variable_name, variable_type, variable_help in Variables.iter_help():\n print('\\n%s: %s\\n' % (variable_name, variable_type))\n for line in TextWrapper(initial_indent=' ' * 4, subsequent_indent=' ' * 4).wrap(variable_help):\n print(line)\n sys.exit(0)\n\n\ndef configure_clp_pex_resolution(parser):\n group = OptionGroup(\n parser,\n 'Resolver options',\n 'Tailor how to find, resolve and translate the packages that get put into the PEX '\n 'environment.')\n\n group.add_option(\n '--pypi', '--no-pypi', '--no-index',\n action='callback',\n dest='indexes',\n default=[_PYPI],\n callback=process_pypi_option,\n help='Whether to use pypi to resolve dependencies; Default: use pypi')\n\n group.add_option(\n 
'--pex-path',\n dest='pex_path',\n type=str,\n default=None,\n help='A colon separated list of other pex files to merge into the runtime environment.')\n\n group.add_option(\n '-f', '--find-links', '--repo',\n metavar='PATH/URL',\n action='callback',\n default=[],\n dest='find_links',\n callback=process_find_links,\n type=str,\n help='Additional repository path (directory or URL) to look for requirements.')\n\n group.add_option(\n '-i', '--index', '--index-url',\n metavar='URL',\n action='callback',\n dest='indexes',\n callback=process_index_url,\n type=str,\n help='Additional cheeseshop indices to use to satisfy requirements.')\n\n group.add_option(\n '--pre', '--no-pre',\n dest='allow_prereleases',\n default=False,\n action='callback',\n callback=parse_bool,\n help='Whether to include pre-release and development versions of requirements; '\n 'Default: only stable versions are used, unless explicitly requested')\n\n group.add_option(\n '--disable-cache',\n action='callback',\n dest='cache_dir',\n callback=process_disable_cache,\n help='Disable caching in the pex tool entirely.')\n\n group.add_option(\n '--cache-dir',\n dest='cache_dir',\n default='{pex_root}/build',\n help='The local cache directory to use for speeding up requirement '\n 'lookups. [Default: ~/.pex/build]')\n\n group.add_option(\n '--wheel', '--no-wheel', '--no-use-wheel',\n dest='use_wheel',\n default=True,\n action='callback',\n callback=parse_bool,\n help='Whether to allow wheel distributions; Default: allow wheels')\n\n group.add_option(\n '--build', '--no-build',\n dest='build',\n default=True,\n action='callback',\n callback=parse_bool,\n help='Whether to allow building of distributions from source; Default: allow builds')\n\n group.add_option(\n '--transitive', '--no-transitive', '--intransitive',\n dest='transitive',\n default=True,\n action='callback',\n callback=process_transitive,\n help='Whether to transitively resolve requirements. Default: True')\n\n parser.add_option_group(group)\n\n\ndef configure_clp_pex_options(parser):\n group = OptionGroup(\n parser,\n 'PEX output options',\n 'Tailor the behavior of the emitted .pex file if -o is specified.')\n\n group.add_option(\n '--zip-safe', '--not-zip-safe',\n dest='zip_safe',\n default=True,\n action='callback',\n callback=parse_bool,\n help='Whether or not the sources in the pex file are zip safe. If they are '\n 'not zip safe, they will be written to disk prior to execution; '\n 'Default: zip safe.')\n\n group.add_option(\n '--always-write-cache',\n dest='always_write_cache',\n default=False,\n action='store_true',\n help='Always write the internally cached distributions to disk prior to invoking '\n 'the pex source code. This can use less memory in RAM constrained '\n 'environments. [Default: %default]')\n\n group.add_option(\n '--ignore-errors',\n dest='ignore_errors',\n default=False,\n action='store_true',\n help='Ignore run-time requirement resolution errors when invoking the pex. '\n '[Default: %default]')\n\n group.add_option(\n '--inherit-path',\n dest='inherit_path',\n default='false',\n action='store',\n choices=['false', 'fallback', 'prefer'],\n help='Inherit the contents of sys.path (including site-packages, user site-packages and '\n 'PYTHONPATH) running the pex. Possible values: false (does not inherit sys.path), '\n 'fallback (inherits sys.path after packaged dependencies), prefer (inherits sys.path '\n 'before packaged dependencies), No value (alias for prefer, for backwards '\n 'compatibility). 
[Default: %default]')\n\n group.add_option(\n '--compile', '--no-compile',\n dest='compile',\n default=False,\n action='callback',\n callback=parse_bool,\n help='Compiling means that the built pex will include .pyc files, which will result in '\n 'slightly faster startup performance. However, compiling means that the generated pex '\n 'likely will not be reproducible, meaning that if you were to run `./pex -o` with the '\n 'same inputs then the new pex would not be byte-for-byte identical to the original.')\n\n group.add_option(\n '--use-system-time', '--no-use-system-time',\n dest='use_system_time',\n default=False,\n action='callback',\n callback=parse_bool,\n help='Use the current system time to generate timestamps for the new pex. Otherwise, Pex '\n 'will use midnight on January 1, 1980. By using system time, the generated pex '\n 'will not be reproducible, meaning that if you were to run `./pex -o` with the '\n 'same inputs then the new pex would not be byte-for-byte identical to the original.')\n\n parser.add_option_group(group)\n\n\ndef configure_clp_pex_environment(parser):\n group = OptionGroup(\n parser,\n 'PEX environment options',\n 'Tailor the interpreter and platform targets for the PEX environment.')\n\n group.add_option(\n '--python',\n dest='python',\n default=[],\n type='str',\n action='append',\n help='The Python interpreter to use to build the pex. Either specify an explicit '\n 'path to an interpreter, or specify a binary accessible on $PATH. This option '\n 'can be passed multiple times to create a multi-interpreter compatible pex. '\n 'Default: Use current interpreter.')\n\n group.add_option(\n '--interpreter-constraint',\n dest='interpreter_constraint',\n default=[],\n type='str',\n action='append',\n help='Constrain the selected Python interpreter. Specify with Requirement-style syntax, '\n 'e.g. \"CPython>=2.7,<3\" (A CPython interpreter with version >=2.7 AND version <3) '\n 'or \"PyPy\" (A pypy interpreter of any version). This argument may be repeated multiple '\n 'times to OR the constraints.')\n\n group.add_option(\n '--rcfile',\n dest='rc_file',\n default=None,\n help='An additional path to a pexrc file to read during configuration parsing. '\n 'Used primarily for testing.')\n\n group.add_option(\n '--python-shebang',\n dest='python_shebang',\n default=None,\n help='The exact shebang (#!...) line to add at the top of the PEX file minus the '\n '#!. This overrides the default behavior, which picks an environment python '\n 'interpreter compatible with the one used to build the PEX file.')\n\n group.add_option(\n '--platform',\n dest='platforms',\n default=[],\n type=str,\n action='append',\n help='The platform for which to build the PEX. This option can be passed multiple times '\n 'to create a multi-platform pex. To use wheels for specific interpreter/platform tags'\n ', you can append them to the platform with hyphens like: PLATFORM-IMPL-PYVER-ABI '\n '(e.g. \"linux_x86_64-cp-27-cp27mu\", \"macosx_10.12_x86_64-cp-36-cp36m\") PLATFORM is '\n 'the host platform e.g. \"linux-x86_64\", \"macosx-10.12-x86_64\", etc\". IMPL is the '\n 'python implementation abbreviation (e.g. \"cp\", \"pp\", \"jp\"). PYVER is a two-digit '\n 'string representing the python version (e.g. \"27\", \"36\"). ABI is the ABI tag '\n '(e.g. \"cp36m\", \"cp27mu\", \"abi3\", \"none\"). 
Default: current platform.')\n\n parser.add_option_group(group)\n\n\ndef configure_clp_pex_entry_points(parser):\n group = OptionGroup(\n parser,\n 'PEX entry point options',\n 'Specify what target/module the PEX should invoke if any.')\n\n group.add_option(\n '-m', '-e', '--entry-point',\n dest='entry_point',\n metavar='MODULE[:SYMBOL]',\n default=None,\n help='Set the entry point to module or module:symbol. If just specifying module, pex '\n 'behaves like python -m, e.g. python -m SimpleHTTPServer. If specifying '\n 'module:symbol, pex imports that symbol and invokes it as if it were main.')\n\n group.add_option(\n '-c', '--script', '--console-script',\n dest='script',\n default=None,\n metavar='SCRIPT_NAME',\n help='Set the entry point as to the script or console_script as defined by a any of the '\n 'distributions in the pex. For example: \"pex -c fab fabric\" or \"pex -c mturk boto\".')\n\n group.add_option(\n '--validate-entry-point',\n dest='validate_ep',\n default=False,\n action='store_true',\n help='Validate the entry point by importing it in separate process. Warning: this could have '\n 'side effects. For example, entry point `a.b.c:m` will translate to '\n '`from a.b.c import m` during validation. [Default: %default]')\n\n parser.add_option_group(group)\n\n\ndef configure_clp():\n usage = (\n '%prog [-o OUTPUT.PEX] [options] [-- arg1 arg2 ...]\\n\\n'\n '%prog builds a PEX (Python Executable) file based on the given specifications: '\n 'sources, requirements, their dependencies and other options.')\n\n parser = OptionParser(usage=usage, version='%prog {0}'.format(__version__))\n configure_clp_pex_resolution(parser)\n configure_clp_pex_options(parser)\n configure_clp_pex_environment(parser)\n configure_clp_pex_entry_points(parser)\n\n parser.add_option(\n '-o', '--output-file',\n dest='pex_name',\n default=None,\n help='The name of the generated .pex file: Omiting this will run PEX '\n 'immediately and not save it to a file.')\n\n parser.add_option(\n '-p', '--preamble-file',\n dest='preamble_file',\n metavar='FILE',\n default=None,\n type=str,\n help='The name of a file to be included as the preamble for the generated .pex file')\n\n parser.add_option(\n '-D', '--sources-directory',\n dest='sources_directory',\n metavar='DIR',\n default=[],\n type=str,\n action='append',\n help='Add sources directory to be packaged into the generated .pex file.'\n ' This option can be used multiple times.')\n\n parser.add_option(\n '-R', '--resources-directory',\n dest='resources_directory',\n metavar='DIR',\n default=[],\n type=str,\n action='append',\n help='Add resources directory to be packaged into the generated .pex file.'\n ' This option can be used multiple times.')\n\n parser.add_option(\n '-r', '--requirement',\n dest='requirement_files',\n metavar='FILE',\n default=[],\n type=str,\n action='append',\n help='Add requirements from the given requirements file. This option can be used multiple '\n 'times.')\n\n parser.add_option(\n '--constraints',\n dest='constraint_files',\n metavar='FILE',\n default=[],\n type=str,\n action='append',\n help='Add constraints from the given constraints file. 
This option can be used multiple '\n 'times.')\n\n parser.add_option(\n '-v',\n dest='verbosity',\n default=0,\n action='callback',\n callback=increment_verbosity,\n help='Turn on logging verbosity, may be specified multiple times.')\n\n parser.add_option(\n '--emit-warnings', '--no-emit-warnings',\n dest='emit_warnings',\n action='callback',\n callback=parse_bool,\n default=True,\n help='Emit runtime UserWarnings on stderr. If false, only emit them when PEX_VERBOSE is set.'\n 'Default: emit user warnings to stderr')\n\n parser.add_option(\n '--pex-root',\n dest='pex_root',\n default=None,\n help='Specify the pex root used in this invocation of pex. [Default: ~/.pex]'\n )\n\n parser.add_option(\n '--help-variables',\n action='callback',\n callback=print_variable_help,\n help='Print out help about the various environment variables used to change the behavior of '\n 'a running PEX file.')\n\n return parser\n\n\ndef _safe_link(src, dst):\n try:\n os.unlink(dst)\n except OSError:\n pass\n os.symlink(src, dst)\n\n\ndef build_pex(reqs, options):\n interpreters = None # Default to the current interpreter.\n\n # NB: options.python and interpreter constraints cannot be used together.\n if options.python:\n with TRACER.timed('Resolving interpreters', V=2):\n def to_python_interpreter(full_path_or_basename):\n if os.path.exists(full_path_or_basename):\n return PythonInterpreter.from_binary(full_path_or_basename)\n else:\n interpreter = PythonInterpreter.from_env(full_path_or_basename)\n if interpreter is None:\n die('Failed to find interpreter: %s' % full_path_or_basename)\n return interpreter\n\n interpreters = [to_python_interpreter(interp) for interp in options.python]\n elif options.interpreter_constraint:\n with TRACER.timed('Resolving interpreters', V=2):\n constraints = options.interpreter_constraint\n validate_constraints(constraints)\n if options.rc_file or not ENV.PEX_IGNORE_RCFILES:\n rc_variables = Variables.from_rc(rc=options.rc_file)\n pex_python_path = rc_variables.get('PEX_PYTHON_PATH', None)\n else:\n pex_python_path = None\n interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))\n if not interpreters:\n die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)\n\n try:\n with open(options.preamble_file) as preamble_fd:\n preamble = preamble_fd.read()\n except TypeError:\n # options.preamble_file is None\n preamble = None\n\n interpreter = min(interpreters) if interpreters else None\n\n pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)\n\n def walk_and_do(fn, src_dir):\n src_dir = os.path.normpath(src_dir)\n for root, dirs, files in os.walk(src_dir):\n for f in files:\n src_file_path = os.path.join(root, f)\n dst_path = os.path.relpath(src_file_path, src_dir)\n fn(src_file_path, dst_path)\n\n for directory in options.sources_directory:\n walk_and_do(pex_builder.add_source, directory)\n\n for directory in options.resources_directory:\n walk_and_do(pex_builder.add_resource, directory)\n\n pex_info = pex_builder.info\n pex_info.zip_safe = options.zip_safe\n pex_info.pex_path = options.pex_path\n pex_info.always_write_cache = options.always_write_cache\n pex_info.ignore_errors = options.ignore_errors\n pex_info.emit_warnings = options.emit_warnings\n pex_info.inherit_path = options.inherit_path\n if options.interpreter_constraint:\n for ic in options.interpreter_constraint:\n pex_builder.add_interpreter_constraint(ic)\n\n # NB: `None` means use the default (pypi) index, `[]` means use no indexes.\n indexes = 
None\n if options.indexes != [_PYPI] and options.indexes is not None:\n indexes = [str(index) for index in options.indexes]\n\n with TRACER.timed('Resolving distributions ({})'.format(reqs)):\n try:\n resolveds = resolve_multi(requirements=reqs,\n requirement_files=options.requirement_files,\n constraint_files=options.constraint_files,\n allow_prereleases=options.allow_prereleases,\n transitive=options.transitive,\n interpreters=interpreters,\n platforms=options.platforms,\n indexes=indexes,\n find_links=options.find_links,\n cache=options.cache_dir,\n build=options.build,\n use_wheel=options.use_wheel,\n compile=options.compile)\n\n for resolved_dist in resolveds:\n log(' %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),\n V=options.verbosity)\n pex_builder.add_distribution(resolved_dist.distribution)\n pex_builder.add_requirement(resolved_dist.requirement)\n except Unsatisfiable as e:\n die(e)\n\n if options.entry_point and options.script:\n die('Must specify at most one entry point or script.', INVALID_OPTIONS)\n\n if options.entry_point:\n pex_builder.set_entry_point(options.entry_point)\n elif options.script:\n pex_builder.set_script(options.script)\n\n if options.python_shebang:\n pex_builder.set_shebang(options.python_shebang)\n\n return pex_builder\n\n\ndef make_relative_to_root(path):\n \"\"\"Update options so that defaults are user relative to specified pex_root.\"\"\"\n return os.path.normpath(path.format(pex_root=ENV.PEX_ROOT))\n\n\ndef transform_legacy_arg(arg):\n # inherit-path used to be a boolean arg (so either was absent, or --inherit-path)\n # Now it takes a string argument, so --inherit-path is invalid.\n # Fix up the args we're about to parse to preserve backwards compatibility.\n if arg == '--inherit-path':\n return '--inherit-path=prefer'\n return arg\n\n\ndef _compatible_with_current_platform(platforms):\n return (\n not platforms or\n 'current' in platforms or\n str(Platform.current()) in platforms\n )\n\n\ndef main(args=None):\n args = args[:] if args else sys.argv[1:]\n args = [transform_legacy_arg(arg) for arg in args]\n parser = configure_clp()\n\n try:\n separator = args.index('--')\n args, cmdline = args[:separator], args[separator + 1:]\n except ValueError:\n args, cmdline = args, []\n\n options, reqs = parser.parse_args(args=args)\n if options.python and options.interpreter_constraint:\n die('The \"--python\" and \"--interpreter-constraint\" options cannot be used together.')\n\n if options.pex_root:\n ENV.set('PEX_ROOT', options.pex_root)\n else:\n options.pex_root = ENV.PEX_ROOT # If option not specified fallback to env variable.\n\n # Don't alter cache if it is disabled.\n if options.cache_dir:\n options.cache_dir = make_relative_to_root(options.cache_dir)\n\n with ENV.patch(PEX_VERBOSE=str(options.verbosity)):\n with TRACER.timed('Building pex'):\n pex_builder = build_pex(reqs, options)\n\n pex_builder.freeze(bytecode_compile=options.compile)\n pex = PEX(pex_builder.path(),\n interpreter=pex_builder.interpreter,\n verify_entry_point=options.validate_ep)\n\n if options.pex_name is not None:\n log('Saving PEX file to %s' % options.pex_name, V=options.verbosity)\n tmp_name = options.pex_name + '~'\n safe_delete(tmp_name)\n pex_builder.build(\n tmp_name,\n bytecode_compile=options.compile,\n deterministic_timestamp=not options.use_system_time\n )\n os.rename(tmp_name, options.pex_name)\n else:\n if not _compatible_with_current_platform(options.platforms):\n log('WARNING: attempting to run PEX with incompatible platforms!')\n\n log('Running 
PEX file at %s with args %s' % (pex_builder.path(), cmdline),\n V=options.verbosity)\n sys.exit(pex.run(args=list(cmdline)))\n\n\nif __name__ == '__main__':\n main()\n",
"path": "pex/bin/pex.py"
}
] | diff --git a/pex/bin/pex.py b/pex/bin/pex.py
index 1376c3bbc..7268aa712 100755
--- a/pex/bin/pex.py
+++ b/pex/bin/pex.py
@@ -60,7 +60,7 @@ def process_disable_cache(option, option_str, option_value, parser):
class PyPiSentinel(object):
def __str__(self):
- 'https://pypi.org/simple'
+ return 'https://pypi.org/simple'
_PYPI = PyPiSentinel()
|
liberapay__liberapay.com-726 | The list of top individuals is incomplete
While looking at https://liberapay.com/explore/individuals I realized that ploum isn't listed. It's because he doesn't have a profile statement. The thinking was that without a statement there isn't much to see on a profile page, so there's little point in linking to it. However it also makes the list incomplete.
| [
{
"content": "# coding: utf8\nfrom __future__ import print_function, unicode_literals\n\nfrom collections import namedtuple, OrderedDict\nfrom datetime import date, datetime, timedelta\nfrom decimal import Decimal, ROUND_UP\nimport re\n\nfrom jinja2 import StrictUndefined\nfrom pando.utils import utc\n\n\nclass CustomUndefined(StrictUndefined):\n __bool__ = __nonzero__ = lambda self: False\n\n def __str__(self):\n try:\n self._fail_with_undefined_error()\n except Exception as e:\n self._tell_sentry(e, {})\n return ''\n\n __unicode__ = __str__\n\n\ndef check_bits(bits):\n assert len(set(bits)) == len(bits) # no duplicates\n assert not [b for b in bits if '{0:b}'.format(b).count('1') != 1] # single bit\n\n\nEvent = namedtuple('Event', 'name bit title')\n\nFees = namedtuple('Fees', ('var', 'fix'))\n\nStandardTip = namedtuple('StandardTip', 'label weekly monthly yearly')\n\n\n_ = lambda a: a\n\nASCII_ALLOWED_IN_USERNAME = set(\"0123456789\"\n \"abcdefghijklmnopqrstuvwxyz\"\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n \"-_.\")\n\nAVATAR_QUERY = '?s=160&default=retro'\nAVATAR_SOURCES = 'libravatar bitbucket facebook github google mastodon twitter'.split()\n\nBIRTHDAY = date(2015, 5, 22)\n\nD_CENT = Decimal('0.01')\nD_INF = Decimal('inf')\nD_UNIT = Decimal('1.00')\nD_ZERO = Decimal('0.00')\n\nDONATION_LIMITS_WEEKLY = (Decimal('0.01'), Decimal('100.00'))\nDONATION_LIMITS = {\n 'weekly': DONATION_LIMITS_WEEKLY,\n 'monthly': tuple((x * Decimal(52) / Decimal(12)).quantize(D_CENT, rounding=ROUND_UP)\n for x in DONATION_LIMITS_WEEKLY),\n 'yearly': tuple((x * Decimal(52)).quantize(D_CENT)\n for x in DONATION_LIMITS_WEEKLY),\n}\nDONATION_WEEKLY_MIN, DONATION_WEEKLY_MAX = DONATION_LIMITS_WEEKLY\n\nDOMAIN_RE = re.compile(r'''\n ^\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+\n [a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nELSEWHERE_ACTIONS = {'connect', 'lock', 'unlock'}\n\nEMAIL_VERIFICATION_TIMEOUT = timedelta(hours=24)\nEMAIL_RE = re.compile(r'''\n # This is the regexp used by MangoPay (as of February 2017).\n # It rejects some valid but exotic addresses.\n # https://en.wikipedia.org/wiki/Email_address\n ^\n [a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(\\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*\n @\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nEPOCH = datetime(1970, 1, 1, 0, 0, 0, 0, utc)\n\nEVENTS = [\n Event('income', 1, _(\"When I receive money\")),\n Event('low_balance', 2, _(\"When there isn't enough money in my wallet to cover my donations\")),\n Event('withdrawal_created', 4, _(\"When a transfer to my bank account is initiated\")),\n Event('withdrawal_failed', 8, _(\"When a transfer to my bank account fails\")),\n Event('pledgee_joined', 16, _(\"When someone I pledge to joins Liberapay\")),\n Event('team_invite', 32, _(\"When someone invites me to join a team\")),\n Event('payin_bankwire_failed', 64, _(\"When a bank wire transfer to my Liberapay wallet fails\")),\n Event('payin_bankwire_succeeded', 128, _(\"When a bank wire transfer to my Liberapay wallet succeeds\")),\n Event('payin_bankwire_expired', 256, _(\"When a bank wire transfer to my Liberapay wallet expires\")),\n Event('payin_directdebit_failed', 512, _(\"When a direct debit from my bank account fails\")),\n Event('payin_directdebit_succeeded', 1024, _(\"When a direct debit from my bank account succeeds\")),\n]\ncheck_bits([e.bit for e in EVENTS])\nEVENTS = OrderedDict((e.name, e) for e in EVENTS)\nEVENTS_S = ' '.join(EVENTS.keys())\n\n# 
https://www.mangopay.com/pricing/\nFEE_PAYIN_BANK_WIRE = Fees(Decimal('0.005'), Decimal(0)) # 0.5%\nFEE_PAYIN_CARD = Fees(Decimal('0.018'), Decimal('0.18')) # 1.8% + €0.18\nFEE_PAYIN_DIRECT_DEBIT = Fees(Decimal(0), Decimal('0.80')) # €0.80\nFEE_PAYOUT = Fees(Decimal(0), Decimal(0))\nFEE_PAYOUT_OUTSIDE_SEPA = Fees(Decimal(0), Decimal('2.5'))\nFEE_PAYOUT_WARN = Decimal('0.03') # warn user when fee exceeds 3%\nFEE_VAT = Decimal('0.17') # 17% (Luxembourg rate)\n\nINVOICE_DOC_MAX_SIZE = 5000000\nINVOICE_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'png']\nINVOICE_DOCS_LIMIT = 10\n\nINVOICE_NATURES = {\n 'expense': _(\"Expense Report\"),\n}\n\nINVOICE_STATUSES = {\n 'pre': _(\"Draft\"),\n 'new': _(\"Sent (awaiting approval)\"),\n 'retracted': _(\"Retracted\"),\n 'accepted': _(\"Accepted (awaiting payment)\"),\n 'paid': _(\"Paid\"),\n 'rejected': _(\"Rejected\"),\n}\n\nJINJA_ENV_COMMON = dict(\n trim_blocks=True, lstrip_blocks=True,\n line_statement_prefix='%',\n # undefined=CustomUndefined,\n)\n\n# https://docs.mangopay.com/api-references/kyc-rules/\nKYC_DOC_MAX_SIZE = 7000000\nKYC_DOC_MAX_SIZE_MB = int(KYC_DOC_MAX_SIZE / 1000000)\nKYC_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'gif', 'png']\nKYC_DOCS_EXTS_STR = ', '.join(KYC_DOCS_EXTS)\nKYC_INCOME_THRESHOLDS = (\n (1, 18000),\n (2, 30000),\n (3, 50000),\n (4, 80000),\n (5, 120000),\n (6, 120000),\n)\nKYC_PAYIN_YEARLY_THRESHOLD = Decimal('2500')\nKYC_PAYOUT_YEARLY_THRESHOLD = Decimal('1000')\n\nLAUNCH_TIME = datetime(2016, 2, 3, 12, 50, 0, 0, utc)\n\nPARTICIPANT_KINDS = {\n 'individual': _(\"Individual\"),\n 'organization': _(\"Organization\"),\n 'group': _(\"Team\"),\n}\n\nPASSWORD_MIN_SIZE = 8\nPASSWORD_MAX_SIZE = 150\n\nPAYIN_BANK_WIRE_MIN = Decimal('2.00') # fee ≈ 0.99%\nPAYIN_BANK_WIRE_TARGET = Decimal('5.00') # fee ≈ 0.6%\nPAYIN_CARD_MIN = Decimal(\"15.00\") # fee ≈ 3.5%\nPAYIN_CARD_TARGET = Decimal(\"92.00\") # fee ≈ 2.33%\nPAYIN_DIRECT_DEBIT_MIN = Decimal('25.00') # fee ≈ 3.6%\nPAYIN_DIRECT_DEBIT_TARGET = Decimal('99.00') # fee ≈ 0.94%\n\nPERIOD_CONVERSION_RATES = {\n 'weekly': Decimal(1),\n 'monthly': Decimal(12) / Decimal(52),\n 'yearly': Decimal(1) / Decimal(52),\n}\n\nPOSTAL_ADDRESS_KEYS = (\n 'AddressLine1', 'AddressLine2', 'City', 'Region', 'PostalCode', 'Country'\n)\n\nPRIVACY_FIELDS = OrderedDict([\n ('hide_giving', _(\"Hide total giving from others.\")),\n ('hide_receiving', _(\"Hide total receiving from others.\")),\n ('hide_from_search', _(\"Hide myself from search results on Liberapay.\")),\n ('profile_noindex', _(\"Tell web search engines not to index my profile.\")),\n ('hide_from_lists', _(\"Prevent my profile from being listed on Liberapay.\")),\n])\nPRIVACY_FIELDS_S = ' '.join(PRIVACY_FIELDS.keys())\n\nPRIVILEGES = dict(admin=1, run_payday=2)\ncheck_bits(list(PRIVILEGES.values()))\n\nQUARANTINE = timedelta(weeks=4)\n\nRATE_LIMITS = {\n 'add_email.source': (5, 60*60*24), # 5 per day\n 'add_email.target': (2, 60*60*24), # 2 per day\n 'change_username': (7, 60*60*24*7), # 7 per week\n 'log-in.email': (10, 60*60*24), # 10 per day\n 'log-in.email.not-verified': (2, 60*60*24), # 2 per day\n 'log-in.email.verified': (10, 60*60*24), # 10 per day\n 'log-in.password': (3, 60*60), # 3 per hour\n 'sign-up.ip-addr': (5, 60*60), # 5 per hour per IP address\n 'sign-up.ip-net': (15, 15*60), # 15 per 15 minutes per IP network\n 'sign-up.ip-version': (15, 15*60), # 15 per 15 minutes per IP version\n}\n\nSEPA = set(\"\"\"\n AT BE BG CH CY CZ DE DK EE ES ES FI FR GB GI GR HR HU IE IS IT LI LT LU LV\n MC MT NL NO PL PT RO SE SI SK\n\"\"\".split())\n\nSESSION 
= str('session') # bytes in python2, unicode in python3\nSESSION_REFRESH = timedelta(hours=1)\nSESSION_TIMEOUT = timedelta(hours=6)\n\n\ndef make_standard_tip(label, weekly):\n return StandardTip(\n label,\n weekly,\n weekly / PERIOD_CONVERSION_RATES['monthly'],\n weekly / PERIOD_CONVERSION_RATES['yearly'],\n )\n\n\nSTANDARD_TIPS = (\n make_standard_tip(_(\"Symbolic\"), Decimal('0.01')),\n make_standard_tip(_(\"Small\"), Decimal('0.25')),\n make_standard_tip(_(\"Medium\"), Decimal('1.00')),\n make_standard_tip(_(\"Large\"), Decimal('5.00')),\n make_standard_tip(_(\"Maximum\"), DONATION_WEEKLY_MAX),\n)\n\nUSERNAME_MAX_SIZE = 32\n\ndel _\n",
"path": "liberapay/constants.py"
}
] | [
{
"content": "# coding: utf8\nfrom __future__ import print_function, unicode_literals\n\nfrom collections import namedtuple, OrderedDict\nfrom datetime import date, datetime, timedelta\nfrom decimal import Decimal, ROUND_UP\nimport re\n\nfrom jinja2 import StrictUndefined\nfrom pando.utils import utc\n\n\nclass CustomUndefined(StrictUndefined):\n __bool__ = __nonzero__ = lambda self: False\n\n def __str__(self):\n try:\n self._fail_with_undefined_error()\n except Exception as e:\n self._tell_sentry(e, {})\n return ''\n\n __unicode__ = __str__\n\n\ndef check_bits(bits):\n assert len(set(bits)) == len(bits) # no duplicates\n assert not [b for b in bits if '{0:b}'.format(b).count('1') != 1] # single bit\n\n\nEvent = namedtuple('Event', 'name bit title')\n\nFees = namedtuple('Fees', ('var', 'fix'))\n\nStandardTip = namedtuple('StandardTip', 'label weekly monthly yearly')\n\n\n_ = lambda a: a\n\nASCII_ALLOWED_IN_USERNAME = set(\"0123456789\"\n \"abcdefghijklmnopqrstuvwxyz\"\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n \"-_.\")\n\nAVATAR_QUERY = '?s=160&default=retro'\nAVATAR_SOURCES = 'libravatar bitbucket facebook github google mastodon twitter'.split()\n\nBIRTHDAY = date(2015, 5, 22)\n\nD_CENT = Decimal('0.01')\nD_INF = Decimal('inf')\nD_UNIT = Decimal('1.00')\nD_ZERO = Decimal('0.00')\n\nDONATION_LIMITS_WEEKLY = (Decimal('0.01'), Decimal('100.00'))\nDONATION_LIMITS = {\n 'weekly': DONATION_LIMITS_WEEKLY,\n 'monthly': tuple((x * Decimal(52) / Decimal(12)).quantize(D_CENT, rounding=ROUND_UP)\n for x in DONATION_LIMITS_WEEKLY),\n 'yearly': tuple((x * Decimal(52)).quantize(D_CENT)\n for x in DONATION_LIMITS_WEEKLY),\n}\nDONATION_WEEKLY_MIN, DONATION_WEEKLY_MAX = DONATION_LIMITS_WEEKLY\n\nDOMAIN_RE = re.compile(r'''\n ^\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+\n [a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nELSEWHERE_ACTIONS = {'connect', 'lock', 'unlock'}\n\nEMAIL_VERIFICATION_TIMEOUT = timedelta(hours=24)\nEMAIL_RE = re.compile(r'''\n # This is the regexp used by MangoPay (as of February 2017).\n # It rejects some valid but exotic addresses.\n # https://en.wikipedia.org/wiki/Email_address\n ^\n [a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(\\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*\n @\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nEPOCH = datetime(1970, 1, 1, 0, 0, 0, 0, utc)\n\nEVENTS = [\n Event('income', 1, _(\"When I receive money\")),\n Event('low_balance', 2, _(\"When there isn't enough money in my wallet to cover my donations\")),\n Event('withdrawal_created', 4, _(\"When a transfer to my bank account is initiated\")),\n Event('withdrawal_failed', 8, _(\"When a transfer to my bank account fails\")),\n Event('pledgee_joined', 16, _(\"When someone I pledge to joins Liberapay\")),\n Event('team_invite', 32, _(\"When someone invites me to join a team\")),\n Event('payin_bankwire_failed', 64, _(\"When a bank wire transfer to my Liberapay wallet fails\")),\n Event('payin_bankwire_succeeded', 128, _(\"When a bank wire transfer to my Liberapay wallet succeeds\")),\n Event('payin_bankwire_expired', 256, _(\"When a bank wire transfer to my Liberapay wallet expires\")),\n Event('payin_directdebit_failed', 512, _(\"When a direct debit from my bank account fails\")),\n Event('payin_directdebit_succeeded', 1024, _(\"When a direct debit from my bank account succeeds\")),\n]\ncheck_bits([e.bit for e in EVENTS])\nEVENTS = OrderedDict((e.name, e) for e in EVENTS)\nEVENTS_S = ' '.join(EVENTS.keys())\n\n# 
https://www.mangopay.com/pricing/\nFEE_PAYIN_BANK_WIRE = Fees(Decimal('0.005'), Decimal(0)) # 0.5%\nFEE_PAYIN_CARD = Fees(Decimal('0.018'), Decimal('0.18')) # 1.8% + €0.18\nFEE_PAYIN_DIRECT_DEBIT = Fees(Decimal(0), Decimal('0.80')) # €0.80\nFEE_PAYOUT = Fees(Decimal(0), Decimal(0))\nFEE_PAYOUT_OUTSIDE_SEPA = Fees(Decimal(0), Decimal('2.5'))\nFEE_PAYOUT_WARN = Decimal('0.03') # warn user when fee exceeds 3%\nFEE_VAT = Decimal('0.17') # 17% (Luxembourg rate)\n\nINVOICE_DOC_MAX_SIZE = 5000000\nINVOICE_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'png']\nINVOICE_DOCS_LIMIT = 10\n\nINVOICE_NATURES = {\n 'expense': _(\"Expense Report\"),\n}\n\nINVOICE_STATUSES = {\n 'pre': _(\"Draft\"),\n 'new': _(\"Sent (awaiting approval)\"),\n 'retracted': _(\"Retracted\"),\n 'accepted': _(\"Accepted (awaiting payment)\"),\n 'paid': _(\"Paid\"),\n 'rejected': _(\"Rejected\"),\n}\n\nJINJA_ENV_COMMON = dict(\n trim_blocks=True, lstrip_blocks=True,\n line_statement_prefix='%',\n # undefined=CustomUndefined,\n)\n\n# https://docs.mangopay.com/api-references/kyc-rules/\nKYC_DOC_MAX_SIZE = 7000000\nKYC_DOC_MAX_SIZE_MB = int(KYC_DOC_MAX_SIZE / 1000000)\nKYC_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'gif', 'png']\nKYC_DOCS_EXTS_STR = ', '.join(KYC_DOCS_EXTS)\nKYC_INCOME_THRESHOLDS = (\n (1, 18000),\n (2, 30000),\n (3, 50000),\n (4, 80000),\n (5, 120000),\n (6, 120000),\n)\nKYC_PAYIN_YEARLY_THRESHOLD = Decimal('2500')\nKYC_PAYOUT_YEARLY_THRESHOLD = Decimal('1000')\n\nLAUNCH_TIME = datetime(2016, 2, 3, 12, 50, 0, 0, utc)\n\nPARTICIPANT_KINDS = {\n 'individual': _(\"Individual\"),\n 'organization': _(\"Organization\"),\n 'group': _(\"Team\"),\n}\n\nPASSWORD_MIN_SIZE = 8\nPASSWORD_MAX_SIZE = 150\n\nPAYIN_BANK_WIRE_MIN = Decimal('2.00') # fee ≈ 0.99%\nPAYIN_BANK_WIRE_TARGET = Decimal('5.00') # fee ≈ 0.6%\nPAYIN_CARD_MIN = Decimal(\"15.00\") # fee ≈ 3.5%\nPAYIN_CARD_TARGET = Decimal(\"92.00\") # fee ≈ 2.33%\nPAYIN_DIRECT_DEBIT_MIN = Decimal('25.00') # fee ≈ 3.6%\nPAYIN_DIRECT_DEBIT_TARGET = Decimal('99.00') # fee ≈ 0.94%\n\nPERIOD_CONVERSION_RATES = {\n 'weekly': Decimal(1),\n 'monthly': Decimal(12) / Decimal(52),\n 'yearly': Decimal(1) / Decimal(52),\n}\n\nPOSTAL_ADDRESS_KEYS = (\n 'AddressLine1', 'AddressLine2', 'City', 'Region', 'PostalCode', 'Country'\n)\n\nPRIVACY_FIELDS = OrderedDict([\n ('hide_giving', _(\"Hide total giving from others.\")),\n ('hide_receiving', _(\"Hide total receiving from others.\")),\n ('hide_from_search', _(\"Hide myself from search results on Liberapay.\")),\n ('profile_noindex', _(\"Tell web search engines not to index my profile.\")),\n ('hide_from_lists', _(\"Prevent my profile from being listed on Liberapay.\")),\n])\nPRIVACY_FIELDS_S = ' '.join(PRIVACY_FIELDS.keys())\n\nPRIVILEGES = dict(admin=1, run_payday=2)\ncheck_bits(list(PRIVILEGES.values()))\n\nQUARANTINE = timedelta(weeks=4)\n\nRATE_LIMITS = {\n 'add_email.source': (5, 60*60*24), # 5 per day\n 'add_email.target': (2, 60*60*24), # 2 per day\n 'change_username': (7, 60*60*24*7), # 7 per week\n 'log-in.email': (10, 60*60*24), # 10 per day\n 'log-in.email.not-verified': (2, 60*60*24), # 2 per day\n 'log-in.email.verified': (10, 60*60*24), # 10 per day\n 'log-in.password': (3, 60*60), # 3 per hour\n 'sign-up.ip-addr': (5, 60*60), # 5 per hour per IP address\n 'sign-up.ip-net': (15, 15*60), # 15 per 15 minutes per IP network\n 'sign-up.ip-version': (15, 15*60), # 15 per 15 minutes per IP version\n}\n\nSEPA = set(\"\"\"\n AT BE BG CH CY CZ DE DK EE ES ES FI FR GB GI GR HR HU IE IS IT LI LT LU LV\n MC MT NL NO PL PT RO SE SI SK\n\"\"\".split())\n\nSESSION 
= str('session') # bytes in python2, unicode in python3\nSESSION_REFRESH = timedelta(hours=1)\nSESSION_TIMEOUT = timedelta(hours=6)\n\n\ndef make_standard_tip(label, weekly):\n return StandardTip(\n label,\n weekly,\n weekly / PERIOD_CONVERSION_RATES['monthly'],\n weekly / PERIOD_CONVERSION_RATES['yearly'],\n )\n\n\nSTANDARD_TIPS = (\n make_standard_tip(_(\"Symbolic\"), Decimal('0.01')),\n make_standard_tip(_(\"Small\"), Decimal('0.25')),\n make_standard_tip(_(\"Medium\"), Decimal('1.00')),\n make_standard_tip(_(\"Large\"), Decimal('5.00')),\n make_standard_tip(_(\"Maximum\"), DONATION_WEEKLY_MAX),\n)\n\nSUMMARY_MAX_SIZE = 100\n\nUSERNAME_MAX_SIZE = 32\n\ndel _\n",
"path": "liberapay/constants.py"
}
] | diff --git a/liberapay/constants.py b/liberapay/constants.py
index 47b3c9dccd..c03372d92a 100644
--- a/liberapay/constants.py
+++ b/liberapay/constants.py
@@ -232,6 +232,8 @@ def make_standard_tip(label, weekly):
make_standard_tip(_("Maximum"), DONATION_WEEKLY_MAX),
)
+SUMMARY_MAX_SIZE = 100
+
USERNAME_MAX_SIZE = 32
del _
diff --git a/sql/branch.sql b/sql/branch.sql
new file mode 100644
index 0000000000..802ffa1737
--- /dev/null
+++ b/sql/branch.sql
@@ -0,0 +1 @@
+ALTER TYPE stmt_type ADD VALUE IF NOT EXISTS 'summary';
diff --git a/style/base/base.scss b/style/base/base.scss
index 9abbd62c91..1ecded0c75 100644
--- a/style/base/base.scss
+++ b/style/base/base.scss
@@ -191,6 +191,10 @@ img.account-type {
font-size: 20px;
margin: 0 0 10px;
}
+ h4 + p.summary {
+ color: $gray-light;
+ margin-top: -5px;
+ }
.radio {
margin-top: 0;
}
diff --git a/templates/profile-box.html b/templates/profile-box.html
index ed56dd5e09..a884d7a1c3 100644
--- a/templates/profile-box.html
+++ b/templates/profile-box.html
@@ -8,9 +8,9 @@
% endcall
% endmacro
-% macro profile_box_embedded(participant, nmembers=None)
+% macro profile_box_embedded(participant, summary, nmembers=None)
% call profile_box(participant, embedded=True)
- {{ profile_box_embedded_col2(participant, nmembers=nmembers) }}
+ {{ profile_box_embedded_col2(participant, summary, nmembers=nmembers) }}
% endcall
% endmacro
@@ -93,13 +93,17 @@ <h1>{{ username }}</h1>
% endif
% endmacro
-% macro profile_box_embedded_col2(participant, nmembers=None)
+% macro profile_box_embedded_col2(participant, summary, nmembers=None)
% set username = participant.username
% set receiving = participant.receiving
% set goal = participant.goal
<h4><a href="/{{ username }}/">{{ username }}</a></h4>
+ % if summary
+ <p class="summary">{{ summary }}</p>
+ % endif
+
% if participant.hide_receiving
% elif goal == None
<p>{{ _("Income: {0}/week", Money(receiving, 'EUR')) }}</p>
diff --git a/www/%username/edit.spt b/www/%username/edit.spt
index 61ef4c006e..cf2b3f7177 100644
--- a/www/%username/edit.spt
+++ b/www/%username/edit.spt
@@ -9,25 +9,36 @@ participant = get_participant(state, restrict=True, allow_member=True)
if request.method == 'POST':
lang = request.body['lang']
+ summary = request.body.get('summary') or ''
statement = request.body['statement']
if lang not in LANGUAGES_2:
raise response.error(400, "unknown lang")
+ if len(summary) > constants.SUMMARY_MAX_SIZE:
+ raise response.error(400, _(
+ "The submitted summary is too long ({0} > {1}).",
+ len(summary), constants.SUMMARY_MAX_SIZE)
+ )
+
if request.body.get('save') == 'true':
- participant.upsert_statement(lang, statement)
+ participant.upsert_statement(lang, summary, 'summary')
+ participant.upsert_statement(lang, statement, 'profile')
response.redirect(request.line.uri+'#statement')
else:
lang = request.qs.get('lang')
if lang:
+ if lang not in LANGUAGES_2:
+ raise response.error(400, "unknown lang")
statement = participant.get_statement(lang)
else:
statement, lang = participant.get_statement(request.accept_langs)
+ if not lang:
+ lang = locale.language
+ summary = participant.get_statement(lang, 'summary') or ''
select_langs = get_lang_options(request, locale, participant.get_statement_langs())
-lang = lang or locale.language
-stmt_placeholder = _("You don't have a profile statement in this language yet.")
confirm_discard = _("You haven't saved your changes, are you sure you want to discard them?")
if participant.kind == 'individual':
@@ -155,6 +166,11 @@ title = _username
<form action="#statement" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}" />
<input type="hidden" name="lang" value="{{ lang }}" />
+ % if summary
+ <input type="hidden" name="summary" value="{{ summary }}" />
+ <p class="summary">{{ summary }}</p>
+ <hr>
+ % endif
<textarea class="hidden" name="statement">{{ statement }}</textarea>
<section class="profile-statement">{{ rendered_stmt }}</section>
<hr>
@@ -167,26 +183,42 @@ title = _username
% else
- <p>{{ _("Tell us how you're making the world better.") }}</p>
-
<p>{{ _(
- "Liberapay allows you to have profile statements in multiple languages. "
- "Use the selector below to switch between them."
+ "Describe your work, why you're asking for donations, etc. We need "
+ "both a short summary and a full statement."
) }}</p>
+ <p>{{ _(
+ "Liberapay allows you to internationalize your texts. "
+ "Use the selector below to switch between languages.")
+ }}</p>
+
<form action="#statement" method="POST" class="statement">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}" />
<input type="hidden" name="lang" value="{{ lang }}" />
- {{ _("Current language: {0}", locale.languages.get(lang, lang.upper())) }}
- <textarea name="statement" rows="15" class="form-control profile-statement vertical-resize"
- placeholder="{{ stmt_placeholder }}"
+
+ <p>{{ _("Current language: {0}",
+ '<b>%s</b>'|safe % locale.languages.get(lang, lang.upper())) }}</p>
+
+ <div class="form-group">
+ <input name="summary" class="form-control" size=60
+ maxlength="{{ constants.SUMMARY_MAX_SIZE }}"
+ placeholder="{{ _('Short description') }}"
+ value="{{ summary }}" />
+ </div>
+
+ <div class="form-group">
+ <textarea name="statement" rows="15"
+ class="form-control profile-statement vertical-resize"
+ placeholder="{{ _('Full statement') }}"
data-confirm-discard="{{ confirm_discard }}"
>{{ statement or '' }}</textarea>
<p class="help-block pull-right">{{ _("Markdown supported.") }}
<a href="https://daringfireball.net/projects/markdown/basics"
target="_blank" rel="noopener noreferrer">{{ _("What is markdown?") }}</a>
</p>
- <p> </p>{# this is for spacing #}
+ </div>
+
<button class="preview btn btn-default" name="preview" value="true">{{ _("Preview") }}</button>
<button class="save btn btn-success" name="save" value="true">{{ _("Save") }}</button>
</form>
diff --git a/www/%username/index.html.spt b/www/%username/index.html.spt
index 7add1edc4b..ce7c233d2c 100644
--- a/www/%username/index.html.spt
+++ b/www/%username/index.html.spt
@@ -14,6 +14,7 @@ if lang:
else:
statement, lang = participant.get_statement(request.accept_langs)
statement = markdown.render(statement) if statement else None
+summary = participant.get_statement(lang, 'summary')
langs = participant.get_statement_langs()
@@ -26,8 +27,8 @@ show_income = not participant.hide_receiving and participant.accepts_tips
% block head_early
{{ super() }}
-% if statement
- <meta property="og:description" content="{{ excerpt_intro(statement) }}">
+% if statement or summary
+ <meta property="og:description" content="{{ excerpt_intro(statement) or summary }}">
% endif
% endblock
diff --git a/www/explore/individuals.spt b/www/explore/individuals.spt
index 8fe2b006ea..593b15d6b7 100644
--- a/www/explore/individuals.spt
+++ b/www/explore/individuals.spt
@@ -6,6 +6,13 @@ query_cache = website.db_qc5
individuals = query_cache.all("""
SELECT p
+ , ( SELECT s.content
+ FROM statements s
+ WHERE s.participant = p.id
+ AND s.type = 'summary'
+ ORDER BY s.lang = %s DESC, s.id
+ LIMIT 1
+ ) AS summary
FROM participants p
WHERE p.kind = 'individual'
AND p.status = 'active'
@@ -13,10 +20,9 @@ individuals = query_cache.all("""
AND p.hide_receiving IS NOT TRUE
AND p.hide_from_lists = 0
AND p.receiving > 0
- AND EXISTS (SELECT 1 FROM statements s WHERE s.participant = p.id)
ORDER BY p.receiving DESC, p.join_time DESC
LIMIT 30
-""")
+""", (locale.language,))
title = _("Explore")
subhead = _("Individuals")
@@ -30,9 +36,9 @@ subhead = _("Individuals")
% if individuals
<p>{{ _("The top {0} individuals on Liberapay are:", len(individuals)) }}</p>
<div class="row">
- % for p in individuals
+ % for p, summary in individuals
<div class="col-md-6">
- {{ profile_box_embedded(p) }}
+ {{ profile_box_embedded(p, summary) }}
</div>
% endfor
</div>
diff --git a/www/explore/organizations.spt b/www/explore/organizations.spt
index ba038bba1e..549f1b5592 100644
--- a/www/explore/organizations.spt
+++ b/www/explore/organizations.spt
@@ -6,6 +6,13 @@ query_cache = website.db_qc5
orgs_receiving = query_cache.all("""
SELECT p
+ , ( SELECT s.content
+ FROM statements s
+ WHERE s.participant = p.id
+ AND s.type = 'summary'
+ ORDER BY s.lang = %s DESC, s.id
+ LIMIT 1
+ ) AS summary
FROM participants p
WHERE p.kind = 'organization'
AND p.status = 'active'
@@ -13,10 +20,9 @@ orgs_receiving = query_cache.all("""
AND p.hide_receiving IS NOT TRUE
AND p.hide_from_lists = 0
AND p.receiving > 0
- AND EXISTS (SELECT 1 FROM statements s WHERE s.participant = p.id)
ORDER BY p.receiving DESC, p.join_time DESC
LIMIT 30
-""")
+""", (locale.language,))
title = _("Explore")
subhead = _("Organizations")
@@ -30,9 +36,9 @@ subhead = _("Organizations")
% if orgs_receiving
<p>{{ _("The top {0} organizations on Liberapay are:", len(orgs_receiving)) }}</p>
<div class="row">
- % for p in orgs_receiving
+ % for p, summary in orgs_receiving
<div class="col-md-6">
- {{ profile_box_embedded(p) }}
+ {{ profile_box_embedded(p, summary) }}
</div>
% endfor
</div>
diff --git a/www/explore/teams.spt b/www/explore/teams.spt
index 3ddcc0a9aa..7159b008c3 100644
--- a/www/explore/teams.spt
+++ b/www/explore/teams.spt
@@ -8,17 +8,23 @@ teams = query_cache.all("""
SELECT t.*
, p.*::participants AS participant
+ , ( SELECT s.content
+ FROM statements s
+ WHERE s.participant = p.id
+ AND s.type = 'summary'
+ ORDER BY s.lang = %s DESC, s.id
+ LIMIT 1
+ ) AS summary
FROM ( SELECT team AS id, count(member) AS nmembers
FROM current_takes
GROUP BY team
) AS t
- JOIN participants p
- ON p.id = t.id
+ JOIN participants p ON p.id = t.id
WHERE (p.goal >= 0 OR p.goal IS NULL)
AND p.hide_from_lists = 0
ORDER BY receiving DESC, join_time DESC
-""")
+""", (locale.language,))
nteams = len(teams)
title = _("Explore")
subhead = _("Teams")
@@ -44,7 +50,7 @@ subhead = _("Teams")
<div class="row">
% for team in teams
<div class="col-md-6">
- {{ profile_box_embedded(team.participant, nmembers=team.nmembers) }}
+ {{ profile_box_embedded(team.participant, team.summary, nmembers=team.nmembers) }}
</div>
% endfor
</div>
diff --git a/www/for/%name/edit.spt b/www/for/%name/edit.spt
index 8fe2c57404..e0abf7c285 100644
--- a/www/for/%name/edit.spt
+++ b/www/for/%name/edit.spt
@@ -44,7 +44,8 @@ title = _("{0} community settings", c.name)
"Use the selector below to switch between languages.")
}}</p>
-<p>{{ _("Current language: {0}", locale.languages.get(lang, lang.upper())) }}</p>
+<p>{{ _("Current language: {0}",
+ '<b>%s</b>'|safe % locale.languages.get(lang, lang.upper())) }}</p>
<form action="" class="block-labels" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}" />
diff --git a/www/search.spt b/www/search.spt
index a5f7d17272..286e5fc9dd 100644
--- a/www/search.spt
+++ b/www/search.spt
@@ -25,7 +25,7 @@ if query:
""", locals())
if scope in (None, 'statements'):
- langs = tuple(l for l in request.accept_langs if l in LANGUAGES_2)
+ langs = tuple(l for l in request.accept_langs[:3] if l in LANGUAGES_2)
search_confs = list(set(SEARCH_CONFS.get(lang, 'simple') for lang in langs))
results['statements'] = website.db.all("""
WITH queries AS (
@@ -40,14 +40,14 @@ if query:
SELECT rank
, lang
, ts_headline(search_conf, content, query,
- 'StartSel=**,StopSel=**,MaxFragments=1') AS excerpt
+ 'StartSel=**,StopSel=**,MaxFragments=1,ShortWord=0') AS excerpt
) a)) AS excerpts
FROM (
SELECT participant, lang, content, search_conf, query
, ts_rank_cd(search_vector, query) AS rank
FROM statements NATURAL JOIN queries
WHERE lang IN %(langs)s
- AND type = 'profile'
+ AND type IN ('profile', 'summary')
AND search_vector @@ query
ORDER BY rank DESC
LIMIT 10
|
scikit-hep__pyhf-1540 | Split "Use in Publications" into use cases and general citations
> Technically speaking we don't actually use `pyhf` to obtain the results of our paper,
Yeah, as you correctly point out, we just have the "list of citations and use cases of `pyhf`" under "[Use in Publications](https://scikit-hep.org/pyhf/citations.html#use-in-publications)", and we should probably split that out into actual use cases vs. just citations like this one.
_Originally posted by @matthewfeickert in https://github.com/scikit-hep/pyhf/issues/1537#issuecomment-890282084_
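A minimal sketch of what the split could look like on the Sphinx side, assuming a new `docs/bib/general_citations.bib` file is introduced alongside the existing `use_citations.bib` (sphinxcontrib-bibtex only picks up files listed in `bibtex_bibfiles`):

```python
# docs/conf.py -- sketch: register a second bib file so "use" and "general"
# citations can be rendered by separate `.. bibliography::` directives.
bibtex_bibfiles = [
    # ... existing bib files ...
    "bib/use_citations.bib",      # analyses that actually use pyhf for their results
    "bib/general_citations.bib",  # assumed new file: papers that only cite/mention pyhf
]
```

The citations page would then render each file under its own subsection, each with its own `.. bibliography::` directive pointing at the corresponding bib file.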
| [
{
"content": "#\n# pyhf documentation build configuration file, created by\n# sphinx-quickstart on Fri Feb 9 11:58:49 2018.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use Path('../relative_path_to_dir').resolve() to make it absolute, like shown here.\n\nfrom pathlib import Path\nimport sys\nfrom pkg_resources import get_distribution\n\nsys.path.insert(0, str(Path('./exts').resolve()))\n\n\ndef setup(app):\n app.add_css_file(\n 'https://cdnjs.cloudflare.com/ajax/libs/github-fork-ribbon-css/0.2.2/gh-fork-ribbon.min.css'\n )\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.coverage',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.ifconfig',\n 'sphinx.ext.viewcode',\n 'sphinx.ext.githubpages',\n 'sphinx.ext.intersphinx',\n 'sphinxcontrib.bibtex',\n 'sphinx.ext.napoleon',\n 'sphinx_click.ext',\n 'nbsphinx',\n 'sphinx_issues',\n 'sphinx_copybutton',\n 'xref',\n]\nbibtex_bibfiles = [\n \"bib/docs.bib\",\n \"bib/HEPData_likelihoods.bib\",\n \"bib/media.bib\",\n \"bib/posters.bib\",\n \"bib/preferred.bib\",\n \"bib/talks.bib\",\n \"bib/tutorials.bib\",\n \"bib/use_citations.bib\",\n]\n\n# external links\nxref_links = {\"arXiv:1007.1727\": (\"[1007.1727]\", \"https://arxiv.org/abs/1007.1727\")}\n\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3', None),\n 'numpy': ('https://numpy.org/doc/stable/', None),\n 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),\n 'matplotlib': ('https://matplotlib.org/stable/', None),\n 'iminuit': ('https://iminuit.readthedocs.io/en/stable/', None),\n 'uproot': ('https://uproot.readthedocs.io/en/latest/', None),\n}\n\n# GitHub repo\nissues_github_path = 'scikit-hep/pyhf'\n\n# Generate the API documentation when building\nautosummary_generate = True\nnumpydoc_show_class_members = False\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\nsource_suffix = ['.rst', '.md']\n# source_suffix = '.rst'\n\n# The encoding of source files.\n#\n# source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'pyhf'\ncopyright = '2018, Lukas Heinrich, Matthew Feickert, Giordon Stark'\nauthor = 'Lukas Heinrich, Matthew Feickert, Giordon Stark'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n# The full version, including alpha/beta/rc tags.\nrelease = get_distribution('pyhf').version\n# for example take major/minor/patch\nversion = '.'.join(release.split('.')[:3])\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#\n# today = ''\n#\n# Else, today_fmt is used as the format for a strftime call.\n#\n# today_fmt = '%B %d, %Y'\n\nautodoc_mock_imports = [\n 'tensorflow',\n 'torch',\n 'jax',\n 'iminuit',\n 'tensorflow_probability',\n]\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = [\n '_build',\n 'JOSS',\n '**.ipynb_checkpoints',\n 'examples/experiments/edwardpyhf.ipynb',\n 'examples/notebooks/ImpactPlot.ipynb',\n 'examples/notebooks/Recast.ipynb',\n 'examples/notebooks/StatError.ipynb',\n 'examples/notebooks/example-tensorflow.ipynb',\n 'examples/notebooks/histogrammar.ipynb',\n 'examples/notebooks/histosys.ipynb',\n 'examples/notebooks/histosys-pytorch.ipynb',\n 'examples/notebooks/importxml.ipynb',\n 'examples/notebooks/multichannel-coupled-normsys.ipynb',\n 'examples/notebooks/multichannel-normsys.ipynb',\n 'examples/notebooks/normsys.ipynb',\n 'examples/notebooks/pullplot.ipynb',\n 'examples/notebooks/pytorch_tests_onoff.ipynb',\n 'examples/notebooks/tensorflow-limit.ipynb',\n]\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#\n# default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n#\n# add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#\n# add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#\n# show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n# keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\nhtml_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\nhtml_theme_path = []\n\n# The name for this set of Sphinx documents.\n# \"<project> v<release> documentation\" by default.\n#\n# html_title = u'pyhf v0.3.0'\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n#\n# html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#\n# html_logo = None\n\n# The name of an image file (relative to this directory) to use as a favicon of\n# the docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#\n# html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\nhtml_css_files = [\n 'css/custom.css',\n]\n\nhtml_js_files = [\n 'js/custom.js',\n]\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#\nhtml_extra_path = ['_extras']\n\n# If not None, a 'Last updated on:' timestamp is inserted at every page\n# bottom, using the given strftime format.\n# The empty string is equivalent to '%b %d, %Y'.\n#\n# html_last_updated_fmt = None\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#\n# html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#\n# html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#\n# html_additional_pages = {}\n\n# If false, no module index is generated.\n#\n# html_domain_indices = True\n\n# If false, no index is generated.\n#\n# html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#\n# html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#\n# html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#\n# html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. The value of this option must be the\n# base URL from which the finished HTML is served.\n#\n# html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n# html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'\n#\n# html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# 'ja' uses this config value.\n# 'zh' user can custom change `jieba` dictionary path.\n#\n# html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#\n# html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'pyhfdoc'\n\n# sphinx-copybutton configuration\ncopybutton_prompt_text = \">>> \"\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (\n master_doc,\n 'pyhf.tex',\n 'pyhf Documentation',\n 'Lukas Heinrich, Matthew Feickert, Giordon Stark',\n 'manual',\n )\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n#\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#\n# latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#\n# latex_appendices = []\n\n# It false, will not define \\strong, \\code, \titleref, \\crossref ... but only\n# \\sphinxstrong, ..., \\sphinxtitleref, ... To help avoid clash with user added\n# packages.\n#\n# latex_keep_old_macro_names = True\n\n# If false, no module index is generated.\n#\n# latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [(master_doc, 'pyhf', 'pyhf Documentation', [author], 1)]\n\n# If true, show URL addresses after external links.\n#\n# man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (\n master_doc,\n 'pyhf',\n 'pyhf Documentation',\n author,\n 'pyhf',\n 'One line description of project.',\n 'Miscellaneous',\n )\n]\n\n# Documents to append as an appendix to all manuals.\n#\n# texinfo_appendices = []\n\n# If false, no module index is generated.\n#\n# texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#\n# texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#\n# texinfo_no_detailmenu = False\n\nmathjax3_config = {\n 'tex2jax': {'inlineMath': [['$', '$'], ['\\\\(', '\\\\)']]},\n 'tex': {\n 'macros': {\n 'bm': [\"\\\\boldsymbol{#1}\", 1], # \\usepackage{bm}, see mathjax/MathJax#1219\n 'HiFa': r'\\texttt{HistFactory}',\n 'Root': r'\\texttt{ROOT}',\n 'RooStats': r'\\texttt{RooStats}',\n 'RooFit': r'\\texttt{RooFit}',\n 'pyhf': r'\\texttt{pyhf}',\n 'CLs': r'\\mathrm{CL}_{s}',\n 'freeset': r'\\bm{\\eta}',\n 'constrset': r'\\bm{\\chi}',\n 'singleconstr': r'\\chi',\n 'channelcounts': r'\\bm{n}',\n 'auxdata': r'\\bm{a}',\n 'poiset': r'\\bm{\\psi}',\n 'nuisset': r'\\bm{\\theta}',\n 'fullset': r'\\bm{\\phi}',\n 'singlefull': r'\\phi',\n 'TeV': r'\\textrm{TeV}',\n }\n },\n}\n",
"path": "docs/conf.py"
}
] | [
{
"content": "#\n# pyhf documentation build configuration file, created by\n# sphinx-quickstart on Fri Feb 9 11:58:49 2018.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use Path('../relative_path_to_dir').resolve() to make it absolute, like shown here.\n\nfrom pathlib import Path\nimport sys\nfrom pkg_resources import get_distribution\n\nsys.path.insert(0, str(Path('./exts').resolve()))\n\n\ndef setup(app):\n app.add_css_file(\n 'https://cdnjs.cloudflare.com/ajax/libs/github-fork-ribbon-css/0.2.2/gh-fork-ribbon.min.css'\n )\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.coverage',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.ifconfig',\n 'sphinx.ext.viewcode',\n 'sphinx.ext.githubpages',\n 'sphinx.ext.intersphinx',\n 'sphinxcontrib.bibtex',\n 'sphinx.ext.napoleon',\n 'sphinx_click.ext',\n 'nbsphinx',\n 'sphinx_issues',\n 'sphinx_copybutton',\n 'xref',\n]\nbibtex_bibfiles = [\n \"bib/docs.bib\",\n \"bib/HEPData_likelihoods.bib\",\n \"bib/media.bib\",\n \"bib/posters.bib\",\n \"bib/preferred.bib\",\n \"bib/talks.bib\",\n \"bib/tutorials.bib\",\n \"bib/use_citations.bib\",\n \"bib/general_citations.bib\",\n]\n\n# external links\nxref_links = {\"arXiv:1007.1727\": (\"[1007.1727]\", \"https://arxiv.org/abs/1007.1727\")}\n\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3', None),\n 'numpy': ('https://numpy.org/doc/stable/', None),\n 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),\n 'matplotlib': ('https://matplotlib.org/stable/', None),\n 'iminuit': ('https://iminuit.readthedocs.io/en/stable/', None),\n 'uproot': ('https://uproot.readthedocs.io/en/latest/', None),\n}\n\n# GitHub repo\nissues_github_path = 'scikit-hep/pyhf'\n\n# Generate the API documentation when building\nautosummary_generate = True\nnumpydoc_show_class_members = False\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\nsource_suffix = ['.rst', '.md']\n# source_suffix = '.rst'\n\n# The encoding of source files.\n#\n# source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'pyhf'\ncopyright = '2018, Lukas Heinrich, Matthew Feickert, Giordon Stark'\nauthor = 'Lukas Heinrich, Matthew Feickert, Giordon Stark'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n# The full version, including alpha/beta/rc tags.\nrelease = get_distribution('pyhf').version\n# for example take major/minor/patch\nversion = '.'.join(release.split('.')[:3])\n\n# The language for 
content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#\n# today = ''\n#\n# Else, today_fmt is used as the format for a strftime call.\n#\n# today_fmt = '%B %d, %Y'\n\nautodoc_mock_imports = [\n 'tensorflow',\n 'torch',\n 'jax',\n 'iminuit',\n 'tensorflow_probability',\n]\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = [\n '_build',\n 'JOSS',\n '**.ipynb_checkpoints',\n 'examples/experiments/edwardpyhf.ipynb',\n 'examples/notebooks/ImpactPlot.ipynb',\n 'examples/notebooks/Recast.ipynb',\n 'examples/notebooks/StatError.ipynb',\n 'examples/notebooks/example-tensorflow.ipynb',\n 'examples/notebooks/histogrammar.ipynb',\n 'examples/notebooks/histosys.ipynb',\n 'examples/notebooks/histosys-pytorch.ipynb',\n 'examples/notebooks/importxml.ipynb',\n 'examples/notebooks/multichannel-coupled-normsys.ipynb',\n 'examples/notebooks/multichannel-normsys.ipynb',\n 'examples/notebooks/normsys.ipynb',\n 'examples/notebooks/pullplot.ipynb',\n 'examples/notebooks/pytorch_tests_onoff.ipynb',\n 'examples/notebooks/tensorflow-limit.ipynb',\n]\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#\n# default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n#\n# add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#\n# add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#\n# show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n# keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\nhtml_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\nhtml_theme_path = []\n\n# The name for this set of Sphinx documents.\n# \"<project> v<release> documentation\" by default.\n#\n# html_title = u'pyhf v0.3.0'\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n#\n# html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#\n# html_logo = None\n\n# The name of an image file (relative to this directory) to use as a favicon of\n# the docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#\n# html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\nhtml_css_files = [\n 'css/custom.css',\n]\n\nhtml_js_files = [\n 'js/custom.js',\n]\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#\nhtml_extra_path = ['_extras']\n\n# If not None, a 'Last updated on:' timestamp is inserted at every page\n# bottom, using the given strftime format.\n# The empty string is equivalent to '%b %d, %Y'.\n#\n# html_last_updated_fmt = None\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#\n# html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#\n# html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#\n# html_additional_pages = {}\n\n# If false, no module index is generated.\n#\n# html_domain_indices = True\n\n# If false, no index is generated.\n#\n# html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#\n# html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#\n# html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#\n# html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. The value of this option must be the\n# base URL from which the finished HTML is served.\n#\n# html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n# html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'\n#\n# html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# 'ja' uses this config value.\n# 'zh' user can custom change `jieba` dictionary path.\n#\n# html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#\n# html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'pyhfdoc'\n\n# sphinx-copybutton configuration\ncopybutton_prompt_text = \">>> \"\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (\n master_doc,\n 'pyhf.tex',\n 'pyhf Documentation',\n 'Lukas Heinrich, Matthew Feickert, Giordon Stark',\n 'manual',\n )\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n#\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#\n# latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#\n# latex_appendices = []\n\n# It false, will not define \\strong, \\code, \titleref, \\crossref ... but only\n# \\sphinxstrong, ..., \\sphinxtitleref, ... To help avoid clash with user added\n# packages.\n#\n# latex_keep_old_macro_names = True\n\n# If false, no module index is generated.\n#\n# latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [(master_doc, 'pyhf', 'pyhf Documentation', [author], 1)]\n\n# If true, show URL addresses after external links.\n#\n# man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (\n master_doc,\n 'pyhf',\n 'pyhf Documentation',\n author,\n 'pyhf',\n 'One line description of project.',\n 'Miscellaneous',\n )\n]\n\n# Documents to append as an appendix to all manuals.\n#\n# texinfo_appendices = []\n\n# If false, no module index is generated.\n#\n# texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#\n# texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#\n# texinfo_no_detailmenu = False\n\nmathjax3_config = {\n 'tex2jax': {'inlineMath': [['$', '$'], ['\\\\(', '\\\\)']]},\n 'tex': {\n 'macros': {\n 'bm': [\"\\\\boldsymbol{#1}\", 1], # \\usepackage{bm}, see mathjax/MathJax#1219\n 'HiFa': r'\\texttt{HistFactory}',\n 'Root': r'\\texttt{ROOT}',\n 'RooStats': r'\\texttt{RooStats}',\n 'RooFit': r'\\texttt{RooFit}',\n 'pyhf': r'\\texttt{pyhf}',\n 'CLs': r'\\mathrm{CL}_{s}',\n 'freeset': r'\\bm{\\eta}',\n 'constrset': r'\\bm{\\chi}',\n 'singleconstr': r'\\chi',\n 'channelcounts': r'\\bm{n}',\n 'auxdata': r'\\bm{a}',\n 'poiset': r'\\bm{\\psi}',\n 'nuisset': r'\\bm{\\theta}',\n 'fullset': r'\\bm{\\phi}',\n 'singlefull': r'\\phi',\n 'TeV': r'\\textrm{TeV}',\n }\n },\n}\n",
"path": "docs/conf.py"
}
] | diff --git a/docs/bib/general_citations.bib b/docs/bib/general_citations.bib
new file mode 100644
index 0000000000..481fc91c2c
--- /dev/null
+++ b/docs/bib/general_citations.bib
@@ -0,0 +1,75 @@
+% 2021-07-27
+@article{Tastet:2021vwp,
+ author = "Tastet, Jean-Loup and Ruchayskiy, Oleg and Timiryasov, Inar",
+ title = "{Reinterpreting the ATLAS bounds on heavy neutral leptons in a realistic neutrino oscillation model}",
+ eprint = "2107.12980",
+ archivePrefix = "arXiv",
+ primaryClass = "hep-ph",
+ month = "7",
+ year = "2021",
+ journal = ""
+}
+
+% 2020-06-20
+@article{Krupa:2020bwg,
+ author = "Krupa, Jeffrey and others",
+ title = "{GPU coprocessors as a service for deep learning inference in high energy physics}",
+ eprint = "2007.10359",
+ archivePrefix = "arXiv",
+ primaryClass = "physics.comp-ph",
+ reportNumber = "FERMILAB-PUB-20-338-E-SCD",
+ month = "7",
+ year = "2020",
+ journal = ""
+}
+
+% 2020-03-17
+@article{LHCReinterpretationForum:2020xtr,
+ author = "Abdallah, Waleed and others",
+ collaboration = "LHC Reinterpretation Forum",
+ title = "{Reinterpretation of LHC Results for New Physics: Status and Recommendations after Run 2}",
+ eprint = "2003.07868",
+ archivePrefix = "arXiv",
+ primaryClass = "hep-ph",
+ reportNumber = "CERN-LPCC-2020-001, FERMILAB-FN-1098-CMS-T, Imperial/HEP/2020/RIF/01",
+ doi = "10.21468/SciPostPhys.9.2.022",
+ journal = "SciPost Phys.",
+ volume = "9",
+ number = "2",
+ pages = "022",
+ year = "2020"
+}
+
+% 2019-09-30
+@article{DiMicco:2019ngk,
+ author = "Alison, J. and others",
+ editor = "Di Micco, Biagio and Gouzevitch, Maxime and Mazzitelli, Javier and Vernieri, Caterina",
+ title = "{Higgs boson potential at colliders: Status and perspectives}",
+ eprint = "1910.00012",
+ archivePrefix = "arXiv",
+ primaryClass = "hep-ph",
+ reportNumber = "FERMILAB-CONF-19-468-E-T, LHCXSWG-2019-005",
+ doi = "10.1016/j.revip.2020.100045",
+ journal = "Rev. Phys.",
+ volume = "5",
+ pages = "100045",
+ year = "2020"
+}
+
+% 2019-06-24
+@article{Brehmer:2019xox,
+ author = "Brehmer, Johann and Kling, Felix and Espejo, Irina and
+ Cranmer, Kyle",
+ title = "{MadMiner: Machine learning-based inference for particle
+ physics}",
+ journal = "Comput. Softw. Big Sci.",
+ volume = "4",
+ year = "2020",
+ number = "1",
+ pages = "3",
+ doi = "10.1007/s41781-020-0035-2",
+ eprint = "1907.10621",
+ archivePrefix = "arXiv",
+ primaryClass = "hep-ph",
+ SLACcitation = "%%CITATION = ARXIV:1907.10621;%%"
+}
diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib
index f5277d8728..b906764e02 100644
--- a/docs/bib/use_citations.bib
+++ b/docs/bib/use_citations.bib
@@ -1,15 +1,3 @@
-% 2021-07-27
-@article{Tastet:2021vwp,
- author = "Tastet, Jean-Loup and Ruchayskiy, Oleg and Timiryasov, Inar",
- title = "{Reinterpreting the ATLAS bounds on heavy neutral leptons in a realistic neutrino oscillation model}",
- eprint = "2107.12980",
- archivePrefix = "arXiv",
- primaryClass = "hep-ph",
- month = "7",
- year = "2021",
- journal = ""
-}
-
% 2021-06-03
@article{ATLAS:SUSY-3L-compressed-combination,
author = "ATLAS Collaboration",
@@ -180,19 +168,6 @@ @article{Aad:2021hjy
journal = ""
}
-% 2020-06-20
-@article{Krupa:2020bwg,
- author = "Krupa, Jeffrey and others",
- title = "{GPU coprocessors as a service for deep learning inference in high energy physics}",
- eprint = "2007.10359",
- archivePrefix = "arXiv",
- primaryClass = "physics.comp-ph",
- reportNumber = "FERMILAB-PUB-20-338-E-SCD",
- month = "7",
- year = "2020",
- journal = ""
-}
-
% 2020-05-01
@article{Khosa:2020zar,
author = "Khosa, Charanjit K. and Kraml, Sabine and Lessa, Andre and Neuhuber, Philipp and Waltenberger, Wolfgang",
@@ -208,21 +183,6 @@ @article{Khosa:2020zar
year = "2020"
}
-@article{Abdallah:2020pec,
- author = "Abdallah, Waleed and others",
- title = "{Reinterpretation of LHC Results for New Physics: Status
- and Recommendations after Run 2}",
- collaboration = "LHC Reinterpretation Forum",
- year = "2020",
- eprint = "2003.07868",
- archivePrefix = "arXiv",
- primaryClass = "hep-ph",
- reportNumber = "CERN-LPCC-2020-001, FERMILAB-FN-1098-CMS-T,
- Imperial/HEP/2020/RIF/01",
- SLACcitation = "%%CITATION = ARXIV:2003.07868;%%",
- journal = ""
-}
-
@inproceedings{Brooijmans:2020yij,
author = "Brooijmans, G. and others",
title = "{Les Houches 2019 Physics at TeV Colliders: New Physics
@@ -266,19 +226,6 @@ @article{Allanach:2019zfr
year = "2020"
}
-@inproceedings{DiMicco:2019ngk,
- author = "Alison, J. and others",
- editor = "Di Micco, B. and Gouzevitch, M. and Mazzitelli, J. and Vernieri, C.",
- title = "{Higgs Boson Pair Production at Colliders: Status and Perspectives}",
- booktitle = "{Double Higgs Production at Colliders}",
- eprint = "1910.00012",
- archivePrefix = "arXiv",
- primaryClass = "hep-ph",
- reportNumber = "FERMILAB-CONF-19-468-E-T, LHCXSWG-2019-005",
- month = "9",
- year = "2019"
-}
-
@booklet{ATL-PHYS-PUB-2019-029,
author = "{ATLAS Collaboration}",
title = "{Reproducing searches for new physics with the ATLAS
@@ -293,23 +240,6 @@ @booklet{ATL-PHYS-PUB-2019-029
url = "https://cds.cern.ch/record/2684863",
}
-@article{Brehmer:2019xox,
- author = "Brehmer, Johann and Kling, Felix and Espejo, Irina and
- Cranmer, Kyle",
- title = "{MadMiner: Machine learning-based inference for particle
- physics}",
- journal = "Comput. Softw. Big Sci.",
- volume = "4",
- year = "2020",
- number = "1",
- pages = "3",
- doi = "10.1007/s41781-020-0035-2",
- eprint = "1907.10621",
- archivePrefix = "arXiv",
- primaryClass = "hep-ph",
- SLACcitation = "%%CITATION = ARXIV:1907.10621;%%"
-}
-
@article{Heinrich:2018nip,
author = "Heinrich, Lukas and Schulz, Holger and Turner, Jessica
and Zhou, Ye-Ling",
diff --git a/docs/citations.rst b/docs/citations.rst
index 2497b28360..c3cb36afa2 100644
--- a/docs/citations.rst
+++ b/docs/citations.rst
@@ -19,11 +19,22 @@ Use in Publications
Updating list of citations and use cases of :code:`pyhf`:
+Use Citations
+~~~~~~~~~~~~~
+
.. bibliography:: bib/use_citations.bib
:list: bullet
:all:
:style: plain
+General Citations
+~~~~~~~~~~~~~~~~~
+
+.. bibliography:: bib/general_citations.bib
+ :list: bullet
+ :all:
+ :style: plain
+
Published Statistical Models
----------------------------
diff --git a/docs/conf.py b/docs/conf.py
index 17b9636843..dc7fc07892 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -63,6 +63,7 @@ def setup(app):
"bib/talks.bib",
"bib/tutorials.bib",
"bib/use_citations.bib",
+ "bib/general_citations.bib",
]
# external links
|
cython__cython-4842 | [BUG] Fused type not subscriptable in uncompiled pure python
**Describe the bug**
A fused type can't be subscripted in pure Python syntax when `cython.compiled == False`.
**To Reproduce**
Code to reproduce the behaviour:
```python
import cython
int_or_float = cython.fused_type(cython.int, cython.float)
def func(num: int_or_float[:]):
...
```
This gives the following error:
```
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Input In [302], in <module>
1 import cython
3 int_or_float = cython.fused_type(cython.int, cython.float)
----> 5 def func(num: int_or_float[:]):
6 ...
TypeError: '_FusedType' object is not subscriptable
```
**Expected behavior**
`Cython.Shadow` should presumably implement `__getitem__` for fused types so that this doesn't raise an error (see the sketch below).
**Environment (please complete the following information):**
- OS: Linux
- Python version: '3.8.12 | packaged by conda-forge | (default, Oct 12 2021, 21:59:51) \n[GCC 9.4.0]'
- Cython version: 3.0.0a10
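Since the uncompiled shadow types already build memoryview-style specs through the module-level `index_type` helper (that is what gives `typedef` its slicing support), one minimal way to support this in `Cython/Shadow.py` is to wire the same helper into `_FusedType`; a sketch:

```python
# Cython/Shadow.py -- sketch: make fused types sliceable in pure Python mode
# by reusing the module-level index_type() helper that typedef already uses.
class _FusedType(CythonType):
    __getitem__ = index_type  # int_or_float[:] now returns an _ArrayType spec
```

With that in place, `int_or_float[:]` evaluates to a 1-D `_ArrayType` wrapper instead of raising `TypeError`, so the annotation in the reproducer above is accepted when the module runs uncompiled.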
| [
{
"content": "# cython.* namespace for pure mode.\nfrom __future__ import absolute_import\n\n__version__ = \"3.0.0a10\"\n\ntry:\n from __builtin__ import basestring\nexcept ImportError:\n basestring = str\n\n\n# BEGIN shameless copy from Cython/minivect/minitypes.py\n\nclass _ArrayType(object):\n\n is_array = True\n subtypes = ['dtype']\n\n def __init__(self, dtype, ndim, is_c_contig=False, is_f_contig=False,\n inner_contig=False, broadcasting=None):\n self.dtype = dtype\n self.ndim = ndim\n self.is_c_contig = is_c_contig\n self.is_f_contig = is_f_contig\n self.inner_contig = inner_contig or is_c_contig or is_f_contig\n self.broadcasting = broadcasting\n\n def __repr__(self):\n axes = [\":\"] * self.ndim\n if self.is_c_contig:\n axes[-1] = \"::1\"\n elif self.is_f_contig:\n axes[0] = \"::1\"\n\n return \"%s[%s]\" % (self.dtype, \", \".join(axes))\n\n\ndef index_type(base_type, item):\n \"\"\"\n Support array type creation by slicing, e.g. double[:, :] specifies\n a 2D strided array of doubles. The syntax is the same as for\n Cython memoryviews.\n \"\"\"\n class InvalidTypeSpecification(Exception):\n pass\n\n def verify_slice(s):\n if s.start or s.stop or s.step not in (None, 1):\n raise InvalidTypeSpecification(\n \"Only a step of 1 may be provided to indicate C or \"\n \"Fortran contiguity\")\n\n if isinstance(item, tuple):\n step_idx = None\n for idx, s in enumerate(item):\n verify_slice(s)\n if s.step and (step_idx or idx not in (0, len(item) - 1)):\n raise InvalidTypeSpecification(\n \"Step may only be provided once, and only in the \"\n \"first or last dimension.\")\n\n if s.step == 1:\n step_idx = idx\n\n return _ArrayType(base_type, len(item),\n is_c_contig=step_idx == len(item) - 1,\n is_f_contig=step_idx == 0)\n elif isinstance(item, slice):\n verify_slice(item)\n return _ArrayType(base_type, 1, is_c_contig=bool(item.step))\n else:\n # int[8] etc.\n assert int(item) == item # array size must be a plain integer\n return array(base_type, item)\n\n# END shameless copy\n\n\ncompiled = False\n\n_Unspecified = object()\n\n# Function decorators\n\ndef _empty_decorator(x):\n return x\n\ndef locals(**arg_types):\n return _empty_decorator\n\ndef test_assert_path_exists(*paths):\n return _empty_decorator\n\ndef test_fail_if_path_exists(*paths):\n return _empty_decorator\n\nclass _EmptyDecoratorAndManager(object):\n def __call__(self, x):\n return x\n def __enter__(self):\n pass\n def __exit__(self, exc_type, exc_value, traceback):\n pass\n\nclass _Optimization(object):\n pass\n\ncclass = ccall = cfunc = _EmptyDecoratorAndManager()\n\nreturns = wraparound = boundscheck = initializedcheck = nonecheck = \\\n embedsignature = cdivision = cdivision_warnings = \\\n always_allows_keywords = profile = linetrace = infer_types = \\\n unraisable_tracebacks = freelist = \\\n lambda _: _EmptyDecoratorAndManager()\n\nexceptval = lambda _=None, check=True: _EmptyDecoratorAndManager()\n\noverflowcheck = lambda _: _EmptyDecoratorAndManager()\noptimize = _Optimization()\n\noverflowcheck.fold = optimize.use_switch = \\\n optimize.unpack_method_calls = lambda arg: _EmptyDecoratorAndManager()\n\nfinal = internal = type_version_tag = no_gc_clear = no_gc = total_ordering = _empty_decorator\n\nbinding = lambda _: _empty_decorator\n\n\n_cython_inline = None\ndef inline(f, *args, **kwds):\n if isinstance(f, basestring):\n global _cython_inline\n if _cython_inline is None:\n from Cython.Build.Inline import cython_inline as _cython_inline\n return _cython_inline(f, *args, **kwds)\n else:\n assert len(args) == len(kwds) 
== 0\n return f\n\n\ndef compile(f):\n from Cython.Build.Inline import RuntimeCompiledFunction\n return RuntimeCompiledFunction(f)\n\n\n# Special functions\n\ndef cdiv(a, b):\n if a < 0:\n a = -a\n b = -b\n if b < 0:\n return (a + b + 1) // b\n return a // b\n\ndef cmod(a, b):\n r = a % b\n if (a * b) < 0 and r:\n r -= b\n return r\n\n\n# Emulated language constructs\n\ndef cast(t, *args, **kwargs):\n kwargs.pop('typecheck', None)\n assert not kwargs\n\n if isinstance(t, typedef):\n return t(*args)\n elif isinstance(t, type): # Doesn't work with old-style classes of Python 2.x\n if len(args) != 1 or not (args[0] is None or isinstance(args[0], t)):\n return t(*args)\n\n return args[0]\n\ndef sizeof(arg):\n return 1\n\ndef typeof(arg):\n return arg.__class__.__name__\n # return type(arg)\n\ndef address(arg):\n return pointer(type(arg))([arg])\n\ndef _is_value_type(t):\n if isinstance(t, typedef):\n return _is_value_type(t._basetype)\n\n return isinstance(t, type) and issubclass(t, (StructType, UnionType, ArrayType))\n\ndef declare(t=None, value=_Unspecified, **kwds):\n if value is not _Unspecified:\n return cast(t, value)\n elif _is_value_type(t):\n return t()\n else:\n return None\n\nclass _nogil(object):\n \"\"\"Support for 'with nogil' statement and @nogil decorator.\n \"\"\"\n def __call__(self, x):\n if callable(x):\n # Used as function decorator => return the function unchanged.\n return x\n # Used as conditional context manager or to create an \"@nogil(True/False)\" decorator => keep going.\n return self\n\n def __enter__(self):\n pass\n def __exit__(self, exc_class, exc, tb):\n return exc_class is None\n\nnogil = _nogil()\ngil = _nogil()\ndel _nogil\n\n\n# Emulated types\n\nclass CythonMetaType(type):\n\n def __getitem__(type, ix):\n return array(type, ix)\n\nCythonTypeObject = CythonMetaType('CythonTypeObject', (object,), {})\n\nclass CythonType(CythonTypeObject):\n\n def _pointer(self, n=1):\n for i in range(n):\n self = pointer(self)\n return self\n\nclass PointerType(CythonType):\n\n def __init__(self, value=None):\n if isinstance(value, (ArrayType, PointerType)):\n self._items = [cast(self._basetype, a) for a in value._items]\n elif isinstance(value, list):\n self._items = [cast(self._basetype, a) for a in value]\n elif value is None or value == 0:\n self._items = []\n else:\n raise ValueError\n\n def __getitem__(self, ix):\n if ix < 0:\n raise IndexError(\"negative indexing not allowed in C\")\n return self._items[ix]\n\n def __setitem__(self, ix, value):\n if ix < 0:\n raise IndexError(\"negative indexing not allowed in C\")\n self._items[ix] = cast(self._basetype, value)\n\n def __eq__(self, value):\n if value is None and not self._items:\n return True\n elif type(self) != type(value):\n return False\n else:\n return not self._items and not value._items\n\n def __repr__(self):\n return \"%s *\" % (self._basetype,)\n\nclass ArrayType(PointerType):\n\n def __init__(self, value=None):\n if value is None:\n self._items = [None] * self._n\n else:\n super(ArrayType, self).__init__(value)\n\n\nclass StructType(CythonType):\n\n def __init__(self, *posargs, **data):\n if not (posargs or data):\n return\n if posargs and data:\n raise ValueError('Cannot accept both positional and keyword arguments.')\n\n # Allow 'cast_from' as single positional or keyword argument.\n if data and len(data) == 1 and 'cast_from' in data:\n cast_from = data.pop('cast_from')\n elif len(posargs) == 1 and type(posargs[0]) is type(self):\n cast_from, posargs = posargs[0], ()\n elif posargs:\n for key, arg in 
zip(self._members, posargs):\n setattr(self, key, arg)\n return\n else:\n for key, value in data.items():\n if key not in self._members:\n raise ValueError(\"Invalid struct attribute for %s: %s\" % (\n self.__class__.__name__, key))\n setattr(self, key, value)\n return\n\n # do cast\n if data:\n raise ValueError('Cannot accept keyword arguments when casting.')\n if type(cast_from) is not type(self):\n raise ValueError('Cannot cast from %s' % cast_from)\n for key, value in cast_from.__dict__.items():\n setattr(self, key, value)\n\n def __setattr__(self, key, value):\n if key in self._members:\n self.__dict__[key] = cast(self._members[key], value)\n else:\n raise AttributeError(\"Struct has no member '%s'\" % key)\n\n\nclass UnionType(CythonType):\n\n def __init__(self, cast_from=_Unspecified, **data):\n if cast_from is not _Unspecified:\n # do type cast\n if len(data) > 0:\n raise ValueError('Cannot accept keyword arguments when casting.')\n if isinstance(cast_from, dict):\n datadict = cast_from\n elif type(cast_from) is type(self):\n datadict = cast_from.__dict__\n else:\n raise ValueError('Cannot cast from %s' % cast_from)\n else:\n datadict = data\n if len(datadict) > 1:\n raise AttributeError(\"Union can only store one field at a time.\")\n for key, value in datadict.items():\n setattr(self, key, value)\n\n def __setattr__(self, key, value):\n if key == '__dict__':\n CythonType.__setattr__(self, key, value)\n elif key in self._members:\n self.__dict__ = {key: cast(self._members[key], value)}\n else:\n raise AttributeError(\"Union has no member '%s'\" % key)\n\ndef pointer(basetype):\n class PointerInstance(PointerType):\n _basetype = basetype\n return PointerInstance\n\ndef array(basetype, n):\n class ArrayInstance(ArrayType):\n _basetype = basetype\n _n = n\n return ArrayInstance\n\ndef struct(**members):\n class StructInstance(StructType):\n _members = members\n for key in members:\n setattr(StructInstance, key, None)\n return StructInstance\n\ndef union(**members):\n class UnionInstance(UnionType):\n _members = members\n for key in members:\n setattr(UnionInstance, key, None)\n return UnionInstance\n\nclass typedef(CythonType):\n\n def __init__(self, type, name=None):\n self._basetype = type\n self.name = name\n\n def __call__(self, *arg):\n value = cast(self._basetype, *arg)\n return value\n\n def __repr__(self):\n return self.name or str(self._basetype)\n\n __getitem__ = index_type\n\nclass _FusedType(CythonType):\n pass\n\n\ndef fused_type(*args):\n if not args:\n raise TypeError(\"Expected at least one type as argument\")\n\n # Find the numeric type with biggest rank if all types are numeric\n rank = -1\n for type in args:\n if type not in (py_int, py_long, py_float, py_complex):\n break\n\n if type_ordering.index(type) > rank:\n result_type = type\n else:\n return result_type\n\n # Not a simple numeric type, return a fused type instance. The result\n # isn't really meant to be used, as we can't keep track of the context in\n # pure-mode. 
Casting won't do anything in this case.\n return _FusedType()\n\n\ndef _specialized_from_args(signatures, args, kwargs):\n \"Perhaps this should be implemented in a TreeFragment in Cython code\"\n raise Exception(\"yet to be implemented\")\n\n\npy_int = typedef(int, \"int\")\ntry:\n py_long = typedef(long, \"long\")\nexcept NameError: # Py3\n py_long = typedef(int, \"long\")\npy_float = typedef(float, \"float\")\npy_complex = typedef(complex, \"double complex\")\n\n\n# Predefined types\n\nint_types = [\n 'char',\n 'short',\n 'Py_UNICODE',\n 'int',\n 'Py_UCS4',\n 'long',\n 'longlong',\n 'Py_hash_t',\n 'Py_ssize_t',\n 'size_t',\n]\nfloat_types = [\n 'longdouble',\n 'double',\n 'float',\n]\ncomplex_types = [\n 'longdoublecomplex',\n 'doublecomplex',\n 'floatcomplex',\n 'complex',\n]\nother_types = [\n 'bint',\n 'void',\n 'Py_tss_t',\n]\n\nto_repr = {\n 'longlong': 'long long',\n 'longdouble': 'long double',\n 'longdoublecomplex': 'long double complex',\n 'doublecomplex': 'double complex',\n 'floatcomplex': 'float complex',\n}.get\n\ngs = globals()\n\n# note: cannot simply name the unicode type here as 2to3 gets in the way and replaces it by str\ntry:\n import __builtin__ as builtins\nexcept ImportError: # Py3\n import builtins\n\ngs['unicode'] = typedef(getattr(builtins, 'unicode', str), 'unicode')\ndel builtins\n\nfor name in int_types:\n reprname = to_repr(name, name)\n gs[name] = typedef(py_int, reprname)\n if name not in ('Py_UNICODE', 'Py_UCS4') and not name.endswith('size_t'):\n gs['u'+name] = typedef(py_int, \"unsigned \" + reprname)\n gs['s'+name] = typedef(py_int, \"signed \" + reprname)\n\nfor name in float_types:\n gs[name] = typedef(py_float, to_repr(name, name))\n\nfor name in complex_types:\n gs[name] = typedef(py_complex, to_repr(name, name))\n\nbint = typedef(bool, \"bint\")\nvoid = typedef(None, \"void\")\nPy_tss_t = typedef(None, \"Py_tss_t\")\n\nfor t in int_types + float_types + complex_types + other_types:\n for i in range(1, 4):\n gs[\"%s_%s\" % ('p'*i, t)] = gs[t]._pointer(i)\n\nNULL = gs['p_void'](0)\n\n# looks like 'gs' has some users out there by now...\n#del gs\n\nintegral = floating = numeric = _FusedType()\n\ntype_ordering = [py_int, py_long, py_float, py_complex]\n\nclass CythonDotParallel(object):\n \"\"\"\n The cython.parallel module.\n \"\"\"\n\n __all__ = ['parallel', 'prange', 'threadid']\n\n def parallel(self, num_threads=None):\n return nogil\n\n def prange(self, start=0, stop=None, step=1, nogil=False, schedule=None, chunksize=None, num_threads=None):\n if stop is None:\n stop = start\n start = 0\n return range(start, stop, step)\n\n def threadid(self):\n return 0\n\n # def threadsavailable(self):\n # return 1\n\nclass CythonDotImportedFromElsewhere(object):\n \"\"\"\n cython.dataclasses just shadows the standard library modules of the same name\n \"\"\"\n def __init__(self, module):\n self.__path__ = []\n self.__file__ = None\n self.__name__ = module\n self.__package__ = module\n\n def __getattr__(self, attr):\n # we typically only expect this to be called once\n from importlib import import_module\n import sys\n try:\n mod = import_module(self.__name__)\n except ImportError:\n # but if they don't exist (Python is not sufficiently up-to-date) then\n # you can't use them\n raise AttributeError(\"%s: the standard library module %s is not available\" %\n (attr, self.__name__))\n sys.modules['cython.%s' % self.__name__] = mod\n return getattr(mod, attr)\n\n\nclass CythonCImports(object):\n \"\"\"\n Simplistic module mock to make cimports sort-of work in 
Python code.\n \"\"\"\n def __init__(self, module):\n self.__path__ = []\n self.__file__ = None\n self.__name__ = module\n self.__package__ = module\n\n def __getattr__(self, item):\n if item.startswith('__') and item.endswith('__'):\n raise AttributeError(item)\n return __import__(item)\n\n\nimport math, sys\nsys.modules['cython.parallel'] = CythonDotParallel()\nsys.modules['cython.cimports'] = CythonCImports('cython.cimports')\nsys.modules['cython.cimports.libc'] = CythonCImports('cython.cimports.libc')\nsys.modules['cython.cimports.libc.math'] = math\n# In pure Python mode @cython.dataclasses.dataclass and dataclass field should just\n# shadow the standard library ones (if they are available)\ndataclasses = sys.modules['cython.dataclasses'] = CythonDotImportedFromElsewhere('dataclasses')\ndel math, sys\n",
"path": "Cython/Shadow.py"
}
] | [
{
"content": "# cython.* namespace for pure mode.\nfrom __future__ import absolute_import\n\n__version__ = \"3.0.0a10\"\n\ntry:\n from __builtin__ import basestring\nexcept ImportError:\n basestring = str\n\n\n# BEGIN shameless copy from Cython/minivect/minitypes.py\n\nclass _ArrayType(object):\n\n is_array = True\n subtypes = ['dtype']\n\n def __init__(self, dtype, ndim, is_c_contig=False, is_f_contig=False,\n inner_contig=False, broadcasting=None):\n self.dtype = dtype\n self.ndim = ndim\n self.is_c_contig = is_c_contig\n self.is_f_contig = is_f_contig\n self.inner_contig = inner_contig or is_c_contig or is_f_contig\n self.broadcasting = broadcasting\n\n def __repr__(self):\n axes = [\":\"] * self.ndim\n if self.is_c_contig:\n axes[-1] = \"::1\"\n elif self.is_f_contig:\n axes[0] = \"::1\"\n\n return \"%s[%s]\" % (self.dtype, \", \".join(axes))\n\n\ndef index_type(base_type, item):\n \"\"\"\n Support array type creation by slicing, e.g. double[:, :] specifies\n a 2D strided array of doubles. The syntax is the same as for\n Cython memoryviews.\n \"\"\"\n class InvalidTypeSpecification(Exception):\n pass\n\n def verify_slice(s):\n if s.start or s.stop or s.step not in (None, 1):\n raise InvalidTypeSpecification(\n \"Only a step of 1 may be provided to indicate C or \"\n \"Fortran contiguity\")\n\n if isinstance(item, tuple):\n step_idx = None\n for idx, s in enumerate(item):\n verify_slice(s)\n if s.step and (step_idx or idx not in (0, len(item) - 1)):\n raise InvalidTypeSpecification(\n \"Step may only be provided once, and only in the \"\n \"first or last dimension.\")\n\n if s.step == 1:\n step_idx = idx\n\n return _ArrayType(base_type, len(item),\n is_c_contig=step_idx == len(item) - 1,\n is_f_contig=step_idx == 0)\n elif isinstance(item, slice):\n verify_slice(item)\n return _ArrayType(base_type, 1, is_c_contig=bool(item.step))\n else:\n # int[8] etc.\n assert int(item) == item # array size must be a plain integer\n return array(base_type, item)\n\n# END shameless copy\n\n\ncompiled = False\n\n_Unspecified = object()\n\n# Function decorators\n\ndef _empty_decorator(x):\n return x\n\ndef locals(**arg_types):\n return _empty_decorator\n\ndef test_assert_path_exists(*paths):\n return _empty_decorator\n\ndef test_fail_if_path_exists(*paths):\n return _empty_decorator\n\nclass _EmptyDecoratorAndManager(object):\n def __call__(self, x):\n return x\n def __enter__(self):\n pass\n def __exit__(self, exc_type, exc_value, traceback):\n pass\n\nclass _Optimization(object):\n pass\n\ncclass = ccall = cfunc = _EmptyDecoratorAndManager()\n\nreturns = wraparound = boundscheck = initializedcheck = nonecheck = \\\n embedsignature = cdivision = cdivision_warnings = \\\n always_allows_keywords = profile = linetrace = infer_types = \\\n unraisable_tracebacks = freelist = \\\n lambda _: _EmptyDecoratorAndManager()\n\nexceptval = lambda _=None, check=True: _EmptyDecoratorAndManager()\n\noverflowcheck = lambda _: _EmptyDecoratorAndManager()\noptimize = _Optimization()\n\noverflowcheck.fold = optimize.use_switch = \\\n optimize.unpack_method_calls = lambda arg: _EmptyDecoratorAndManager()\n\nfinal = internal = type_version_tag = no_gc_clear = no_gc = total_ordering = _empty_decorator\n\nbinding = lambda _: _empty_decorator\n\n\n_cython_inline = None\ndef inline(f, *args, **kwds):\n if isinstance(f, basestring):\n global _cython_inline\n if _cython_inline is None:\n from Cython.Build.Inline import cython_inline as _cython_inline\n return _cython_inline(f, *args, **kwds)\n else:\n assert len(args) == len(kwds) 
== 0\n return f\n\n\ndef compile(f):\n from Cython.Build.Inline import RuntimeCompiledFunction\n return RuntimeCompiledFunction(f)\n\n\n# Special functions\n\ndef cdiv(a, b):\n if a < 0:\n a = -a\n b = -b\n if b < 0:\n return (a + b + 1) // b\n return a // b\n\ndef cmod(a, b):\n r = a % b\n if (a * b) < 0 and r:\n r -= b\n return r\n\n\n# Emulated language constructs\n\ndef cast(t, *args, **kwargs):\n kwargs.pop('typecheck', None)\n assert not kwargs\n\n if isinstance(t, typedef):\n return t(*args)\n elif isinstance(t, type): # Doesn't work with old-style classes of Python 2.x\n if len(args) != 1 or not (args[0] is None or isinstance(args[0], t)):\n return t(*args)\n\n return args[0]\n\ndef sizeof(arg):\n return 1\n\ndef typeof(arg):\n return arg.__class__.__name__\n # return type(arg)\n\ndef address(arg):\n return pointer(type(arg))([arg])\n\ndef _is_value_type(t):\n if isinstance(t, typedef):\n return _is_value_type(t._basetype)\n\n return isinstance(t, type) and issubclass(t, (StructType, UnionType, ArrayType))\n\ndef declare(t=None, value=_Unspecified, **kwds):\n if value is not _Unspecified:\n return cast(t, value)\n elif _is_value_type(t):\n return t()\n else:\n return None\n\nclass _nogil(object):\n \"\"\"Support for 'with nogil' statement and @nogil decorator.\n \"\"\"\n def __call__(self, x):\n if callable(x):\n # Used as function decorator => return the function unchanged.\n return x\n # Used as conditional context manager or to create an \"@nogil(True/False)\" decorator => keep going.\n return self\n\n def __enter__(self):\n pass\n def __exit__(self, exc_class, exc, tb):\n return exc_class is None\n\nnogil = _nogil()\ngil = _nogil()\ndel _nogil\n\n\n# Emulated types\n\nclass CythonMetaType(type):\n\n def __getitem__(type, ix):\n return array(type, ix)\n\nCythonTypeObject = CythonMetaType('CythonTypeObject', (object,), {})\n\nclass CythonType(CythonTypeObject):\n\n def _pointer(self, n=1):\n for i in range(n):\n self = pointer(self)\n return self\n\nclass PointerType(CythonType):\n\n def __init__(self, value=None):\n if isinstance(value, (ArrayType, PointerType)):\n self._items = [cast(self._basetype, a) for a in value._items]\n elif isinstance(value, list):\n self._items = [cast(self._basetype, a) for a in value]\n elif value is None or value == 0:\n self._items = []\n else:\n raise ValueError\n\n def __getitem__(self, ix):\n if ix < 0:\n raise IndexError(\"negative indexing not allowed in C\")\n return self._items[ix]\n\n def __setitem__(self, ix, value):\n if ix < 0:\n raise IndexError(\"negative indexing not allowed in C\")\n self._items[ix] = cast(self._basetype, value)\n\n def __eq__(self, value):\n if value is None and not self._items:\n return True\n elif type(self) != type(value):\n return False\n else:\n return not self._items and not value._items\n\n def __repr__(self):\n return \"%s *\" % (self._basetype,)\n\nclass ArrayType(PointerType):\n\n def __init__(self, value=None):\n if value is None:\n self._items = [None] * self._n\n else:\n super(ArrayType, self).__init__(value)\n\n\nclass StructType(CythonType):\n\n def __init__(self, *posargs, **data):\n if not (posargs or data):\n return\n if posargs and data:\n raise ValueError('Cannot accept both positional and keyword arguments.')\n\n # Allow 'cast_from' as single positional or keyword argument.\n if data and len(data) == 1 and 'cast_from' in data:\n cast_from = data.pop('cast_from')\n elif len(posargs) == 1 and type(posargs[0]) is type(self):\n cast_from, posargs = posargs[0], ()\n elif posargs:\n for key, arg in 
zip(self._members, posargs):\n setattr(self, key, arg)\n return\n else:\n for key, value in data.items():\n if key not in self._members:\n raise ValueError(\"Invalid struct attribute for %s: %s\" % (\n self.__class__.__name__, key))\n setattr(self, key, value)\n return\n\n # do cast\n if data:\n raise ValueError('Cannot accept keyword arguments when casting.')\n if type(cast_from) is not type(self):\n raise ValueError('Cannot cast from %s' % cast_from)\n for key, value in cast_from.__dict__.items():\n setattr(self, key, value)\n\n def __setattr__(self, key, value):\n if key in self._members:\n self.__dict__[key] = cast(self._members[key], value)\n else:\n raise AttributeError(\"Struct has no member '%s'\" % key)\n\n\nclass UnionType(CythonType):\n\n def __init__(self, cast_from=_Unspecified, **data):\n if cast_from is not _Unspecified:\n # do type cast\n if len(data) > 0:\n raise ValueError('Cannot accept keyword arguments when casting.')\n if isinstance(cast_from, dict):\n datadict = cast_from\n elif type(cast_from) is type(self):\n datadict = cast_from.__dict__\n else:\n raise ValueError('Cannot cast from %s' % cast_from)\n else:\n datadict = data\n if len(datadict) > 1:\n raise AttributeError(\"Union can only store one field at a time.\")\n for key, value in datadict.items():\n setattr(self, key, value)\n\n def __setattr__(self, key, value):\n if key == '__dict__':\n CythonType.__setattr__(self, key, value)\n elif key in self._members:\n self.__dict__ = {key: cast(self._members[key], value)}\n else:\n raise AttributeError(\"Union has no member '%s'\" % key)\n\ndef pointer(basetype):\n class PointerInstance(PointerType):\n _basetype = basetype\n return PointerInstance\n\ndef array(basetype, n):\n class ArrayInstance(ArrayType):\n _basetype = basetype\n _n = n\n return ArrayInstance\n\ndef struct(**members):\n class StructInstance(StructType):\n _members = members\n for key in members:\n setattr(StructInstance, key, None)\n return StructInstance\n\ndef union(**members):\n class UnionInstance(UnionType):\n _members = members\n for key in members:\n setattr(UnionInstance, key, None)\n return UnionInstance\n\nclass typedef(CythonType):\n\n def __init__(self, type, name=None):\n self._basetype = type\n self.name = name\n\n def __call__(self, *arg):\n value = cast(self._basetype, *arg)\n return value\n\n def __repr__(self):\n return self.name or str(self._basetype)\n\n __getitem__ = index_type\n\nclass _FusedType(CythonType):\n __getitem__ = index_type\n\n\ndef fused_type(*args):\n if not args:\n raise TypeError(\"Expected at least one type as argument\")\n\n # Find the numeric type with biggest rank if all types are numeric\n rank = -1\n for type in args:\n if type not in (py_int, py_long, py_float, py_complex):\n break\n\n if type_ordering.index(type) > rank:\n result_type = type\n else:\n return result_type\n\n # Not a simple numeric type, return a fused type instance. The result\n # isn't really meant to be used, as we can't keep track of the context in\n # pure-mode. 
Casting won't do anything in this case.\n return _FusedType()\n\n\ndef _specialized_from_args(signatures, args, kwargs):\n \"Perhaps this should be implemented in a TreeFragment in Cython code\"\n raise Exception(\"yet to be implemented\")\n\n\npy_int = typedef(int, \"int\")\ntry:\n py_long = typedef(long, \"long\")\nexcept NameError: # Py3\n py_long = typedef(int, \"long\")\npy_float = typedef(float, \"float\")\npy_complex = typedef(complex, \"double complex\")\n\n\n# Predefined types\n\nint_types = [\n 'char',\n 'short',\n 'Py_UNICODE',\n 'int',\n 'Py_UCS4',\n 'long',\n 'longlong',\n 'Py_hash_t',\n 'Py_ssize_t',\n 'size_t',\n]\nfloat_types = [\n 'longdouble',\n 'double',\n 'float',\n]\ncomplex_types = [\n 'longdoublecomplex',\n 'doublecomplex',\n 'floatcomplex',\n 'complex',\n]\nother_types = [\n 'bint',\n 'void',\n 'Py_tss_t',\n]\n\nto_repr = {\n 'longlong': 'long long',\n 'longdouble': 'long double',\n 'longdoublecomplex': 'long double complex',\n 'doublecomplex': 'double complex',\n 'floatcomplex': 'float complex',\n}.get\n\ngs = globals()\n\n# note: cannot simply name the unicode type here as 2to3 gets in the way and replaces it by str\ntry:\n import __builtin__ as builtins\nexcept ImportError: # Py3\n import builtins\n\ngs['unicode'] = typedef(getattr(builtins, 'unicode', str), 'unicode')\ndel builtins\n\nfor name in int_types:\n reprname = to_repr(name, name)\n gs[name] = typedef(py_int, reprname)\n if name not in ('Py_UNICODE', 'Py_UCS4') and not name.endswith('size_t'):\n gs['u'+name] = typedef(py_int, \"unsigned \" + reprname)\n gs['s'+name] = typedef(py_int, \"signed \" + reprname)\n\nfor name in float_types:\n gs[name] = typedef(py_float, to_repr(name, name))\n\nfor name in complex_types:\n gs[name] = typedef(py_complex, to_repr(name, name))\n\nbint = typedef(bool, \"bint\")\nvoid = typedef(None, \"void\")\nPy_tss_t = typedef(None, \"Py_tss_t\")\n\nfor t in int_types + float_types + complex_types + other_types:\n for i in range(1, 4):\n gs[\"%s_%s\" % ('p'*i, t)] = gs[t]._pointer(i)\n\nNULL = gs['p_void'](0)\n\n# looks like 'gs' has some users out there by now...\n#del gs\n\nintegral = floating = numeric = _FusedType()\n\ntype_ordering = [py_int, py_long, py_float, py_complex]\n\nclass CythonDotParallel(object):\n \"\"\"\n The cython.parallel module.\n \"\"\"\n\n __all__ = ['parallel', 'prange', 'threadid']\n\n def parallel(self, num_threads=None):\n return nogil\n\n def prange(self, start=0, stop=None, step=1, nogil=False, schedule=None, chunksize=None, num_threads=None):\n if stop is None:\n stop = start\n start = 0\n return range(start, stop, step)\n\n def threadid(self):\n return 0\n\n # def threadsavailable(self):\n # return 1\n\nclass CythonDotImportedFromElsewhere(object):\n \"\"\"\n cython.dataclasses just shadows the standard library modules of the same name\n \"\"\"\n def __init__(self, module):\n self.__path__ = []\n self.__file__ = None\n self.__name__ = module\n self.__package__ = module\n\n def __getattr__(self, attr):\n # we typically only expect this to be called once\n from importlib import import_module\n import sys\n try:\n mod = import_module(self.__name__)\n except ImportError:\n # but if they don't exist (Python is not sufficiently up-to-date) then\n # you can't use them\n raise AttributeError(\"%s: the standard library module %s is not available\" %\n (attr, self.__name__))\n sys.modules['cython.%s' % self.__name__] = mod\n return getattr(mod, attr)\n\n\nclass CythonCImports(object):\n \"\"\"\n Simplistic module mock to make cimports sort-of work in 
Python code.\n \"\"\"\n def __init__(self, module):\n self.__path__ = []\n self.__file__ = None\n self.__name__ = module\n self.__package__ = module\n\n def __getattr__(self, item):\n if item.startswith('__') and item.endswith('__'):\n raise AttributeError(item)\n return __import__(item)\n\n\nimport math, sys\nsys.modules['cython.parallel'] = CythonDotParallel()\nsys.modules['cython.cimports'] = CythonCImports('cython.cimports')\nsys.modules['cython.cimports.libc'] = CythonCImports('cython.cimports.libc')\nsys.modules['cython.cimports.libc.math'] = math\n# In pure Python mode @cython.dataclasses.dataclass and dataclass field should just\n# shadow the standard library ones (if they are available)\ndataclasses = sys.modules['cython.dataclasses'] = CythonDotImportedFromElsewhere('dataclasses')\ndel math, sys\n",
"path": "Cython/Shadow.py"
}
] | diff --git a/Cython/Shadow.py b/Cython/Shadow.py
index 48bc249e01f..78d950ce231 100644
--- a/Cython/Shadow.py
+++ b/Cython/Shadow.py
@@ -385,7 +385,7 @@ def __repr__(self):
__getitem__ = index_type
class _FusedType(CythonType):
- pass
+ __getitem__ = index_type
def fused_type(*args):
|
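The one-line patch above gives `_FusedType` the same class-level `__getitem__ = index_type` hook that the other shadow types in `Cython/Shadow.py` already use, so subscripting a fused type in pure-Python ("shadow") mode now builds an `_ArrayType` spec instead of raising a `TypeError`. A minimal sketch of the effect, assuming a Cython install that already contains this patch (`cython.floating` is one of the shadow module's built-in fused types; exact reprs and error wording may differ):

```python
import cython  # pure-Python "shadow" mode, no compilation involved

# Concrete shadow types could already be sliced into memoryview-style specs:
print(cython.double[:, ::1])     # -> double[:, ::1]  (2-D, C-contiguous spec)

# Before the patch the same slice on a fused type failed with roughly
#   TypeError: '_FusedType' object is not subscriptable
# With __getitem__ = index_type it now returns an _ArrayType wrapping the fused type.
print(cython.floating[:, ::1])
```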
conda__conda-3524 | Progress bar broken

```
C:\Users\Korijn\dev\myproject>conda info
Current conda install:
platform : win-64
conda version : 4.2.7
conda is private : False
conda-env version : 4.2.7
conda-build version : 2.0.1
python version : 3.5.1.final.0
requests version : 2.9.1
root environment : C:\Users\Korijn\Miniconda3 (writable)
default environment : C:\Users\Korijn\Miniconda3
envs directories : C:\Users\Korijn\Miniconda3\envs
package cache : C:\Users\Korijn\Miniconda3\pkgs
channel URLs : https://repo.continuum.io/pkgs/free/win-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/win-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://repo.continuum.io/pkgs/msys2/win-64/
https://repo.continuum.io/pkgs/msys2/noarch/
config file : C:\Users\Korijn\.condarc
offline mode : False
```
| [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThis file should hold almost all string literals and magic numbers used throughout the code base.\nThe exception is if a literal is specifically meant to be private to and isolated within a module.\n\"\"\"\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\nfrom logging import getLogger\nfrom platform import machine\n\nfrom enum import Enum\n\nfrom conda._vendor.auxlib.collection import frozendict\n\nlog = getLogger(__name__)\n\n\nclass Arch(Enum):\n x86 = 'x86'\n x86_64 = 'x86_64'\n armv6l = 'armv6l'\n armv7l = 'armv7l'\n ppc64le = 'ppc64le'\n\n @classmethod\n def from_sys(cls):\n return cls[machine()]\n\n\nclass Platform(Enum):\n linux = 'linux'\n win = 'win32'\n openbsd = 'openbsd5'\n osx = 'darwin'\n\n @classmethod\n def from_sys(cls):\n p = sys.platform\n if p.startswith('linux'):\n # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2',\n # and there is no essential change between Linux 2.x and 3.x, sys.platform is always\n # set to 'linux2', even on Linux 3.x. In Python 3.3 and later, the value will always\n # be set to 'linux'\n p = 'linux'\n return cls(p)\n\nmachine_bits = 8 * tuple.__itemsize__\n\n# UID = os.getuid()\nPWD = os.getcwd()\nCONDA = 'CONDA'\nCONDA_ = 'CONDA_'\nconda = 'conda'\n\nSEARCH_PATH = (\n '/etc/conda/condarc',\n '/etc/conda/condarc.d/',\n '/var/lib/conda/condarc',\n '/var/lib/conda/condarc.d/',\n '$CONDA_ROOT/condarc',\n '$CONDA_ROOT/.condarc',\n '$CONDA_ROOT/condarc.d/',\n '~/.conda/condarc',\n '~/.conda/condarc.d/',\n '~/.condarc',\n '$CONDA_PREFIX/.condarc',\n '$CONDA_PREFIX/condarc.d/',\n '$CONDARC',\n)\n\nDEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'\n\nPLATFORM_DIRECTORIES = (\"linux-64\", \"linux-32\",\n \"win-64\", \"win-32\",\n \"osx-64\", \"noarch\")\n\nRECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')\n\n\nif Platform.from_sys() is Platform.win:\n DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',\n 'https://repo.continuum.io/pkgs/pro',\n 'https://repo.continuum.io/pkgs/msys2',\n )\nelse:\n DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',\n 'https://repo.continuum.io/pkgs/pro',\n )\n\nROOT_ENV_NAME = 'root'\n\nEMPTY_LIST = ()\nEMPTY_MAP = frozendict()\n\n\nclass _Null(object):\n def __nonzero__(self):\n return False\n\nNULL = _Null()\n\nUTF8 = 'UTF-8'\n",
"path": "conda/base/constants.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThis file should hold almost all string literals and magic numbers used throughout the code base.\nThe exception is if a literal is specifically meant to be private to and isolated within a module.\n\"\"\"\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\nfrom logging import getLogger\nfrom platform import machine\n\nfrom enum import Enum\n\nfrom conda._vendor.auxlib.collection import frozendict\n\nlog = getLogger(__name__)\n\n\nclass Arch(Enum):\n x86 = 'x86'\n x86_64 = 'x86_64'\n armv6l = 'armv6l'\n armv7l = 'armv7l'\n ppc64le = 'ppc64le'\n\n @classmethod\n def from_sys(cls):\n return cls[machine()]\n\n\nclass Platform(Enum):\n linux = 'linux'\n win = 'win32'\n openbsd = 'openbsd5'\n osx = 'darwin'\n\n @classmethod\n def from_sys(cls):\n p = sys.platform\n if p.startswith('linux'):\n # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2',\n # and there is no essential change between Linux 2.x and 3.x, sys.platform is always\n # set to 'linux2', even on Linux 3.x. In Python 3.3 and later, the value will always\n # be set to 'linux'\n p = 'linux'\n return cls(p)\n\nmachine_bits = 8 * tuple.__itemsize__\n\n# UID = os.getuid()\nPWD = os.getcwd()\nCONDA = 'CONDA'\nCONDA_ = 'CONDA_'\nconda = 'conda'\n\nSEARCH_PATH = (\n '/etc/conda/condarc',\n '/etc/conda/condarc.d/',\n '/var/lib/conda/condarc',\n '/var/lib/conda/condarc.d/',\n '$CONDA_ROOT/condarc',\n '$CONDA_ROOT/.condarc',\n '$CONDA_ROOT/condarc.d/',\n '~/.conda/condarc',\n '~/.conda/condarc.d/',\n '~/.condarc',\n '$CONDA_PREFIX/.condarc',\n '$CONDA_PREFIX/condarc.d/',\n '$CONDARC',\n)\n\nDEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'\n\nPLATFORM_DIRECTORIES = (\"linux-64\", \"linux-32\",\n \"win-64\", \"win-32\",\n \"osx-64\", \"noarch\")\n\nRECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')\n\n\nif Platform.from_sys() is Platform.win:\n DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',\n 'https://repo.continuum.io/pkgs/pro',\n 'https://repo.continuum.io/pkgs/msys2',\n )\nelse:\n DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',\n 'https://repo.continuum.io/pkgs/pro',\n )\n\nROOT_ENV_NAME = 'root'\n\nEMPTY_LIST = ()\nEMPTY_MAP = frozendict()\n\n\nclass _Null(object):\n def __nonzero__(self):\n return False\n\n def __bool__(self):\n return False\n\nNULL = _Null()\n\nUTF8 = 'UTF-8'\n",
"path": "conda/base/constants.py"
}
] | diff --git a/conda/base/constants.py b/conda/base/constants.py
index e9ae93e008c..5816bf5174d 100644
--- a/conda/base/constants.py
+++ b/conda/base/constants.py
@@ -99,6 +99,9 @@ class _Null(object):
def __nonzero__(self):
return False
+ def __bool__(self):
+ return False
+
NULL = _Null()
UTF8 = 'UTF-8'
diff --git a/tests/base/test_constants.py b/tests/base/test_constants.py
new file mode 100644
index 00000000000..04e0adefe7a
--- /dev/null
+++ b/tests/base/test_constants.py
@@ -0,0 +1,11 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from conda.base.constants import NULL
+from logging import getLogger
+
+log = getLogger(__name__)
+
+
+def test_null_is_falsey():
+ assert not NULL
|
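The conda patch above addresses a Python 2 / Python 3 truthiness difference: `_Null` only defined `__nonzero__`, which Python 3 never calls, so the `NULL` sentinel evaluated as truthy on the reporter's Python 3.5 install; defining `__bool__` as well (exactly what the diff adds and the new `test_null_is_falsey` test asserts) makes it falsey on both interpreters. A standalone sketch of the underlying behaviour, not conda's actual module:

```python
class OnlyNonzero(object):
    def __nonzero__(self):      # consulted by Python 2 only
        return False

class NonzeroAndBool(object):
    def __nonzero__(self):      # Python 2
        return False
    def __bool__(self):         # Python 3 looks up __bool__ instead
        return False

# On Python 3 an object with neither __bool__ nor __len__ is truthy by default,
# so a sentinel like OnlyNonzero() silently flips `if not sentinel:` checks.
print(bool(OnlyNonzero()))      # True on Python 3, False on Python 2
print(bool(NonzeroAndBool()))   # False on both
```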
twisted__twisted-11622 | Release 22.8.0
This is the ticket to track the release of 22.8.0
| [
{
"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 4, 0, post=0)\n__all__ = [\"__version__\"]\n",
"path": "src/twisted/_version.py"
}
] | [
{
"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 8, 0, post=0)\n__all__ = [\"__version__\"]\n",
"path": "src/twisted/_version.py"
}
] | diff --git a/NEWS.rst b/NEWS.rst
index b91359d22fc..f96d726e59b 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,4 +1,4 @@
-This file contains the release notes for the Twisted.
+This file contains the release notes for Twisted.
It only contains high-level changes that are of interest to Twisted library users.
Users of Twisted should check the notes before planning an upgrade.
@@ -8,6 +8,135 @@ https://twisted.org/trac/ticket/<number>
.. towncrier release notes start
+Twisted 22.8.0 (2022-09-06)
+===========================
+
+Twisted 22.8.0rc1 release candidate was released on 2022-08-28 and there are
+no changes between the release candidate and the final release.
+
+
+Features
+--------
+
+- twisted.internet.defer.maybeDeferred will now schedule a coroutine result as asynchronous operation and return a Deferred that fires with the result of the coroutine. (#10327)
+- Twisted now works with Cryptography versions 37 and above, and as a result, its minimum TLS protocol version has been upgraded to TLSv1.2. (#10377)
+
+
+Bugfixes
+--------
+
+- ``twisted.internet.base.DelayedCall.__repr__`` will no longer raise ``AttributeError`` if the ``DelayedCall`` was created before debug mode was enabled. As a side-effect, ``twisted.internet.base.DelayedCall.creator`` is now defined as ``None`` in cases where previously it was undefined. (#8306)
+- twisted.internet.iocpreactor.udp now properly re-queues its listener when there is a failure condition on the read from the socket. (#10052)
+- twisted.internet.defer.inlineCallbacks no longer causes confusing StopIteration tracebacks to be added to the top of tracebacks originating in triggered callbacks (#10260)
+- The typing of twisted.internet.task.react no longer constrains the type of argv. (#10289)
+- `ContextVar.reset()` now works correctly inside `inlineCallbacks` functions and coroutines. (#10301)
+- Implement twisted.python.failure._Code.co_positions for compatibility with Python 3.11. (#10336)
+- twisted.pair.tuntap._TUNSETIFF and ._TUNGETIFF values are now correct parisc, powerpc and sparc architectures. (#10339)
+
+
+Improved Documentation
+----------------------
+
+- The release process documentation was updated to include information about
+ doing a security release. (#10324)
+- The development and policy documentation pages were moved into the same
+ directory that is now placed inside the documentation root directory. (#11575)
+
+
+Deprecations and Removals
+-------------------------
+
+- Python 3.6 is no longer supported.
+ Twisted 22.4.0 was the last version with support for Python 3.6. (#10304)
+
+
+Misc
+----
+
+- #9437, #9495, #10066, #10275, #10318, #10325, #10328, #10329, #10331, #10349, #10350, #10352, #10353, #11561, #11564, #11567, #11569, #11585, #11592, #11600, #11606, #11610, #11612, #11614
+
+
+Conch
+-----
+
+Bugfixes
+~~~~~~~~
+
+- twisted.conch.checkers.UNIXAuthorizedKeysFiles now uses the filesystem encoding to decode usernames before looking them up in the password database, so it works on Python 3. (#10286)
+- twisted.conch.ssh.SSHSession.request_env no longer gives a warning if the session does not implement ISessionSetEnv. (#10347)
+- The cftp command line (and `twisted.conch.scripts.cftp.SSHSession.extReceived`) no longer raises an unhandled error when receiving data on stderr from the server. (#10351)
+
+
+Misc
+~~~~
+
+- #10330
+
+
+Web
+---
+
+Features
+~~~~~~~~
+
+- twisted.web.template.renderElement now combines consecutive, sychronously-available bytes up to a fixed size limit into a single string to pass to ``IRequest.write`` instead of passing them all separately. This greatly reduces the number of chunks in the response. (#10348)
+
+
+Misc
+~~~~
+
+- #11604
+
+
+Mail
+----
+
+Bugfixes
+~~~~~~~~
+
+- twisted.mail.maildir.MaildirMessage now use byte header to avoid incompatibility with the FileMessage which writes bytes not strings lines to a message file (#10244)
+
+
+Words
+-----
+
+Bugfixes
+~~~~~~~~
+
+- twisted.words.protocols.irc.IRCClient now splits overly long NOTICEs and NOTICEs containing \n before sending. (#10285)
+
+
+Names
+-----
+
+Bugfixes
+~~~~~~~~
+
+- twisted.names.dns logs unparsable messages rather than generating a Failure instance (#9723)
+
+
+Trial
+-----
+
+Features
+~~~~~~~~
+
+- ``trial --jobs=N --exitfirst`` is now supported. (#9654)
+
+
+Bugfixes
+~~~~~~~~
+
+- `trial --jobs=N --until-failure ...` now reports the correct number of tests run after each iteration. (#10311)
+- ``trial -jN ...`` will now pass errors and failures to ``IReporter`` methods as instances of ``WorkerException`` instead of ``str``. (#10333)
+
+
+Misc
+~~~~
+
+- #10319, #10338, #11571
+
+
Twisted 22.4.0 (2022-04-11)
===========================
diff --git a/setup.cfg b/setup.cfg
index 18c01766fab..3b0419da96c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -56,7 +56,7 @@ test =
; List of dependencies required to build the documentation and test the
; release scripts and process.
dev_release =
- towncrier ~= 19.2
+ towncrier ~= 22.8
pydoctor ~= 22.7.0
sphinx-rtd-theme ~= 0.5
readthedocs-sphinx-ext ~= 2.1
diff --git a/src/twisted/_version.py b/src/twisted/_version.py
index 06ca17532f8..9f5c29361b9 100644
--- a/src/twisted/_version.py
+++ b/src/twisted/_version.py
@@ -7,5 +7,5 @@
from incremental import Version
-__version__ = Version("Twisted", 22, 4, 0, post=0)
+__version__ = Version("Twisted", 22, 8, 0, post=0)
__all__ = ["__version__"]
diff --git a/src/twisted/conch/newsfragments/10286.bugfix b/src/twisted/conch/newsfragments/10286.bugfix
deleted file mode 100644
index fc42c4c2df1..00000000000
--- a/src/twisted/conch/newsfragments/10286.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.conch.checkers.UNIXAuthorizedKeysFiles now uses the filesystem encoding to decode usernames before looking them up in the password database, so it works on Python 3.
diff --git a/src/twisted/conch/newsfragments/10330.misc b/src/twisted/conch/newsfragments/10330.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/conch/newsfragments/10347.bugfix b/src/twisted/conch/newsfragments/10347.bugfix
deleted file mode 100644
index 476b2788100..00000000000
--- a/src/twisted/conch/newsfragments/10347.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.conch.ssh.SSHSession.request_env no longer gives a warning if the session does not implement ISessionSetEnv.
diff --git a/src/twisted/conch/newsfragments/10351.bugfix b/src/twisted/conch/newsfragments/10351.bugfix
deleted file mode 100644
index 91da01e1c22..00000000000
--- a/src/twisted/conch/newsfragments/10351.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-The cftp command line (and `twisted.conch.scripts.cftp.SSHSession.extReceived`) no longer raises an unhandled error when receiving data on stderr from the server.
diff --git a/src/twisted/mail/newsfragments/10244.bugfix b/src/twisted/mail/newsfragments/10244.bugfix
deleted file mode 100644
index 5c6970b2cf1..00000000000
--- a/src/twisted/mail/newsfragments/10244.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.mail.maildir.MaildirMessage now use byte header to avoid incompatibility with the FileMessage which writes bytes not strings lines to a message file
diff --git a/src/twisted/names/newsfragments/9723.bugfix b/src/twisted/names/newsfragments/9723.bugfix
deleted file mode 100644
index 8f8ed1d9640..00000000000
--- a/src/twisted/names/newsfragments/9723.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.names.dns logs unparsable messages rather than generating a Failure instance
diff --git a/src/twisted/newsfragments/10052.bugfix b/src/twisted/newsfragments/10052.bugfix
deleted file mode 100644
index e61ef8131d4..00000000000
--- a/src/twisted/newsfragments/10052.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.internet.iocpreactor.udp now properly re-queues its listener when there is a failure condition on the read from the socket.
diff --git a/src/twisted/newsfragments/10066.misc b/src/twisted/newsfragments/10066.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10260.bugfix b/src/twisted/newsfragments/10260.bugfix
deleted file mode 100644
index 11e6320037d..00000000000
--- a/src/twisted/newsfragments/10260.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.internet.defer.inlineCallbacks no longer causes confusing StopIteration tracebacks to be added to the top of tracebacks originating in triggered callbacks
diff --git a/src/twisted/newsfragments/10275.misc b/src/twisted/newsfragments/10275.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10289.bugfix b/src/twisted/newsfragments/10289.bugfix
deleted file mode 100644
index ea4d85df4b1..00000000000
--- a/src/twisted/newsfragments/10289.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-The typing of twisted.internet.task.react no longer constrains the type of argv.
diff --git a/src/twisted/newsfragments/10301.bugfix b/src/twisted/newsfragments/10301.bugfix
deleted file mode 100644
index 587bd7d7422..00000000000
--- a/src/twisted/newsfragments/10301.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-`ContextVar.reset()` now works correctly inside `inlineCallbacks` functions and coroutines.
diff --git a/src/twisted/newsfragments/10304.removal b/src/twisted/newsfragments/10304.removal
deleted file mode 100644
index 11457cc802b..00000000000
--- a/src/twisted/newsfragments/10304.removal
+++ /dev/null
@@ -1,2 +0,0 @@
-Python 3.6 is no longer supported.
-Twisted 22.4.0 was the last version with support for Python 3.6.
diff --git a/src/twisted/newsfragments/10318.misc b/src/twisted/newsfragments/10318.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10324.doc b/src/twisted/newsfragments/10324.doc
deleted file mode 100644
index f383ad7775f..00000000000
--- a/src/twisted/newsfragments/10324.doc
+++ /dev/null
@@ -1,2 +0,0 @@
-The release process documentation was updated to include information about
-doing a security release.
diff --git a/src/twisted/newsfragments/10325.misc b/src/twisted/newsfragments/10325.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10327.feature b/src/twisted/newsfragments/10327.feature
deleted file mode 100644
index cd3db5bbf4d..00000000000
--- a/src/twisted/newsfragments/10327.feature
+++ /dev/null
@@ -1 +0,0 @@
-twisted.internet.defer.maybeDeferred will now schedule a coroutine result as asynchronous operation and return a Deferred that fires with the result of the coroutine.
diff --git a/src/twisted/newsfragments/10328.misc b/src/twisted/newsfragments/10328.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10329.misc b/src/twisted/newsfragments/10329.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10331.misc b/src/twisted/newsfragments/10331.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10336.bugfix b/src/twisted/newsfragments/10336.bugfix
deleted file mode 100644
index a7ffab3627d..00000000000
--- a/src/twisted/newsfragments/10336.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Implement twisted.python.failure._Code.co_positions for compatibility with Python 3.11.
diff --git a/src/twisted/newsfragments/10339.bugfix b/src/twisted/newsfragments/10339.bugfix
deleted file mode 100644
index 7d543b4ec44..00000000000
--- a/src/twisted/newsfragments/10339.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.pair.tuntap._TUNSETIFF and ._TUNGETIFF values are now correct parisc, powerpc and sparc architectures.
diff --git a/src/twisted/newsfragments/10349.misc b/src/twisted/newsfragments/10349.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10350.misc b/src/twisted/newsfragments/10350.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10352.misc b/src/twisted/newsfragments/10352.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10353.misc b/src/twisted/newsfragments/10353.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10377.feature b/src/twisted/newsfragments/10377.feature
deleted file mode 100644
index 79cd5927b1b..00000000000
--- a/src/twisted/newsfragments/10377.feature
+++ /dev/null
@@ -1 +0,0 @@
-Twisted now works with Cryptography versions 37 and above, and as a result its minimum TLS protocol version has been upgraded to TLSv1.2.
diff --git a/src/twisted/newsfragments/11561.misc b/src/twisted/newsfragments/11561.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11564.misc b/src/twisted/newsfragments/11564.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11567.misc b/src/twisted/newsfragments/11567.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11569.misc b/src/twisted/newsfragments/11569.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11575.doc b/src/twisted/newsfragments/11575.doc
deleted file mode 100644
index b3b4c29a485..00000000000
--- a/src/twisted/newsfragments/11575.doc
+++ /dev/null
@@ -1,2 +0,0 @@
-The development and policy documentation pages were moved into the same
-directory that is now placed inside the documentation root directory.
diff --git a/src/twisted/newsfragments/11585.misc b/src/twisted/newsfragments/11585.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11592.misc b/src/twisted/newsfragments/11592.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11600.misc b/src/twisted/newsfragments/11600.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11606.misc b/src/twisted/newsfragments/11606.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11610.misc b/src/twisted/newsfragments/11610.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11612.misc b/src/twisted/newsfragments/11612.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/11614.misc b/src/twisted/newsfragments/11614.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/8306.bugfix b/src/twisted/newsfragments/8306.bugfix
deleted file mode 100644
index 455bc564851..00000000000
--- a/src/twisted/newsfragments/8306.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-``twisted.internet.base.DelayedCall.__repr__`` will no longer raise ``AttributeError`` if the ``DelayedCall`` was created before debug mode was enabled. As a side-effect, ``twisted.internet.base.DelayedCall.creator`` is now defined as ``None`` in cases where previously it was undefined.
diff --git a/src/twisted/newsfragments/9437.misc b/src/twisted/newsfragments/9437.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/9495.misc b/src/twisted/newsfragments/9495.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/trial/newsfragments/0.misc b/src/twisted/trial/newsfragments/0.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/trial/newsfragments/10311.bugfix b/src/twisted/trial/newsfragments/10311.bugfix
deleted file mode 100644
index 008db2b635a..00000000000
--- a/src/twisted/trial/newsfragments/10311.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-`trial --jobs=N --until-failure ...` now reports the correct number of tests run after each iteration.
diff --git a/src/twisted/trial/newsfragments/10319.misc b/src/twisted/trial/newsfragments/10319.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/trial/newsfragments/10333.bugfix b/src/twisted/trial/newsfragments/10333.bugfix
deleted file mode 100644
index 062ce1460dc..00000000000
--- a/src/twisted/trial/newsfragments/10333.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-``trial -jN ...`` will now pass errors and failures to ``IReporter`` methods as instances of ``WorkerException`` instead of ``str``.
diff --git a/src/twisted/trial/newsfragments/10338.misc b/src/twisted/trial/newsfragments/10338.misc
deleted file mode 100644
index 8b137891791..00000000000
--- a/src/twisted/trial/newsfragments/10338.misc
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/src/twisted/trial/newsfragments/11571.misc b/src/twisted/trial/newsfragments/11571.misc
deleted file mode 100644
index 8b137891791..00000000000
--- a/src/twisted/trial/newsfragments/11571.misc
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/src/twisted/trial/newsfragments/9654.feature b/src/twisted/trial/newsfragments/9654.feature
deleted file mode 100644
index 328c3da8f4f..00000000000
--- a/src/twisted/trial/newsfragments/9654.feature
+++ /dev/null
@@ -1 +0,0 @@
-``trial --jobs=N --exitfirst`` is now supported.
diff --git a/src/twisted/web/newsfragments/10348.feature b/src/twisted/web/newsfragments/10348.feature
deleted file mode 100644
index 20604a707f7..00000000000
--- a/src/twisted/web/newsfragments/10348.feature
+++ /dev/null
@@ -1 +0,0 @@
-twisted.web.template.renderElement now combines consecutive, sychronously-available bytes up to a fixed size limit into a single string to pass to ``IRequest.write`` instead of passing them all separately. This greatly reduces the number of chunks in the response.
diff --git a/src/twisted/web/newsfragments/11604.misc b/src/twisted/web/newsfragments/11604.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/words/newsfragments/10285.bugfix b/src/twisted/words/newsfragments/10285.bugfix
deleted file mode 100644
index 3caac051b3b..00000000000
--- a/src/twisted/words/newsfragments/10285.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.words.protocols.irc.IRCClient now splits overly long NOTICEs and NOTICEs containing \n before sending.
|
dbt-labs__dbt-core-2057 | Create -t flag as alias for dbt run --target
I love the `-m` flag as an alias for `--models` (in #1161) but now it's completely messed up my muscle memory for `--target`! I'm now repeatedly typing `-target` instead of `--target` when I want to run dbt on production.
| [
{
"content": "from dbt.logger import GLOBAL_LOGGER as logger, log_cache_events, log_manager\n\nimport argparse\nimport os.path\nimport sys\nimport traceback\nfrom contextlib import contextmanager\n\nimport dbt.version\nimport dbt.flags as flags\nimport dbt.task.run as run_task\nimport dbt.task.compile as compile_task\nimport dbt.task.debug as debug_task\nimport dbt.task.clean as clean_task\nimport dbt.task.deps as deps_task\nimport dbt.task.init as init_task\nimport dbt.task.seed as seed_task\nimport dbt.task.test as test_task\nimport dbt.task.snapshot as snapshot_task\nimport dbt.task.generate as generate_task\nimport dbt.task.serve as serve_task\nimport dbt.task.freshness as freshness_task\nimport dbt.task.run_operation as run_operation_task\nfrom dbt.task.list import ListTask\nfrom dbt.task.rpc.server import RPCServerTask\nfrom dbt.adapters.factory import reset_adapters, cleanup_connections\n\nimport dbt.tracking\nimport dbt.ui.printer\nimport dbt.deprecations\nimport dbt.profiler\n\nfrom dbt.utils import ExitCodes\nfrom dbt.config import PROFILES_DIR, read_user_config\nfrom dbt.exceptions import RuntimeException\n\n\nclass DBTVersion(argparse.Action):\n \"\"\"This is very very similar to the builtin argparse._Version action,\n except it just calls dbt.version.get_version_information().\n \"\"\"\n def __init__(self,\n option_strings,\n version=None,\n dest=argparse.SUPPRESS,\n default=argparse.SUPPRESS,\n help=\"show program's version number and exit\"):\n super().__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n\n def __call__(self, parser, namespace, values, option_string=None):\n formatter = argparse.RawTextHelpFormatter(prog=parser.prog)\n formatter.add_text(dbt.version.get_version_information())\n parser.exit(message=formatter.format_help())\n\n\nclass DBTArgumentParser(argparse.ArgumentParser):\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.register('action', 'dbtversion', DBTVersion)\n\n\nclass RPCArgumentParser(DBTArgumentParser):\n def exit(self, status=0, message=None):\n if status == 0:\n return\n else:\n raise TypeError(message)\n\n\ndef main(args=None):\n if args is None:\n args = sys.argv[1:]\n with log_manager.applicationbound():\n try:\n results, succeeded = handle_and_check(args)\n if succeeded:\n exit_code = ExitCodes.Success.value\n else:\n exit_code = ExitCodes.ModelError.value\n\n except KeyboardInterrupt:\n logger.info(\"ctrl-c\")\n exit_code = ExitCodes.UnhandledError.value\n\n # This can be thrown by eg. 
argparse\n except SystemExit as e:\n exit_code = e.code\n\n except BaseException as e:\n logger.warning(\"Encountered an error:\")\n logger.warning(str(e))\n\n if log_manager.initialized:\n logger.debug(traceback.format_exc())\n elif not isinstance(e, RuntimeException):\n # if it did not come from dbt proper and the logger is not\n # initialized (so there's no safe path to log to), log the\n # stack trace at error level.\n logger.error(traceback.format_exc())\n exit_code = ExitCodes.UnhandledError.value\n\n sys.exit(exit_code)\n\n\n# here for backwards compatibility\ndef handle(args):\n res, success = handle_and_check(args)\n return res\n\n\ndef initialize_config_values(parsed):\n \"\"\"Given the parsed args, initialize the dbt tracking code.\n\n It would be nice to re-use this profile later on instead of parsing it\n twice, but dbt's intialization is not structured in a way that makes that\n easy.\n \"\"\"\n cfg = read_user_config(parsed.profiles_dir)\n cfg.set_values(parsed.profiles_dir)\n\n\n@contextmanager\ndef adapter_management():\n reset_adapters()\n try:\n yield\n finally:\n cleanup_connections()\n\n\ndef handle_and_check(args):\n with log_manager.applicationbound():\n parsed = parse_args(args)\n\n # we've parsed the args - we can now decide if we're debug or not\n if parsed.debug:\n log_manager.set_debug()\n\n profiler_enabled = False\n\n if parsed.record_timing_info:\n profiler_enabled = True\n\n with dbt.profiler.profiler(\n enable=profiler_enabled,\n outfile=parsed.record_timing_info\n ):\n\n initialize_config_values(parsed)\n\n with adapter_management():\n\n task, res = run_from_args(parsed)\n success = task.interpret_results(res)\n\n return res, success\n\n\n@contextmanager\ndef track_run(task):\n dbt.tracking.track_invocation_start(config=task.config, args=task.args)\n try:\n yield\n dbt.tracking.track_invocation_end(\n config=task.config, args=task.args, result_type=\"ok\"\n )\n except (dbt.exceptions.NotImplementedException,\n dbt.exceptions.FailedToConnectException) as e:\n logger.error('ERROR: {}'.format(e))\n dbt.tracking.track_invocation_end(\n config=task.config, args=task.args, result_type=\"error\"\n )\n except Exception:\n dbt.tracking.track_invocation_end(\n config=task.config, args=task.args, result_type=\"error\"\n )\n raise\n finally:\n dbt.tracking.flush()\n\n\ndef run_from_args(parsed):\n log_cache_events(getattr(parsed, 'log_cache_events', False))\n flags.set_from_args(parsed)\n\n parsed.cls.pre_init_hook(parsed)\n # we can now use the logger for stdout\n\n logger.info(\"Running with dbt{}\".format(dbt.version.installed))\n\n # this will convert DbtConfigErrors into RuntimeExceptions\n task = parsed.cls.from_args(args=parsed)\n logger.debug(\"running dbt with arguments {parsed}\", parsed=str(parsed))\n\n log_path = None\n if task.config is not None:\n log_path = getattr(task.config, 'log_path', None)\n # we can finally set the file logger up\n log_manager.set_path(log_path)\n logger.debug(\"Tracking: {}\".format(dbt.tracking.active_user.state()))\n\n results = None\n\n with track_run(task):\n results = task.run()\n\n return task, results\n\n\ndef _build_base_subparser():\n base_subparser = argparse.ArgumentParser(add_help=False)\n\n base_subparser.add_argument(\n '--project-dir',\n default=None,\n type=str,\n help='''\n Which directory to look in for the dbt_project.yml file.\n Default is the current working directory and its parents.\n '''\n )\n\n base_subparser.add_argument(\n '--profiles-dir',\n default=PROFILES_DIR,\n type=str,\n help='''\n Which 
directory to look in for the profiles.yml file. Default = {}\n '''.format(PROFILES_DIR)\n )\n\n base_subparser.add_argument(\n '--profile',\n required=False,\n type=str,\n help='''\n Which profile to load. Overrides setting in dbt_project.yml.\n '''\n )\n\n base_subparser.add_argument(\n '--target',\n default=None,\n type=str,\n help='''\n Which target to load for the given profile\n ''',\n )\n\n base_subparser.add_argument(\n '--vars',\n type=str,\n default='{}',\n help='''\n Supply variables to the project. This argument overrides variables\n defined in your dbt_project.yml file. This argument should be a YAML\n string, eg. '{my_variable: my_value}'\n '''\n )\n\n # if set, log all cache events. This is extremely verbose!\n base_subparser.add_argument(\n '--log-cache-events',\n action='store_true',\n help=argparse.SUPPRESS,\n )\n\n base_subparser.add_argument(\n '--bypass-cache',\n action='store_false',\n dest='use_cache',\n help='''\n If set, bypass the adapter-level cache of database state\n ''',\n )\n return base_subparser\n\n\ndef _build_docs_subparser(subparsers, base_subparser):\n docs_sub = subparsers.add_parser(\n 'docs',\n parents=[base_subparser],\n help='''\n Generate or serve the documentation website for your project.\n '''\n )\n return docs_sub\n\n\ndef _build_source_subparser(subparsers, base_subparser):\n source_sub = subparsers.add_parser(\n 'source',\n parents=[base_subparser],\n help='''\n Manage your project's sources\n ''',\n )\n return source_sub\n\n\ndef _build_init_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'init',\n parents=[base_subparser],\n help='''\n Initialize a new DBT project.\n '''\n )\n sub.add_argument(\n 'project_name',\n type=str,\n help='''\n Name of the new project\n ''',\n )\n sub.set_defaults(cls=init_task.InitTask, which='init', rpc_method=None)\n return sub\n\n\ndef _build_clean_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'clean',\n parents=[base_subparser],\n help='''\n Delete all folders in the clean-targets list\n (usually the dbt_modules and target directories.)\n '''\n )\n sub.set_defaults(cls=clean_task.CleanTask, which='clean', rpc_method=None)\n return sub\n\n\ndef _build_debug_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'debug',\n parents=[base_subparser],\n help='''\n Show some helpful information about dbt for debugging.\n\n Not to be confused with the --debug option which increases verbosity.\n '''\n )\n sub.add_argument(\n '--config-dir',\n action='store_true',\n help='''\n If specified, DBT will show path information for this project\n '''\n )\n sub.set_defaults(cls=debug_task.DebugTask, which='debug', rpc_method=None)\n return sub\n\n\ndef _build_deps_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'deps',\n parents=[base_subparser],\n help='''\n Pull the most recent version of the dependencies listed in packages.yml\n '''\n )\n sub.set_defaults(cls=deps_task.DepsTask, which='deps', rpc_method='deps')\n return sub\n\n\ndef _build_snapshot_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'snapshot',\n parents=[base_subparser],\n help='''\n Execute snapshots defined in your project\n ''',\n )\n sub.add_argument(\n '--threads',\n type=int,\n required=False,\n help='''\n Specify number of threads to use while snapshotting tables.\n Overrides settings in profiles.yml.\n '''\n )\n sub.set_defaults(cls=snapshot_task.SnapshotTask, which='snapshot',\n rpc_method='snapshot')\n return sub\n\n\ndef 
_build_run_subparser(subparsers, base_subparser):\n run_sub = subparsers.add_parser(\n 'run',\n parents=[base_subparser],\n help='''\n Compile SQL and execute against the current target database.\n ''')\n run_sub.set_defaults(cls=run_task.RunTask, which='run', rpc_method='run')\n return run_sub\n\n\ndef _build_compile_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'compile',\n parents=[base_subparser],\n help='''\n Generates executable SQL from source model, test, and analysis files.\n Compiled SQL files are written to the target/ directory.\n '''\n )\n sub.set_defaults(cls=compile_task.CompileTask, which='compile',\n rpc_method='compile')\n sub.add_argument('--parse-only', action='store_true')\n return sub\n\n\ndef _build_docs_generate_subparser(subparsers, base_subparser):\n # it might look like docs_sub is the correct parents entry, but that\n # will cause weird errors about 'conflicting option strings'.\n generate_sub = subparsers.add_parser('generate', parents=[base_subparser])\n generate_sub.set_defaults(cls=generate_task.GenerateTask,\n which='generate', rpc_method='docs.generate')\n generate_sub.add_argument(\n '--no-compile',\n action='store_false',\n dest='compile',\n help='''\n Do not run \"dbt compile\" as part of docs generation\n ''',\n )\n return generate_sub\n\n\ndef _add_selection_arguments(*subparsers, **kwargs):\n models_name = kwargs.get('models_name', 'models')\n for sub in subparsers:\n sub.add_argument(\n '-{}'.format(models_name[0]),\n '--{}'.format(models_name),\n dest='models',\n required=False,\n nargs='+',\n help='''\n Specify the models to include.\n ''',\n )\n sub.add_argument(\n '--exclude',\n required=False,\n nargs='+',\n help='''\n Specify the models to exclude.\n ''',\n )\n\n\ndef _add_table_mutability_arguments(*subparsers):\n for sub in subparsers:\n sub.add_argument(\n '--full-refresh',\n action='store_true',\n help='''\n If specified, DBT will drop incremental models and\n fully-recalculate the incremental table from the model definition.\n '''\n )\n\n\ndef _add_common_arguments(*subparsers):\n for sub in subparsers:\n sub.add_argument(\n '--threads',\n type=int,\n required=False,\n help='''\n Specify number of threads to use while executing models. 
Overrides\n settings in profiles.yml.\n '''\n )\n sub.add_argument(\n '--no-version-check',\n dest='version_check',\n action='store_false',\n help='''\n If set, skip ensuring dbt's version matches the one specified in\n the dbt_project.yml file ('require-dbt-version')\n '''\n )\n\n\ndef _build_seed_subparser(subparsers, base_subparser):\n seed_sub = subparsers.add_parser(\n 'seed',\n parents=[base_subparser],\n help='''\n Load data from csv files into your data warehouse.\n ''',\n )\n seed_sub.add_argument(\n '--full-refresh',\n action='store_true',\n help='''\n Drop existing seed tables and recreate them\n ''',\n )\n seed_sub.add_argument(\n '--show',\n action='store_true',\n help='''\n Show a sample of the loaded data in the terminal\n '''\n )\n seed_sub.set_defaults(cls=seed_task.SeedTask, which='seed',\n rpc_method='seed')\n return seed_sub\n\n\ndef _build_docs_serve_subparser(subparsers, base_subparser):\n serve_sub = subparsers.add_parser('serve', parents=[base_subparser])\n serve_sub.add_argument(\n '--port',\n default=8080,\n type=int,\n help='''\n Specify the port number for the docs server.\n '''\n )\n serve_sub.set_defaults(cls=serve_task.ServeTask, which='serve',\n rpc_method=None)\n return serve_sub\n\n\ndef _build_test_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'test',\n parents=[base_subparser],\n help='''\n Runs tests on data in deployed models. Run this after `dbt run`\n '''\n )\n sub.add_argument(\n '--data',\n action='store_true',\n help='''\n Run data tests defined in \"tests\" directory.\n '''\n )\n sub.add_argument(\n '--schema',\n action='store_true',\n help='''\n Run constraint validations from schema.yml files\n '''\n )\n\n sub.set_defaults(cls=test_task.TestTask, which='test', rpc_method='test')\n return sub\n\n\ndef _build_source_snapshot_freshness_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'snapshot-freshness',\n parents=[base_subparser],\n help='''\n Snapshots the current freshness of the project's sources\n ''',\n )\n sub.add_argument(\n '-s',\n '--select',\n required=False,\n nargs='+',\n help='''\n Specify the sources to snapshot freshness\n ''',\n dest='selected'\n )\n sub.add_argument(\n '-o',\n '--output',\n required=False,\n help='''\n Specify the output path for the json report. By default, outputs to\n target/sources.json\n '''\n )\n sub.add_argument(\n '--threads',\n type=int,\n required=False,\n help='''\n Specify number of threads to use. 
Overrides settings in profiles.yml\n '''\n )\n sub.set_defaults(\n cls=freshness_task.FreshnessTask,\n which='snapshot-freshness',\n rpc_method='snapshot-freshness',\n )\n return sub\n\n\ndef _build_rpc_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'rpc',\n parents=[base_subparser],\n help='''\n Start a json-rpc server\n ''',\n )\n sub.add_argument(\n '--host',\n default='0.0.0.0',\n help='''\n Specify the host to listen on for the rpc server.\n ''',\n )\n sub.add_argument(\n '--port',\n default=8580,\n type=int,\n help='''\n Specify the port number for the rpc server.\n ''',\n )\n sub.set_defaults(cls=RPCServerTask, which='rpc', rpc_method=None)\n # the rpc task does a 'compile', so we need these attributes to exist, but\n # we don't want users to be allowed to set them.\n sub.set_defaults(models=None, exclude=None)\n return sub\n\n\ndef _build_list_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'list',\n parents=[base_subparser],\n help='''\n List the resources in your project\n ''',\n aliases=['ls'],\n )\n sub.set_defaults(cls=ListTask, which='list', rpc_method=None)\n resource_values = list(ListTask.ALL_RESOURCE_VALUES) + ['default', 'all']\n sub.add_argument('--resource-type',\n choices=resource_values,\n action='append',\n default=[],\n dest='resource_types')\n sub.add_argument('--output',\n choices=['json', 'name', 'path', 'selector'],\n default='selector')\n sub.add_argument(\n '-s',\n '--select',\n required=False,\n nargs='+',\n metavar='SELECTOR',\n help='''\n Specify the nodes to select.\n ''',\n )\n sub.add_argument(\n '-m',\n '--models',\n required=False,\n nargs='+',\n metavar='SELECTOR',\n help='''\n Specify the models to select and set the resource-type to 'model'.\n Mutually exclusive with '--select' (or '-s') and '--resource-type'\n ''',\n )\n sub.add_argument(\n '--exclude',\n required=False,\n nargs='+',\n metavar='SELECTOR',\n help='''\n Specify the models to exclude.\n '''\n )\n return sub\n\n\ndef _build_run_operation_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'run-operation',\n parents=[base_subparser],\n help='''\n Run the named macro with any supplied arguments.\n '''\n )\n sub.add_argument(\n 'macro',\n help='''\n Specify the macro to invoke. dbt will call this macro with the supplied\n arguments and then exit\n ''',\n )\n sub.add_argument(\n '--args',\n type=str,\n default='{}',\n help='''\n Supply arguments to the macro. This dictionary will be mapped to the\n keyword arguments defined in the selected macro. This argument should\n be a YAML string, eg. '{my_variable: my_value}'\n '''\n )\n sub.set_defaults(cls=run_operation_task.RunOperationTask,\n which='run-operation', rpc_method='run-operation')\n return sub\n\n\ndef parse_args(args, cls=DBTArgumentParser):\n p = cls(\n prog='dbt',\n description='''\n An ELT tool for managing your SQL transformations and data models.\n For more documentation on these commands, visit: docs.getdbt.com\n ''',\n epilog='''\n Specify one of these sub-commands and you can find more help from\n there.\n '''\n )\n\n p.add_argument(\n '--version',\n action='dbtversion',\n help='''\n Show version information\n ''')\n\n p.add_argument(\n '-r',\n '--record-timing-info',\n default=None,\n type=str,\n help='''\n When this option is passed, dbt will output low-level timing stats to\n the specified file. 
Example: `--record-timing-info output.profile`\n '''\n )\n\n p.add_argument(\n '-d',\n '--debug',\n action='store_true',\n help='''\n Display debug logging during dbt execution. Useful for debugging and\n making bug reports.\n '''\n )\n\n p.add_argument(\n '--log-format',\n choices=['text', 'json', 'default'],\n default='default',\n help='''Specify the log format, overriding the command's default.'''\n )\n\n p.add_argument(\n '--no-write-json',\n action='store_false',\n dest='write_json',\n help='''\n If set, skip writing the manifest and run_results.json files to disk\n '''\n )\n\n p.add_argument(\n '-S',\n '--strict',\n action='store_true',\n help='''\n Run schema validations at runtime. This will surface bugs in dbt, but\n may incur a performance penalty.\n '''\n )\n\n p.add_argument(\n '--warn-error',\n action='store_true',\n help='''\n If dbt would normally warn, instead raise an exception. Examples\n include --models that selects nothing, deprecations, configurations\n with no associated models, invalid test configurations, and missing\n sources/refs in tests.\n '''\n )\n\n partial_flag = p.add_mutually_exclusive_group()\n partial_flag.add_argument(\n '--partial-parse',\n action='store_const',\n const=True,\n dest='partial_parse',\n default=None,\n help='''\n Allow for partial parsing by looking for and writing to a pickle file\n in the target directory. This overrides the user configuration file.\n\n WARNING: This can result in unexpected behavior if you use env_var()!\n '''\n )\n\n partial_flag.add_argument(\n '--no-partial-parse',\n action='store_const',\n const=False,\n default=None,\n dest='partial_parse',\n help='''\n Disallow partial parsing. This overrides the user configuration file.\n '''\n )\n\n # if set, run dbt in single-threaded mode: thread count is ignored, and\n # calls go through `map` instead of the thread pool. This is useful for\n # getting performance information about aspects of dbt that normally run in\n # a thread, as the profiler ignores child threads. Users should really\n # never use this.\n p.add_argument(\n '--single-threaded',\n action='store_true',\n help=argparse.SUPPRESS,\n )\n\n # if set, extract all models and blocks with the jinja block extractor, and\n # verify that we don't fail anywhere the actual jinja parser passes. 
The\n # reverse (passing files that ends up failing jinja) is fine.\n p.add_argument(\n '--test-new-parser',\n action='store_true',\n help=argparse.SUPPRESS\n )\n\n subs = p.add_subparsers(title=\"Available sub-commands\")\n\n base_subparser = _build_base_subparser()\n\n # make the subcommands that have their own subcommands\n docs_sub = _build_docs_subparser(subs, base_subparser)\n docs_subs = docs_sub.add_subparsers(title=\"Available sub-commands\")\n source_sub = _build_source_subparser(subs, base_subparser)\n source_subs = source_sub.add_subparsers(title=\"Available sub-commands\")\n\n _build_init_subparser(subs, base_subparser)\n _build_clean_subparser(subs, base_subparser)\n _build_debug_subparser(subs, base_subparser)\n _build_deps_subparser(subs, base_subparser)\n _build_list_subparser(subs, base_subparser)\n\n snapshot_sub = _build_snapshot_subparser(subs, base_subparser)\n rpc_sub = _build_rpc_subparser(subs, base_subparser)\n run_sub = _build_run_subparser(subs, base_subparser)\n compile_sub = _build_compile_subparser(subs, base_subparser)\n generate_sub = _build_docs_generate_subparser(docs_subs, base_subparser)\n test_sub = _build_test_subparser(subs, base_subparser)\n seed_sub = _build_seed_subparser(subs, base_subparser)\n # --threads, --no-version-check\n _add_common_arguments(run_sub, compile_sub, generate_sub, test_sub,\n rpc_sub, seed_sub)\n # --models, --exclude\n _add_selection_arguments(run_sub, compile_sub, generate_sub, test_sub)\n _add_selection_arguments(snapshot_sub, models_name='select')\n # --full-refresh\n _add_table_mutability_arguments(run_sub, compile_sub)\n\n _build_docs_serve_subparser(docs_subs, base_subparser)\n _build_source_snapshot_freshness_subparser(source_subs, base_subparser)\n _build_run_operation_subparser(subs, base_subparser)\n\n if len(args) == 0:\n p.print_help()\n sys.exit(1)\n\n parsed = p.parse_args(args)\n parsed.profiles_dir = os.path.expanduser(parsed.profiles_dir)\n\n if not hasattr(parsed, 'which'):\n # the user did not provide a valid subcommand. trigger the help message\n # and exit with a error\n p.print_help()\n p.exit(1)\n\n return parsed\n",
"path": "core/dbt/main.py"
}
] | [
{
"content": "from dbt.logger import GLOBAL_LOGGER as logger, log_cache_events, log_manager\n\nimport argparse\nimport os.path\nimport sys\nimport traceback\nfrom contextlib import contextmanager\n\nimport dbt.version\nimport dbt.flags as flags\nimport dbt.task.run as run_task\nimport dbt.task.compile as compile_task\nimport dbt.task.debug as debug_task\nimport dbt.task.clean as clean_task\nimport dbt.task.deps as deps_task\nimport dbt.task.init as init_task\nimport dbt.task.seed as seed_task\nimport dbt.task.test as test_task\nimport dbt.task.snapshot as snapshot_task\nimport dbt.task.generate as generate_task\nimport dbt.task.serve as serve_task\nimport dbt.task.freshness as freshness_task\nimport dbt.task.run_operation as run_operation_task\nfrom dbt.task.list import ListTask\nfrom dbt.task.rpc.server import RPCServerTask\nfrom dbt.adapters.factory import reset_adapters, cleanup_connections\n\nimport dbt.tracking\nimport dbt.ui.printer\nimport dbt.deprecations\nimport dbt.profiler\n\nfrom dbt.utils import ExitCodes\nfrom dbt.config import PROFILES_DIR, read_user_config\nfrom dbt.exceptions import RuntimeException\n\n\nclass DBTVersion(argparse.Action):\n \"\"\"This is very very similar to the builtin argparse._Version action,\n except it just calls dbt.version.get_version_information().\n \"\"\"\n def __init__(self,\n option_strings,\n version=None,\n dest=argparse.SUPPRESS,\n default=argparse.SUPPRESS,\n help=\"show program's version number and exit\"):\n super().__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n\n def __call__(self, parser, namespace, values, option_string=None):\n formatter = argparse.RawTextHelpFormatter(prog=parser.prog)\n formatter.add_text(dbt.version.get_version_information())\n parser.exit(message=formatter.format_help())\n\n\nclass DBTArgumentParser(argparse.ArgumentParser):\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.register('action', 'dbtversion', DBTVersion)\n\n\nclass RPCArgumentParser(DBTArgumentParser):\n def exit(self, status=0, message=None):\n if status == 0:\n return\n else:\n raise TypeError(message)\n\n\ndef main(args=None):\n if args is None:\n args = sys.argv[1:]\n with log_manager.applicationbound():\n try:\n results, succeeded = handle_and_check(args)\n if succeeded:\n exit_code = ExitCodes.Success.value\n else:\n exit_code = ExitCodes.ModelError.value\n\n except KeyboardInterrupt:\n logger.info(\"ctrl-c\")\n exit_code = ExitCodes.UnhandledError.value\n\n # This can be thrown by eg. 
argparse\n except SystemExit as e:\n exit_code = e.code\n\n except BaseException as e:\n logger.warning(\"Encountered an error:\")\n logger.warning(str(e))\n\n if log_manager.initialized:\n logger.debug(traceback.format_exc())\n elif not isinstance(e, RuntimeException):\n # if it did not come from dbt proper and the logger is not\n # initialized (so there's no safe path to log to), log the\n # stack trace at error level.\n logger.error(traceback.format_exc())\n exit_code = ExitCodes.UnhandledError.value\n\n sys.exit(exit_code)\n\n\n# here for backwards compatibility\ndef handle(args):\n res, success = handle_and_check(args)\n return res\n\n\ndef initialize_config_values(parsed):\n \"\"\"Given the parsed args, initialize the dbt tracking code.\n\n It would be nice to re-use this profile later on instead of parsing it\n twice, but dbt's intialization is not structured in a way that makes that\n easy.\n \"\"\"\n cfg = read_user_config(parsed.profiles_dir)\n cfg.set_values(parsed.profiles_dir)\n\n\n@contextmanager\ndef adapter_management():\n reset_adapters()\n try:\n yield\n finally:\n cleanup_connections()\n\n\ndef handle_and_check(args):\n with log_manager.applicationbound():\n parsed = parse_args(args)\n\n # we've parsed the args - we can now decide if we're debug or not\n if parsed.debug:\n log_manager.set_debug()\n\n profiler_enabled = False\n\n if parsed.record_timing_info:\n profiler_enabled = True\n\n with dbt.profiler.profiler(\n enable=profiler_enabled,\n outfile=parsed.record_timing_info\n ):\n\n initialize_config_values(parsed)\n\n with adapter_management():\n\n task, res = run_from_args(parsed)\n success = task.interpret_results(res)\n\n return res, success\n\n\n@contextmanager\ndef track_run(task):\n dbt.tracking.track_invocation_start(config=task.config, args=task.args)\n try:\n yield\n dbt.tracking.track_invocation_end(\n config=task.config, args=task.args, result_type=\"ok\"\n )\n except (dbt.exceptions.NotImplementedException,\n dbt.exceptions.FailedToConnectException) as e:\n logger.error('ERROR: {}'.format(e))\n dbt.tracking.track_invocation_end(\n config=task.config, args=task.args, result_type=\"error\"\n )\n except Exception:\n dbt.tracking.track_invocation_end(\n config=task.config, args=task.args, result_type=\"error\"\n )\n raise\n finally:\n dbt.tracking.flush()\n\n\ndef run_from_args(parsed):\n log_cache_events(getattr(parsed, 'log_cache_events', False))\n flags.set_from_args(parsed)\n\n parsed.cls.pre_init_hook(parsed)\n # we can now use the logger for stdout\n\n logger.info(\"Running with dbt{}\".format(dbt.version.installed))\n\n # this will convert DbtConfigErrors into RuntimeExceptions\n task = parsed.cls.from_args(args=parsed)\n logger.debug(\"running dbt with arguments {parsed}\", parsed=str(parsed))\n\n log_path = None\n if task.config is not None:\n log_path = getattr(task.config, 'log_path', None)\n # we can finally set the file logger up\n log_manager.set_path(log_path)\n logger.debug(\"Tracking: {}\".format(dbt.tracking.active_user.state()))\n\n results = None\n\n with track_run(task):\n results = task.run()\n\n return task, results\n\n\ndef _build_base_subparser():\n base_subparser = argparse.ArgumentParser(add_help=False)\n\n base_subparser.add_argument(\n '--project-dir',\n default=None,\n type=str,\n help='''\n Which directory to look in for the dbt_project.yml file.\n Default is the current working directory and its parents.\n '''\n )\n\n base_subparser.add_argument(\n '--profiles-dir',\n default=PROFILES_DIR,\n type=str,\n help='''\n Which 
directory to look in for the profiles.yml file. Default = {}\n '''.format(PROFILES_DIR)\n )\n\n base_subparser.add_argument(\n '--profile',\n required=False,\n type=str,\n help='''\n Which profile to load. Overrides setting in dbt_project.yml.\n '''\n )\n\n base_subparser.add_argument(\n '-t',\n '--target',\n default=None,\n type=str,\n help='''\n Which target to load for the given profile\n ''',\n )\n\n base_subparser.add_argument(\n '--vars',\n type=str,\n default='{}',\n help='''\n Supply variables to the project. This argument overrides variables\n defined in your dbt_project.yml file. This argument should be a YAML\n string, eg. '{my_variable: my_value}'\n '''\n )\n\n # if set, log all cache events. This is extremely verbose!\n base_subparser.add_argument(\n '--log-cache-events',\n action='store_true',\n help=argparse.SUPPRESS,\n )\n\n base_subparser.add_argument(\n '--bypass-cache',\n action='store_false',\n dest='use_cache',\n help='''\n If set, bypass the adapter-level cache of database state\n ''',\n )\n return base_subparser\n\n\ndef _build_docs_subparser(subparsers, base_subparser):\n docs_sub = subparsers.add_parser(\n 'docs',\n parents=[base_subparser],\n help='''\n Generate or serve the documentation website for your project.\n '''\n )\n return docs_sub\n\n\ndef _build_source_subparser(subparsers, base_subparser):\n source_sub = subparsers.add_parser(\n 'source',\n parents=[base_subparser],\n help='''\n Manage your project's sources\n ''',\n )\n return source_sub\n\n\ndef _build_init_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'init',\n parents=[base_subparser],\n help='''\n Initialize a new DBT project.\n '''\n )\n sub.add_argument(\n 'project_name',\n type=str,\n help='''\n Name of the new project\n ''',\n )\n sub.set_defaults(cls=init_task.InitTask, which='init', rpc_method=None)\n return sub\n\n\ndef _build_clean_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'clean',\n parents=[base_subparser],\n help='''\n Delete all folders in the clean-targets list\n (usually the dbt_modules and target directories.)\n '''\n )\n sub.set_defaults(cls=clean_task.CleanTask, which='clean', rpc_method=None)\n return sub\n\n\ndef _build_debug_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'debug',\n parents=[base_subparser],\n help='''\n Show some helpful information about dbt for debugging.\n\n Not to be confused with the --debug option which increases verbosity.\n '''\n )\n sub.add_argument(\n '--config-dir',\n action='store_true',\n help='''\n If specified, DBT will show path information for this project\n '''\n )\n sub.set_defaults(cls=debug_task.DebugTask, which='debug', rpc_method=None)\n return sub\n\n\ndef _build_deps_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'deps',\n parents=[base_subparser],\n help='''\n Pull the most recent version of the dependencies listed in packages.yml\n '''\n )\n sub.set_defaults(cls=deps_task.DepsTask, which='deps', rpc_method='deps')\n return sub\n\n\ndef _build_snapshot_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'snapshot',\n parents=[base_subparser],\n help='''\n Execute snapshots defined in your project\n ''',\n )\n sub.add_argument(\n '--threads',\n type=int,\n required=False,\n help='''\n Specify number of threads to use while snapshotting tables.\n Overrides settings in profiles.yml.\n '''\n )\n sub.set_defaults(cls=snapshot_task.SnapshotTask, which='snapshot',\n rpc_method='snapshot')\n return sub\n\n\ndef 
_build_run_subparser(subparsers, base_subparser):\n run_sub = subparsers.add_parser(\n 'run',\n parents=[base_subparser],\n help='''\n Compile SQL and execute against the current target database.\n ''')\n run_sub.set_defaults(cls=run_task.RunTask, which='run', rpc_method='run')\n return run_sub\n\n\ndef _build_compile_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'compile',\n parents=[base_subparser],\n help='''\n Generates executable SQL from source model, test, and analysis files.\n Compiled SQL files are written to the target/ directory.\n '''\n )\n sub.set_defaults(cls=compile_task.CompileTask, which='compile',\n rpc_method='compile')\n sub.add_argument('--parse-only', action='store_true')\n return sub\n\n\ndef _build_docs_generate_subparser(subparsers, base_subparser):\n # it might look like docs_sub is the correct parents entry, but that\n # will cause weird errors about 'conflicting option strings'.\n generate_sub = subparsers.add_parser('generate', parents=[base_subparser])\n generate_sub.set_defaults(cls=generate_task.GenerateTask,\n which='generate', rpc_method='docs.generate')\n generate_sub.add_argument(\n '--no-compile',\n action='store_false',\n dest='compile',\n help='''\n Do not run \"dbt compile\" as part of docs generation\n ''',\n )\n return generate_sub\n\n\ndef _add_selection_arguments(*subparsers, **kwargs):\n models_name = kwargs.get('models_name', 'models')\n for sub in subparsers:\n sub.add_argument(\n '-{}'.format(models_name[0]),\n '--{}'.format(models_name),\n dest='models',\n required=False,\n nargs='+',\n help='''\n Specify the models to include.\n ''',\n )\n sub.add_argument(\n '--exclude',\n required=False,\n nargs='+',\n help='''\n Specify the models to exclude.\n ''',\n )\n\n\ndef _add_table_mutability_arguments(*subparsers):\n for sub in subparsers:\n sub.add_argument(\n '--full-refresh',\n action='store_true',\n help='''\n If specified, DBT will drop incremental models and\n fully-recalculate the incremental table from the model definition.\n '''\n )\n\n\ndef _add_common_arguments(*subparsers):\n for sub in subparsers:\n sub.add_argument(\n '--threads',\n type=int,\n required=False,\n help='''\n Specify number of threads to use while executing models. 
Overrides\n settings in profiles.yml.\n '''\n )\n sub.add_argument(\n '--no-version-check',\n dest='version_check',\n action='store_false',\n help='''\n If set, skip ensuring dbt's version matches the one specified in\n the dbt_project.yml file ('require-dbt-version')\n '''\n )\n\n\ndef _build_seed_subparser(subparsers, base_subparser):\n seed_sub = subparsers.add_parser(\n 'seed',\n parents=[base_subparser],\n help='''\n Load data from csv files into your data warehouse.\n ''',\n )\n seed_sub.add_argument(\n '--full-refresh',\n action='store_true',\n help='''\n Drop existing seed tables and recreate them\n ''',\n )\n seed_sub.add_argument(\n '--show',\n action='store_true',\n help='''\n Show a sample of the loaded data in the terminal\n '''\n )\n seed_sub.set_defaults(cls=seed_task.SeedTask, which='seed',\n rpc_method='seed')\n return seed_sub\n\n\ndef _build_docs_serve_subparser(subparsers, base_subparser):\n serve_sub = subparsers.add_parser('serve', parents=[base_subparser])\n serve_sub.add_argument(\n '--port',\n default=8080,\n type=int,\n help='''\n Specify the port number for the docs server.\n '''\n )\n serve_sub.set_defaults(cls=serve_task.ServeTask, which='serve',\n rpc_method=None)\n return serve_sub\n\n\ndef _build_test_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'test',\n parents=[base_subparser],\n help='''\n Runs tests on data in deployed models. Run this after `dbt run`\n '''\n )\n sub.add_argument(\n '--data',\n action='store_true',\n help='''\n Run data tests defined in \"tests\" directory.\n '''\n )\n sub.add_argument(\n '--schema',\n action='store_true',\n help='''\n Run constraint validations from schema.yml files\n '''\n )\n\n sub.set_defaults(cls=test_task.TestTask, which='test', rpc_method='test')\n return sub\n\n\ndef _build_source_snapshot_freshness_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'snapshot-freshness',\n parents=[base_subparser],\n help='''\n Snapshots the current freshness of the project's sources\n ''',\n )\n sub.add_argument(\n '-s',\n '--select',\n required=False,\n nargs='+',\n help='''\n Specify the sources to snapshot freshness\n ''',\n dest='selected'\n )\n sub.add_argument(\n '-o',\n '--output',\n required=False,\n help='''\n Specify the output path for the json report. By default, outputs to\n target/sources.json\n '''\n )\n sub.add_argument(\n '--threads',\n type=int,\n required=False,\n help='''\n Specify number of threads to use. 
Overrides settings in profiles.yml\n '''\n )\n sub.set_defaults(\n cls=freshness_task.FreshnessTask,\n which='snapshot-freshness',\n rpc_method='snapshot-freshness',\n )\n return sub\n\n\ndef _build_rpc_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'rpc',\n parents=[base_subparser],\n help='''\n Start a json-rpc server\n ''',\n )\n sub.add_argument(\n '--host',\n default='0.0.0.0',\n help='''\n Specify the host to listen on for the rpc server.\n ''',\n )\n sub.add_argument(\n '--port',\n default=8580,\n type=int,\n help='''\n Specify the port number for the rpc server.\n ''',\n )\n sub.set_defaults(cls=RPCServerTask, which='rpc', rpc_method=None)\n # the rpc task does a 'compile', so we need these attributes to exist, but\n # we don't want users to be allowed to set them.\n sub.set_defaults(models=None, exclude=None)\n return sub\n\n\ndef _build_list_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'list',\n parents=[base_subparser],\n help='''\n List the resources in your project\n ''',\n aliases=['ls'],\n )\n sub.set_defaults(cls=ListTask, which='list', rpc_method=None)\n resource_values = list(ListTask.ALL_RESOURCE_VALUES) + ['default', 'all']\n sub.add_argument('--resource-type',\n choices=resource_values,\n action='append',\n default=[],\n dest='resource_types')\n sub.add_argument('--output',\n choices=['json', 'name', 'path', 'selector'],\n default='selector')\n sub.add_argument(\n '-s',\n '--select',\n required=False,\n nargs='+',\n metavar='SELECTOR',\n help='''\n Specify the nodes to select.\n ''',\n )\n sub.add_argument(\n '-m',\n '--models',\n required=False,\n nargs='+',\n metavar='SELECTOR',\n help='''\n Specify the models to select and set the resource-type to 'model'.\n Mutually exclusive with '--select' (or '-s') and '--resource-type'\n ''',\n )\n sub.add_argument(\n '--exclude',\n required=False,\n nargs='+',\n metavar='SELECTOR',\n help='''\n Specify the models to exclude.\n '''\n )\n return sub\n\n\ndef _build_run_operation_subparser(subparsers, base_subparser):\n sub = subparsers.add_parser(\n 'run-operation',\n parents=[base_subparser],\n help='''\n Run the named macro with any supplied arguments.\n '''\n )\n sub.add_argument(\n 'macro',\n help='''\n Specify the macro to invoke. dbt will call this macro with the supplied\n arguments and then exit\n ''',\n )\n sub.add_argument(\n '--args',\n type=str,\n default='{}',\n help='''\n Supply arguments to the macro. This dictionary will be mapped to the\n keyword arguments defined in the selected macro. This argument should\n be a YAML string, eg. '{my_variable: my_value}'\n '''\n )\n sub.set_defaults(cls=run_operation_task.RunOperationTask,\n which='run-operation', rpc_method='run-operation')\n return sub\n\n\ndef parse_args(args, cls=DBTArgumentParser):\n p = cls(\n prog='dbt',\n description='''\n An ELT tool for managing your SQL transformations and data models.\n For more documentation on these commands, visit: docs.getdbt.com\n ''',\n epilog='''\n Specify one of these sub-commands and you can find more help from\n there.\n '''\n )\n\n p.add_argument(\n '--version',\n action='dbtversion',\n help='''\n Show version information\n ''')\n\n p.add_argument(\n '-r',\n '--record-timing-info',\n default=None,\n type=str,\n help='''\n When this option is passed, dbt will output low-level timing stats to\n the specified file. 
Example: `--record-timing-info output.profile`\n '''\n )\n\n p.add_argument(\n '-d',\n '--debug',\n action='store_true',\n help='''\n Display debug logging during dbt execution. Useful for debugging and\n making bug reports.\n '''\n )\n\n p.add_argument(\n '--log-format',\n choices=['text', 'json', 'default'],\n default='default',\n help='''Specify the log format, overriding the command's default.'''\n )\n\n p.add_argument(\n '--no-write-json',\n action='store_false',\n dest='write_json',\n help='''\n If set, skip writing the manifest and run_results.json files to disk\n '''\n )\n\n p.add_argument(\n '-S',\n '--strict',\n action='store_true',\n help='''\n Run schema validations at runtime. This will surface bugs in dbt, but\n may incur a performance penalty.\n '''\n )\n\n p.add_argument(\n '--warn-error',\n action='store_true',\n help='''\n If dbt would normally warn, instead raise an exception. Examples\n include --models that selects nothing, deprecations, configurations\n with no associated models, invalid test configurations, and missing\n sources/refs in tests.\n '''\n )\n\n partial_flag = p.add_mutually_exclusive_group()\n partial_flag.add_argument(\n '--partial-parse',\n action='store_const',\n const=True,\n dest='partial_parse',\n default=None,\n help='''\n Allow for partial parsing by looking for and writing to a pickle file\n in the target directory. This overrides the user configuration file.\n\n WARNING: This can result in unexpected behavior if you use env_var()!\n '''\n )\n\n partial_flag.add_argument(\n '--no-partial-parse',\n action='store_const',\n const=False,\n default=None,\n dest='partial_parse',\n help='''\n Disallow partial parsing. This overrides the user configuration file.\n '''\n )\n\n # if set, run dbt in single-threaded mode: thread count is ignored, and\n # calls go through `map` instead of the thread pool. This is useful for\n # getting performance information about aspects of dbt that normally run in\n # a thread, as the profiler ignores child threads. Users should really\n # never use this.\n p.add_argument(\n '--single-threaded',\n action='store_true',\n help=argparse.SUPPRESS,\n )\n\n # if set, extract all models and blocks with the jinja block extractor, and\n # verify that we don't fail anywhere the actual jinja parser passes. 
The\n # reverse (passing files that ends up failing jinja) is fine.\n p.add_argument(\n '--test-new-parser',\n action='store_true',\n help=argparse.SUPPRESS\n )\n\n subs = p.add_subparsers(title=\"Available sub-commands\")\n\n base_subparser = _build_base_subparser()\n\n # make the subcommands that have their own subcommands\n docs_sub = _build_docs_subparser(subs, base_subparser)\n docs_subs = docs_sub.add_subparsers(title=\"Available sub-commands\")\n source_sub = _build_source_subparser(subs, base_subparser)\n source_subs = source_sub.add_subparsers(title=\"Available sub-commands\")\n\n _build_init_subparser(subs, base_subparser)\n _build_clean_subparser(subs, base_subparser)\n _build_debug_subparser(subs, base_subparser)\n _build_deps_subparser(subs, base_subparser)\n _build_list_subparser(subs, base_subparser)\n\n snapshot_sub = _build_snapshot_subparser(subs, base_subparser)\n rpc_sub = _build_rpc_subparser(subs, base_subparser)\n run_sub = _build_run_subparser(subs, base_subparser)\n compile_sub = _build_compile_subparser(subs, base_subparser)\n generate_sub = _build_docs_generate_subparser(docs_subs, base_subparser)\n test_sub = _build_test_subparser(subs, base_subparser)\n seed_sub = _build_seed_subparser(subs, base_subparser)\n # --threads, --no-version-check\n _add_common_arguments(run_sub, compile_sub, generate_sub, test_sub,\n rpc_sub, seed_sub)\n # --models, --exclude\n _add_selection_arguments(run_sub, compile_sub, generate_sub, test_sub)\n _add_selection_arguments(snapshot_sub, models_name='select')\n # --full-refresh\n _add_table_mutability_arguments(run_sub, compile_sub)\n\n _build_docs_serve_subparser(docs_subs, base_subparser)\n _build_source_snapshot_freshness_subparser(source_subs, base_subparser)\n _build_run_operation_subparser(subs, base_subparser)\n\n if len(args) == 0:\n p.print_help()\n sys.exit(1)\n\n parsed = p.parse_args(args)\n parsed.profiles_dir = os.path.expanduser(parsed.profiles_dir)\n\n if not hasattr(parsed, 'which'):\n # the user did not provide a valid subcommand. trigger the help message\n # and exit with a error\n p.print_help()\n p.exit(1)\n\n return parsed\n",
"path": "core/dbt/main.py"
}
] | diff --git a/core/dbt/main.py b/core/dbt/main.py
index 6136350b44a..f5c0cd75949 100644
--- a/core/dbt/main.py
+++ b/core/dbt/main.py
@@ -244,6 +244,7 @@ def _build_base_subparser():
)
base_subparser.add_argument(
+ '-t',
'--target',
default=None,
type=str,
|
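For context on the diff above: it registers `-t` as a short alias for the existing `--target` option. A minimal, standalone argparse sketch (the parser below is illustrative only, not dbt's actual CLI) showing that both spellings land in the same destination attribute:

```python
import argparse

# Hypothetical demo parser: an option registered with both a short and a long
# name can be supplied either way and is stored under the same attribute.
parser = argparse.ArgumentParser(prog="demo")
parser.add_argument(
    "-t",
    "--target",
    default=None,
    type=str,
    help="Which target to load for the given profile",
)

print(parser.parse_args(["-t", "dev"]).target)        # -> dev
print(parser.parse_args(["--target", "dev"]).target)  # -> dev
```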
aws-cloudformation__cfn-lint-2900 | E1017 Select does not find already supported function when using complex list with nested Selects
### CloudFormation Lint Version
cfn-lint 0.80.4
### What operating system are you using?
Mac
### Describe the bug
When linting a template that uses complex nested Selects with a literal list to extract values from, cfn-lint reports E1017 even though it should not. The templates deploy correctly and work fine on my side.
Output from the command:
```
E1017 Select should use a supported function of Fn::FindInMap, Fn::GetAtt, Fn::GetAZs, Fn::If, Fn::Split, Fn::Cidr, Ref
/file1.yml:3189:11
```
### Expected behavior
No E1017 reported by cfn-lint.
The template works fine in CloudFormation, so E1017 should not be reported.
### Reproduction template
AWSTemplateFormatVersion: '2010-09-09'
Description: 'Build EC2 instance'
Resources:
  MountTarget1:
    Type: AWS::EFS::MountTarget
    Properties:
      FileSystemId: fs-1234567svsdabsf76s
      # E1017 STARTS HERE
      SubnetId: !Select
        - 0
        - !Select
          - 0
          - [
              [
                "subnet-0987sknlnsdoi9j76",
                "subnet-875jgyjlpzj75j8k0",
                "subnet-5447hnd6hI8js45js"
              ],
              [
                "subnet-0987sknlnsdoi9j76",
                "subnet-875jgyjlpzj75j8k0",
                "subnet-5447hnd6hI8js45js"
              ],
              [
                "subnet-0987sknlnsdoi9j76",
                "subnet-875jgyjlpzj75j8k0",
                "subnet-5447hnd6hI8js45js"
              ]
            ]
      SecurityGroups: [sg-00qdqeef0a5c345gf]
| [
{
"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nfrom cfnlint.rules import CloudFormationLintRule, RuleMatch\n\n\nclass Select(CloudFormationLintRule):\n \"\"\"Check if Select values are correct\"\"\"\n\n id = \"E1017\"\n shortdesc = \"Select validation of parameters\"\n description = \"Making sure the Select function is properly configured\"\n source_url = \"https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/intrinsic-function-reference-select.html\"\n tags = [\"functions\", \"select\"]\n supported_functions = [\n \"Fn::FindInMap\",\n \"Fn::GetAtt\",\n \"Fn::GetAZs\",\n \"Fn::If\",\n \"Fn::Split\",\n \"Fn::Cidr\",\n \"Ref\",\n ]\n\n def _test_index_obj(self, index_obj, path):\n matches = []\n if isinstance(index_obj, dict):\n if len(index_obj) == 1:\n for index_key, _ in index_obj.items():\n if index_key not in [\n \"Ref\",\n \"Fn::FindInMap\",\n \"Fn::Select\",\n ]:\n message = \"Select index should be an Integer or a function Ref, Fn::FindInMap, or Fn::Select for {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\"/\".join(map(str, path))),\n )\n )\n else:\n message = \"Select index should be an Integer or a function Ref, Fn::FindInMap, or Fn::Select for {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\"/\".join(map(str, path))),\n )\n )\n elif not isinstance(index_obj, int):\n try:\n int(index_obj)\n except (ValueError, TypeError):\n message = \"Select index should be an Integer or a function of Ref, Fn::FindInMap, or Fn::Select for {0}\"\n matches.append(\n RuleMatch(path, message.format(\"/\".join(map(str, path))))\n )\n\n return matches\n\n def _test_list_obj(self, list_obj, path):\n matches = []\n if isinstance(list_obj, dict):\n if len(list_obj) == 1:\n for key, _ in list_obj.items():\n if key not in self.supported_functions:\n message = \"Select should use a supported function of {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\n \", \".join(map(str, self.supported_functions))\n ),\n )\n )\n else:\n message = \"Select should use a supported function of {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\", \".join(map(str, self.supported_functions))),\n )\n )\n elif not isinstance(list_obj, list):\n message = \"Select should be an array of values for {0}\"\n matches.append(RuleMatch(path, message.format(\"/\".join(map(str, path)))))\n\n return matches\n\n def _test_select_obj(self, select_obj, path):\n matches = []\n if not isinstance(select_obj, list):\n message = \"Select should be a list of 2 elements for {0}\"\n matches.append(RuleMatch(path, message.format(\"/\".join(map(str, path)))))\n return matches\n if len(select_obj) != 2:\n message = \"Select should be a list of 2 elements for {0}\"\n matches.append(RuleMatch(path, message.format(\"/\".join(map(str, path)))))\n return matches\n\n index_obj = select_obj[0]\n list_of_objs = select_obj[1]\n matches.extend(self._test_index_obj(index_obj, path[:] + [0]))\n matches.extend(self._test_list_obj(list_of_objs, path[:] + [1]))\n\n return matches\n\n def match(self, cfn):\n matches = []\n\n select_objs = cfn.search_deep_keys(\"Fn::Select\")\n\n for select_obj in select_objs:\n select_value_obj = select_obj[-1]\n tree = select_obj[:-1]\n matches.extend(self._test_select_obj(select_value_obj, tree[:]))\n\n return matches\n",
"path": "src/cfnlint/rules/functions/Select.py"
}
] | [
{
"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nfrom cfnlint.rules import CloudFormationLintRule, RuleMatch\n\n\nclass Select(CloudFormationLintRule):\n \"\"\"Check if Select values are correct\"\"\"\n\n id = \"E1017\"\n shortdesc = \"Select validation of parameters\"\n description = \"Making sure the Select function is properly configured\"\n source_url = \"https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/intrinsic-function-reference-select.html\"\n tags = [\"functions\", \"select\"]\n supported_functions = [\n \"Fn::FindInMap\",\n \"Fn::GetAtt\",\n \"Fn::GetAZs\",\n \"Fn::If\",\n \"Fn::Split\",\n \"Fn::Cidr\",\n \"Fn::Select\", # issue: 2895\n \"Ref\",\n ]\n\n def _test_index_obj(self, index_obj, path):\n matches = []\n if isinstance(index_obj, dict):\n if len(index_obj) == 1:\n for index_key, _ in index_obj.items():\n if index_key not in [\n \"Ref\",\n \"Fn::FindInMap\",\n \"Fn::Select\",\n ]:\n message = \"Select index should be an Integer or a function Ref, Fn::FindInMap, or Fn::Select for {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\"/\".join(map(str, path))),\n )\n )\n else:\n message = \"Select index should be an Integer or a function Ref, Fn::FindInMap, or Fn::Select for {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\"/\".join(map(str, path))),\n )\n )\n elif not isinstance(index_obj, int):\n try:\n int(index_obj)\n except (ValueError, TypeError):\n message = \"Select index should be an Integer or a function of Ref, Fn::FindInMap, or Fn::Select for {0}\"\n matches.append(\n RuleMatch(path, message.format(\"/\".join(map(str, path))))\n )\n\n return matches\n\n def _test_list_obj(self, list_obj, path):\n matches = []\n if isinstance(list_obj, dict):\n if len(list_obj) == 1:\n for key, _ in list_obj.items():\n if key not in self.supported_functions:\n message = \"Select should use a supported function of {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\n \", \".join(map(str, self.supported_functions))\n ),\n )\n )\n else:\n message = \"Select should use a supported function of {0}\"\n matches.append(\n RuleMatch(\n path,\n message.format(\", \".join(map(str, self.supported_functions))),\n )\n )\n elif not isinstance(list_obj, list):\n message = \"Select should be an array of values for {0}\"\n matches.append(RuleMatch(path, message.format(\"/\".join(map(str, path)))))\n\n return matches\n\n def _test_select_obj(self, select_obj, path):\n matches = []\n if not isinstance(select_obj, list):\n message = \"Select should be a list of 2 elements for {0}\"\n matches.append(RuleMatch(path, message.format(\"/\".join(map(str, path)))))\n return matches\n if len(select_obj) != 2:\n message = \"Select should be a list of 2 elements for {0}\"\n matches.append(RuleMatch(path, message.format(\"/\".join(map(str, path)))))\n return matches\n\n index_obj = select_obj[0]\n list_of_objs = select_obj[1]\n matches.extend(self._test_index_obj(index_obj, path[:] + [0]))\n matches.extend(self._test_list_obj(list_of_objs, path[:] + [1]))\n\n return matches\n\n def match(self, cfn):\n matches = []\n\n select_objs = cfn.search_deep_keys(\"Fn::Select\")\n\n for select_obj in select_objs:\n select_value_obj = select_obj[-1]\n tree = select_obj[:-1]\n matches.extend(self._test_select_obj(select_value_obj, tree[:]))\n\n return matches\n",
"path": "src/cfnlint/rules/functions/Select.py"
}
] | diff --git a/src/cfnlint/rules/functions/Select.py b/src/cfnlint/rules/functions/Select.py
index a463abcc42..d9f034aaca 100644
--- a/src/cfnlint/rules/functions/Select.py
+++ b/src/cfnlint/rules/functions/Select.py
@@ -20,6 +20,7 @@ class Select(CloudFormationLintRule):
"Fn::If",
"Fn::Split",
"Fn::Cidr",
+ "Fn::Select", # issue: 2895
"Ref",
]
|
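A small hedged sketch (plain Python, not the actual cfn-lint rule code) of why the nested reproduction template above is only accepted once `Fn::Select` joins the rule's supported-function list: the second argument of `Fn::Select` must be either a literal list or a single-key dict whose key is a supported intrinsic.

```python
# Assumed, simplified mirror of the E1017 list-argument check.
SUPPORTED = {
    "Fn::FindInMap", "Fn::GetAtt", "Fn::GetAZs", "Fn::If",
    "Fn::Split", "Fn::Cidr", "Fn::Select", "Ref",  # Fn::Select added by the fix
}

def list_arg_is_ok(list_obj):
    # A dict must be a single supported intrinsic; anything else must be a list.
    if isinstance(list_obj, dict):
        return len(list_obj) == 1 and next(iter(list_obj)) in SUPPORTED
    return isinstance(list_obj, list)

# The nested Select from the reproduction template, as it parses into Python:
nested = {"Fn::Select": [0, [["subnet-a", "subnet-b"], ["subnet-c", "subnet-d"]]]}
print(list_arg_is_ok(nested))  # True only when Fn::Select is in the supported set
```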
twisted__twisted-1695 | Release 22.2.0
|Reported by|@adiroiban|
|-|-|
|Trac ID|trac#10306|
|Type|enhancement|
|Created|2022-02-08 14:05:11Z|
| [
{
"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 1, 0, post=0)\n__all__ = [\"__version__\"]\n",
"path": "src/twisted/_version.py"
}
] | [
{
"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 2, 0, post=0)\n__all__ = [\"__version__\"]\n",
"path": "src/twisted/_version.py"
}
] | diff --git a/NEWS.rst b/NEWS.rst
index b6880590715..66a31d0742a 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -3,6 +3,78 @@ http://twistedmatrix.com/trac/ticket/<number>
.. towncrier release notes start
+Twisted 22.2.0 (2022-03-01)
+===========================
+
+Bugfixes
+--------
+
+- twisted.internet.gireactor.PortableGIReactor.simulate and twisted.internet.gtk2reactor.PortableGtkReactor.simulate no longer raises TypeError when there are no delayed called. This was a regression introduced with the migration to Python 3 in which the builtin `min` function no longer accepts `None` as an argument. (#9660)
+- twisted.conch.ssh.transport.SSHTransportBase now disconnects the remote peer if the
+ SSH version string is not sent in the first 4096 bytes. (#10284, CVE-2022-21716,
+ GHSA-rv6r-3f5q-9rgx)
+
+
+Improved Documentation
+----------------------
+
+- Add type annotations for twisted.web.http.Request.getHeader. (#10270)
+
+
+Deprecations and Removals
+-------------------------
+
+- Support for Python 3.6, which is EoL as of 2021-09-04, has been deprecated. (#10303)
+
+
+Misc
+----
+
+- #10216, #10299, #10300
+
+
+Conch
+-----
+
+Misc
+~~~~
+
+- #10298
+
+
+Web
+---
+
+No significant changes.
+
+
+Mail
+----
+
+No significant changes.
+
+
+Words
+-----
+
+No significant changes.
+
+
+Names
+-----
+
+No significant changes.
+
+
+Trial
+-----
+
+Bugfixes
+~~~~~~~~
+
+- _dist.test.test_workertrial now correctly compare strings via assertEqual() and pass on PyPy3 (#10302)
+
+
Twisted 22.1.0 (2022-02-03)
===========================
diff --git a/src/twisted/_version.py b/src/twisted/_version.py
index ae72bd784d9..4b8dba9d272 100644
--- a/src/twisted/_version.py
+++ b/src/twisted/_version.py
@@ -7,5 +7,5 @@
from incremental import Version
-__version__ = Version("Twisted", 22, 1, 0, post=0)
+__version__ = Version("Twisted", 22, 2, 0, post=0)
__all__ = ["__version__"]
diff --git a/src/twisted/conch/newsfragments/10298.misc b/src/twisted/conch/newsfragments/10298.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10216.misc b/src/twisted/newsfragments/10216.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10270.doc b/src/twisted/newsfragments/10270.doc
deleted file mode 100644
index 90b17e9e517..00000000000
--- a/src/twisted/newsfragments/10270.doc
+++ /dev/null
@@ -1,2 +0,0 @@
-Add type annotations for twisted.web.http.Request.getHeader.
-
diff --git a/src/twisted/newsfragments/10284.bugfix b/src/twisted/newsfragments/10284.bugfix
deleted file mode 100644
index b2316f1e687..00000000000
--- a/src/twisted/newsfragments/10284.bugfix
+++ /dev/null
@@ -1,2 +0,0 @@
-twisted.conch.ssh.transport.SSHTransportBase now disconnects the remote peer if the
-SSH version string is not sent in the first 4096 bytes.
diff --git a/src/twisted/newsfragments/10299.misc b/src/twisted/newsfragments/10299.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10300.misc b/src/twisted/newsfragments/10300.misc
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/twisted/newsfragments/10303.removal b/src/twisted/newsfragments/10303.removal
deleted file mode 100644
index 3fcd35ba559..00000000000
--- a/src/twisted/newsfragments/10303.removal
+++ /dev/null
@@ -1 +0,0 @@
-Support for Python 3.6, which is EoL as of 2021-09-04, has been deprecated.
diff --git a/src/twisted/newsfragments/9660.bugfix b/src/twisted/newsfragments/9660.bugfix
deleted file mode 100644
index 288c38a2d44..00000000000
--- a/src/twisted/newsfragments/9660.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-twisted.internet.gireactor.PortableGIReactor.simulate and twisted.internet.gtk2reactor.PortableGtkReactor.simulate no longer raises TypeError when there are no delayed called. This was a regression introduced with the migration to Python3 in which the builtin `min` function no longer accepts `None` as an argument.
diff --git a/src/twisted/trial/newsfragments/10302.bugfix b/src/twisted/trial/newsfragments/10302.bugfix
deleted file mode 100644
index 098693409cd..00000000000
--- a/src/twisted/trial/newsfragments/10302.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-_dist.test.test_workertrial now correctly compare strings via assertEqual() and pass on PyPy3
|
scrapy__scrapy-1566 | signals docs are confusing
It seems it is not explained anywhere in the Scrapy docs how to connect a callback to a signal.
http://doc.scrapy.org/en/latest/topics/signals.html says:
> You can connect to signals (or send your own) through the [Signals API](http://doc.scrapy.org/en/latest/topics/api.html#topics-api-signals).
But if you follow this link you get docs for scrapy.signalmanager.SignalManager - that's fine, but it is not explained where to get a SignalManager instance from.
There is an example in Extension docs (http://doc.scrapy.org/en/latest/topics/extensions.html#sample-extension), but
a) this is just an example;
b) it is not explained that crawler.signals is a SignalManager instance;
c) this example is neither in Signals docs nor in SignalManager docs.
There is also a bit of information here: http://doc.scrapy.org/en/latest/topics/api.html#scrapy.crawler.Crawler.signals, but
a) it is not linked from either the Signals docs or the SignalManager docs, so you can't find it if you don't already know about it;
b) it is not explained that crawler.signals is the only way to access signals.
So in the end users may get some luck connecting signals if they start from Crawler docs, but almost no luck if they start from Signals docs.
| [
{
"content": "\"\"\"Helper functions which doesn't fit anywhere else\"\"\"\nimport re\nimport hashlib\nfrom importlib import import_module\nfrom pkgutil import iter_modules\n\nimport six\nfrom w3lib.html import replace_entities\n\nfrom scrapy.utils.python import flatten, to_unicode\nfrom scrapy.item import BaseItem\n\n\n_ITERABLE_SINGLE_VALUES = dict, BaseItem, six.text_type, bytes\n\n\ndef arg_to_iter(arg):\n \"\"\"Convert an argument to an iterable. The argument can be a None, single\n value, or an iterable.\n\n Exception: if arg is a dict, [arg] will be returned\n \"\"\"\n if arg is None:\n return []\n elif not isinstance(arg, _ITERABLE_SINGLE_VALUES) and hasattr(arg, '__iter__'):\n return arg\n else:\n return [arg]\n\n\ndef load_object(path):\n \"\"\"Load an object given its absolute object path, and return it.\n\n object can be a class, function, variable or an instance.\n path ie: 'scrapy.downloadermiddlewares.redirect.RedirectMiddleware'\n \"\"\"\n\n try:\n dot = path.rindex('.')\n except ValueError:\n raise ValueError(\"Error loading object '%s': not a full path\" % path)\n\n module, name = path[:dot], path[dot+1:]\n mod = import_module(module)\n\n try:\n obj = getattr(mod, name)\n except AttributeError:\n raise NameError(\"Module '%s' doesn't define any object named '%s'\" % (module, name))\n\n return obj\n\n\ndef walk_modules(path):\n \"\"\"Loads a module and all its submodules from the given module path and\n returns them. If *any* module throws an exception while importing, that\n exception is thrown back.\n\n For example: walk_modules('scrapy.utils')\n \"\"\"\n\n mods = []\n mod = import_module(path)\n mods.append(mod)\n if hasattr(mod, '__path__'):\n for _, subpath, ispkg in iter_modules(mod.__path__):\n fullpath = path + '.' + subpath\n if ispkg:\n mods += walk_modules(fullpath)\n else:\n submod = import_module(fullpath)\n mods.append(submod)\n return mods\n\n\ndef extract_regex(regex, text, encoding='utf-8'):\n \"\"\"Extract a list of unicode strings from the given text/encoding using the following policies:\n\n * if the regex contains a named group called \"extract\" that will be returned\n * if the regex contains multiple numbered groups, all those will be returned (flattened)\n * if the regex doesn't contain any group the entire regex matching is returned\n \"\"\"\n\n if isinstance(regex, six.string_types):\n regex = re.compile(regex, re.UNICODE)\n\n try:\n strings = [regex.search(text).group('extract')] # named group\n except:\n strings = regex.findall(text) # full regex or numbered groups\n strings = flatten(strings)\n\n if isinstance(text, six.text_type):\n return [replace_entities(s, keep=['lt', 'amp']) for s in strings]\n else:\n return [replace_entities(to_unicode(s, encoding), keep=['lt', 'amp'])\n for s in strings]\n\n\ndef md5sum(file):\n \"\"\"Calculate the md5 checksum of a file-like object without reading its\n whole content in memory.\n\n >>> from io import BytesIO\n >>> md5sum(BytesIO(b'file content to hash'))\n '784406af91dd5a54fbb9c84c2236595a'\n \"\"\"\n m = hashlib.md5()\n while True:\n d = file.read(8096)\n if not d:\n break\n m.update(d)\n return m.hexdigest()\n\ndef rel_has_nofollow(rel):\n \"\"\"Return True if link rel attribute has nofollow type\"\"\"\n return True if rel is not None and 'nofollow' in rel.split() else False\n \n",
"path": "scrapy/utils/misc.py"
}
] | [
{
"content": "\"\"\"Helper functions which don't fit anywhere else\"\"\"\nimport re\nimport hashlib\nfrom importlib import import_module\nfrom pkgutil import iter_modules\n\nimport six\nfrom w3lib.html import replace_entities\n\nfrom scrapy.utils.python import flatten, to_unicode\nfrom scrapy.item import BaseItem\n\n\n_ITERABLE_SINGLE_VALUES = dict, BaseItem, six.text_type, bytes\n\n\ndef arg_to_iter(arg):\n \"\"\"Convert an argument to an iterable. The argument can be a None, single\n value, or an iterable.\n\n Exception: if arg is a dict, [arg] will be returned\n \"\"\"\n if arg is None:\n return []\n elif not isinstance(arg, _ITERABLE_SINGLE_VALUES) and hasattr(arg, '__iter__'):\n return arg\n else:\n return [arg]\n\n\ndef load_object(path):\n \"\"\"Load an object given its absolute object path, and return it.\n\n object can be a class, function, variable o instance.\n path ie: 'scrapy.downloadermiddlewares.redirect.RedirectMiddleware'\n \"\"\"\n\n try:\n dot = path.rindex('.')\n except ValueError:\n raise ValueError(\"Error loading object '%s': not a full path\" % path)\n\n module, name = path[:dot], path[dot+1:]\n mod = import_module(module)\n\n try:\n obj = getattr(mod, name)\n except AttributeError:\n raise NameError(\"Module '%s' doesn't define any object named '%s'\" % (module, name))\n\n return obj\n\n\ndef walk_modules(path):\n \"\"\"Loads a module and all its submodules from a the given module path and\n returns them. If *any* module throws an exception while importing, that\n exception is thrown back.\n\n For example: walk_modules('scrapy.utils')\n \"\"\"\n\n mods = []\n mod = import_module(path)\n mods.append(mod)\n if hasattr(mod, '__path__'):\n for _, subpath, ispkg in iter_modules(mod.__path__):\n fullpath = path + '.' + subpath\n if ispkg:\n mods += walk_modules(fullpath)\n else:\n submod = import_module(fullpath)\n mods.append(submod)\n return mods\n\n\ndef extract_regex(regex, text, encoding='utf-8'):\n \"\"\"Extract a list of unicode strings from the given text/encoding using the following policies:\n\n * if the regex contains a named group called \"extract\" that will be returned\n * if the regex contains multiple numbered groups, all those will be returned (flattened)\n * if the regex doesn't contain any group the entire regex matching is returned\n \"\"\"\n\n if isinstance(regex, six.string_types):\n regex = re.compile(regex, re.UNICODE)\n\n try:\n strings = [regex.search(text).group('extract')] # named group\n except:\n strings = regex.findall(text) # full regex or numbered groups\n strings = flatten(strings)\n\n if isinstance(text, six.text_type):\n return [replace_entities(s, keep=['lt', 'amp']) for s in strings]\n else:\n return [replace_entities(to_unicode(s, encoding), keep=['lt', 'amp'])\n for s in strings]\n\n\ndef md5sum(file):\n \"\"\"Calculate the md5 checksum of a file-like object without reading its\n whole content in memory.\n\n >>> from io import BytesIO\n >>> md5sum(BytesIO(b'file content to hash'))\n '784406af91dd5a54fbb9c84c2236595a'\n \"\"\"\n m = hashlib.md5()\n while True:\n d = file.read(8096)\n if not d:\n break\n m.update(d)\n return m.hexdigest()\n\ndef rel_has_nofollow(rel):\n \"\"\"Return True if link rel attribute has nofollow type\"\"\"\n return True if rel is not None and 'nofollow' in rel.split() else False\n \n",
"path": "scrapy/utils/misc.py"
}
] | diff --git a/docs/topics/media-pipeline.rst b/docs/topics/media-pipeline.rst
index 4ee4f17583c..5ed6ce97d4b 100644
--- a/docs/topics/media-pipeline.rst
+++ b/docs/topics/media-pipeline.rst
@@ -7,7 +7,7 @@ Downloading and processing files and images
.. currentmodule:: scrapy.pipelines.images
Scrapy provides reusable :doc:`item pipelines </topics/item-pipeline>` for
-downloading fies attached to a particular item (for example, when you scrape
+downloading files attached to a particular item (for example, when you scrape
products and also want to download their images locally). These pipelines share
a bit of functionality and structure (we refer to them as media pipelines), but
typically you'll either use the Files Pipeline or the Images Pipeline.
diff --git a/docs/topics/signals.rst b/docs/topics/signals.rst
index 5dd3b9ef5d9..19d5e8df9f6 100644
--- a/docs/topics/signals.rst
+++ b/docs/topics/signals.rst
@@ -16,6 +16,37 @@ deliver the arguments that the handler receives.
You can connect to signals (or send your own) through the
:ref:`topics-api-signals`.
+Here is a simple example showing how you can catch signals and perform some action:
+::
+
+ from scrapy import signals
+ from scrapy import Spider
+
+
+ class DmozSpider(Spider):
+ name = "dmoz"
+ allowed_domains = ["dmoz.org"]
+ start_urls = [
+ "http://www.dmoz.org/Computers/Programming/Languages/Python/Books/",
+ "http://www.dmoz.org/Computers/Programming/Languages/Python/Resources/",
+ ]
+
+
+ @classmethod
+ def from_crawler(cls, crawler, *args, **kwargs):
+ spider = super(DmozSpider, cls).from_crawler(crawler, *args, **kwargs)
+ crawler.signals.connect(spider.spider_closed, signal=signals.spider_closed)
+ return spider
+
+
+ def spider_closed(self, spider):
+ spider.logger.info('Spider closed: %s', spider.name)
+
+
+ def parse(self, response):
+ pass
+
+
Deferred signal handlers
========================
diff --git a/scrapy/utils/misc.py b/scrapy/utils/misc.py
index 303a413d8b9..f20070b5d98 100644
--- a/scrapy/utils/misc.py
+++ b/scrapy/utils/misc.py
@@ -1,4 +1,4 @@
-"""Helper functions which doesn't fit anywhere else"""
+"""Helper functions which don't fit anywhere else"""
import re
import hashlib
from importlib import import_module
|
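As a usage note on the docs example added in the diff above: the same `crawler.signals.connect(...)` pattern works from any component that receives the crawler, for example an extension. A minimal hedged sketch (the extension class name is made up; it would be enabled through the `EXTENSIONS` setting):

```python
from scrapy import signals


class SpiderClosedLogger:
    """Toy extension that connects a callback to the spider_closed signal."""

    @classmethod
    def from_crawler(cls, crawler):
        ext = cls()
        # crawler.signals is the SignalManager instance the issue asks about.
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        return ext

    def spider_closed(self, spider):
        spider.logger.info("Spider closed: %s", spider.name)
```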
blaze__blaze-475 | Make blaze.test() return True or False
@asmeurer suggests this. Currently we're passing through the return value of pytest.main(), which is an exit code like the ones command-line programs return.
| [
{
"content": "from __future__ import absolute_import, division, print_function\n\nimport logging\n\nfrom dynd import nd\nfrom pandas import DataFrame\nimport h5py\n\nfrom multipledispatch import halt_ordering, restart_ordering\n\nhalt_ordering() # Turn off multipledispatch ordering\n\nfrom .expr import *\nfrom .expr.functions import *\nfrom .api import *\nfrom .data.csv import *\nfrom .data.json import *\nfrom .data.hdf5 import *\nfrom .compute.python import *\nfrom .data.meta import *\nfrom .compute.pandas import *\nfrom .compute.numpy import *\nfrom .compute.core import *\nfrom .compute.core import compute\nfrom .sql import *\n\ntry:\n from .spark import *\nexcept ImportError:\n pass\ntry:\n from .compute.pytables import *\nexcept ImportError:\n pass\ntry:\n from .compute.chunks import *\nexcept ImportError:\n pass\ntry:\n from .bcolz import *\nexcept ImportError:\n pass\ntry:\n from .mongo import *\nexcept ImportError:\n pass\n\nrestart_ordering() # Restart multipledispatch ordering and do ordering\n\nlogging.basicConfig()\nlogger = logging.getLogger(__name__)\nlogger.setLevel(logging.WARNING)\n\n\ninf = float('inf')\nnan = float('nan')\n\n__version__ = '0.6.1'\n\n# If IPython is already loaded, register the Blaze catalog magic\n# from . import catalog\n# import sys\n# if 'IPython' in sys.modules:\n# catalog.register_ipy_magic()\n# del sys\n\ndef print_versions():\n \"\"\"Print all the versions of software that Blaze relies on.\"\"\"\n import sys, platform\n import numpy as np\n import dynd\n import datashape\n print(\"-=\" * 38)\n print(\"Blaze version: %s\" % __version__)\n print(\"Datashape version: %s\" % datashape.__version__)\n print(\"NumPy version: %s\" % np.__version__)\n print(\"DyND version: %s / LibDyND %s\" %\n (dynd.__version__, dynd.__libdynd_version__))\n print(\"Python version: %s\" % sys.version)\n (sysname, nodename, release, version, machine, processor) = \\\n platform.uname()\n print(\"Platform: %s-%s-%s (%s)\" % (sysname, release, machine, version))\n if sysname == \"Linux\":\n print(\"Linux dist: %s\" % \" \".join(platform.linux_distribution()[:-1]))\n if not processor:\n processor = \"not recognized\"\n print(\"Processor: %s\" % processor)\n print(\"Byte-ordering: %s\" % sys.byteorder)\n print(\"-=\" * 38)\n\n\ndef test(verbose=False, junitfile=None, exit=False):\n \"\"\"\n Runs the full Blaze test suite, outputting\n the results of the tests to sys.stdout.\n\n This uses py.test to discover which tests to\n run, and runs tests in any 'tests' subdirectory\n within the Blaze module.\n\n Parameters\n ----------\n verbose : int, optional\n Value 0 prints very little, 1 prints a little bit,\n and 2 prints the test names while testing.\n junitfile : string, optional\n If provided, writes the test results to an junit xml\n style xml file. 
This is useful for running the tests\n in a CI server such as Jenkins.\n exit : bool, optional\n If True, the function will call sys.exit with an\n error code after the tests are finished.\n \"\"\"\n import os\n import sys\n import pytest\n\n args = []\n\n if verbose:\n args.append('--verbose')\n\n # Output an xunit file if requested\n if junitfile is not None:\n args.append('--junit-xml=%s' % junitfile)\n\n # Add all 'tests' subdirectories to the options\n rootdir = os.path.dirname(__file__)\n for root, dirs, files in os.walk(rootdir):\n if 'tests' in dirs:\n testsdir = os.path.join(root, 'tests')\n args.append(testsdir)\n print('Test dir: %s' % testsdir[len(rootdir) + 1:])\n # print versions (handy when reporting problems)\n print_versions()\n sys.stdout.flush()\n\n # Ask pytest to do its thing\n error_code = pytest.main(args=args)\n if exit:\n return sys.exit(error_code)\n return error_code\n",
"path": "blaze/__init__.py"
}
] | [
{
"content": "from __future__ import absolute_import, division, print_function\n\nimport logging\n\nfrom dynd import nd\nfrom pandas import DataFrame\nimport h5py\n\nfrom multipledispatch import halt_ordering, restart_ordering\n\nhalt_ordering() # Turn off multipledispatch ordering\n\nfrom .expr import *\nfrom .expr.functions import *\nfrom .api import *\nfrom .data.csv import *\nfrom .data.json import *\nfrom .data.hdf5 import *\nfrom .compute.python import *\nfrom .data.meta import *\nfrom .compute.pandas import *\nfrom .compute.numpy import *\nfrom .compute.core import *\nfrom .compute.core import compute\nfrom .sql import *\n\ntry:\n from .spark import *\nexcept ImportError:\n pass\ntry:\n from .compute.pytables import *\nexcept ImportError:\n pass\ntry:\n from .compute.chunks import *\nexcept ImportError:\n pass\ntry:\n from .bcolz import *\nexcept ImportError:\n pass\ntry:\n from .mongo import *\nexcept ImportError:\n pass\n\nrestart_ordering() # Restart multipledispatch ordering and do ordering\n\nlogging.basicConfig()\nlogger = logging.getLogger(__name__)\nlogger.setLevel(logging.WARNING)\n\n\ninf = float('inf')\nnan = float('nan')\n\n__version__ = '0.6.1'\n\n# If IPython is already loaded, register the Blaze catalog magic\n# from . import catalog\n# import sys\n# if 'IPython' in sys.modules:\n# catalog.register_ipy_magic()\n# del sys\n\ndef print_versions():\n \"\"\"Print all the versions of software that Blaze relies on.\"\"\"\n import sys, platform\n import numpy as np\n import dynd\n import datashape\n print(\"-=\" * 38)\n print(\"Blaze version: %s\" % __version__)\n print(\"Datashape version: %s\" % datashape.__version__)\n print(\"NumPy version: %s\" % np.__version__)\n print(\"DyND version: %s / LibDyND %s\" %\n (dynd.__version__, dynd.__libdynd_version__))\n print(\"Python version: %s\" % sys.version)\n (sysname, nodename, release, version, machine, processor) = \\\n platform.uname()\n print(\"Platform: %s-%s-%s (%s)\" % (sysname, release, machine, version))\n if sysname == \"Linux\":\n print(\"Linux dist: %s\" % \" \".join(platform.linux_distribution()[:-1]))\n if not processor:\n processor = \"not recognized\"\n print(\"Processor: %s\" % processor)\n print(\"Byte-ordering: %s\" % sys.byteorder)\n print(\"-=\" * 38)\n\n\ndef test(verbose=False, junitfile=None, exit=False):\n \"\"\"\n Runs the full Blaze test suite, outputting\n the results of the tests to sys.stdout.\n\n This uses py.test to discover which tests to\n run, and runs tests in any 'tests' subdirectory\n within the Blaze module.\n\n Parameters\n ----------\n verbose : int, optional\n Value 0 prints very little, 1 prints a little bit,\n and 2 prints the test names while testing.\n junitfile : string, optional\n If provided, writes the test results to an junit xml\n style xml file. 
This is useful for running the tests\n in a CI server such as Jenkins.\n exit : bool, optional\n If True, the function will call sys.exit with an\n error code after the tests are finished.\n \"\"\"\n import os\n import sys\n import pytest\n\n args = []\n\n if verbose:\n args.append('--verbose')\n\n # Output an xunit file if requested\n if junitfile is not None:\n args.append('--junit-xml=%s' % junitfile)\n\n # Add all 'tests' subdirectories to the options\n rootdir = os.path.dirname(__file__)\n for root, dirs, files in os.walk(rootdir):\n if 'tests' in dirs:\n testsdir = os.path.join(root, 'tests')\n args.append(testsdir)\n print('Test dir: %s' % testsdir[len(rootdir) + 1:])\n # print versions (handy when reporting problems)\n print_versions()\n sys.stdout.flush()\n\n # Ask pytest to do its thing\n error_code = pytest.main(args=args)\n if exit:\n return sys.exit(error_code)\n return error_code == 0\n",
"path": "blaze/__init__.py"
}
] | diff --git a/blaze/__init__.py b/blaze/__init__.py
index 94239fde3..b6cd8e108 100644
--- a/blaze/__init__.py
+++ b/blaze/__init__.py
@@ -139,4 +139,4 @@ def test(verbose=False, junitfile=None, exit=False):
error_code = pytest.main(args=args)
if exit:
return sys.exit(error_code)
- return error_code
+ return error_code == 0
|
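The one-line fix above relies on pytest's convention that `pytest.main()` returns a shell-style exit status, with 0 meaning every test passed. A minimal hedged illustration of the same conversion, independent of Blaze (the function name is made up):

```python
import pytest

def run_suite(paths):
    # pytest.main() returns an integer exit status (0 == all tests passed);
    # comparing it against 0 yields the True/False the issue asks for.
    exit_code = pytest.main(list(paths))
    return exit_code == 0

if __name__ == "__main__":
    print(run_suite(["tests"]))  # True if the suite passed, else False
```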
ocadotechnology__aimmo-232 | Test coverage
Some first extra tests written to get the test coverage up a bit.
| [
{
"content": "# -*- coding: utf-8 -*-\nfrom setuptools import find_packages, setup\n\n\nsetup(\n name='aimmo-game-creator',\n packages=find_packages(),\n include_package_data=True,\n install_requires=[\n 'eventlet',\n 'pykube',\n ],\n tests_require=[\n 'httmock',\n ],\n test_suite='tests',\n zip_safe=False,\n)\n",
"path": "aimmo-game-creator/setup.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\nfrom setuptools import find_packages, setup\n\n\nsetup(\n name='aimmo-game-creator',\n packages=find_packages(),\n include_package_data=True,\n install_requires=[\n 'eventlet',\n 'pykube',\n ],\n tests_require=[\n 'httmock',\n 'mock',\n ],\n test_suite='tests',\n zip_safe=False,\n)\n",
"path": "aimmo-game-creator/setup.py"
}
] | diff --git a/aimmo-game-creator/setup.py b/aimmo-game-creator/setup.py
index 6f7ef60c8..1f147d967 100644
--- a/aimmo-game-creator/setup.py
+++ b/aimmo-game-creator/setup.py
@@ -12,6 +12,7 @@
],
tests_require=[
'httmock',
+ 'mock',
],
test_suite='tests',
zip_safe=False,
diff --git a/aimmo-game-creator/tests/test_worker_manager.py b/aimmo-game-creator/tests/test_worker_manager.py
index fa60d5cf0..5ec5bfe16 100644
--- a/aimmo-game-creator/tests/test_worker_manager.py
+++ b/aimmo-game-creator/tests/test_worker_manager.py
@@ -1,11 +1,16 @@
from __future__ import absolute_import
+import cPickle as pickle
import unittest
+
from json import dumps, loads
from httmock import HTTMock
+import mock
from worker_manager import WorkerManager
+from worker_manager import LocalWorkerManager
+
class ConcreteWorkerManager(WorkerManager):
def __init__(self, *args, **kwargs):
@@ -93,3 +98,45 @@ def test_added_workers_given_correct_url(self):
'http://test/{}/'.format(i)
)
self.assertEqual(self.worker_manager.added_workers[str(i)]['name'], 'Game %s' % i)
+
+
+class TestLocalWorkerManager(unittest.TestCase):
+
+ def test_create_worker(self):
+ with mock.patch('subprocess.Popen') as mocked_popen:
+ localWorkerManager = LocalWorkerManager("")
+
+ game_id = 1
+ game_data = {
+ "test" : "test"
+ }
+
+ localWorkerManager.create_worker(game_id, game_data)
+ call_args = mocked_popen.call_args
+
+ argument_dictionary = call_args[1]
+ self.assertTrue("aimmo-game" in argument_dictionary["cwd"])
+ self.assertEqual(argument_dictionary["env"]["test"], "test")
+
+ def test_remove_worker(self):
+ self.killed = False
+ class KillableWorker():
+ def __init__(self, binder):
+ self.binder = binder
+ self.binder.killed = False
+
+ def kill(self):
+ self.binder.killed = True
+
+ localWorkerManager = LocalWorkerManager("")
+ localWorkerManager.workers = {
+ 1 : KillableWorker(self)
+ }
+
+ self.assertFalse(self.killed)
+ self.assertTrue(1 in localWorkerManager.workers)
+
+ localWorkerManager.remove_worker(1)
+
+ self.assertTrue(self.killed)
+ self.assertTrue(1 not in localWorkerManager.workers)
diff --git a/aimmo-game-worker/tests/simulation/test_location.py b/aimmo-game-worker/tests/simulation/test_location.py
new file mode 100644
index 000000000..890e57f23
--- /dev/null
+++ b/aimmo-game-worker/tests/simulation/test_location.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import
+
+from unittest import TestCase
+
+from simulation.location import Location
+
+
+class TestLocation(TestCase):
+
+ def test_add(self):
+ dummy_location = Location(1, 2)
+ direction = Location(1, 1)
+ expected = Location(2, 3)
+ self.assertEqual(dummy_location + direction, expected)
+
+ def test_sub(self):
+ dummy_location = Location(3, 2)
+ direction = Location(1, 1)
+ expected = Location(2, 1)
+ self.assertEqual(dummy_location - direction, expected)
+
+ def test_repr(self):
+ dummy_location = Location(1, 2)
+ self.assertTrue("Location(1, 2)" == str(dummy_location))
+
+ def test_not_equal(self):
+ dummy_location_1 = Location(1, 1)
+ dummy_location_2 = Location(2, 2)
+ self.assertTrue(dummy_location_1 != dummy_location_2)
diff --git a/aimmo-game/tests/test_simulation/avatar/test_fog_of_war.py b/aimmo-game/tests/test_simulation/avatar/test_fog_of_war.py
new file mode 100644
index 000000000..737e00b91
--- /dev/null
+++ b/aimmo-game/tests/test_simulation/avatar/test_fog_of_war.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import
+
+from unittest import TestCase
+
+from simulation.avatar.fog_of_war import should_partially_fog
+
+
+class TestFogOfWar(TestCase):
+ def test_should_partially_fog(self):
+ self.assertFalse(should_partially_fog(no_fog_distance=20, partial_fog_distance=2, x_dist=1, y_dist=10))
+ self.assertTrue(should_partially_fog(no_fog_distance=1, partial_fog_distance=2, x_dist=20, y_dist=10))
diff --git a/aimmo-game/tests/test_simulation/test_location.py b/aimmo-game/tests/test_simulation/test_location.py
index 83e86b7c7..3eb4d5471 100644
--- a/aimmo-game/tests/test_simulation/test_location.py
+++ b/aimmo-game/tests/test_simulation/test_location.py
@@ -9,39 +9,46 @@ class TestLocation(TestCase):
def test_equal(self):
loc_1 = Location(3, 3)
loc_2 = Location(3, 3)
+
self.assertEqual(loc_1, loc_2)
self.assertFalse(loc_1 != loc_2)
def test_x_not_equal(self):
loc_1 = Location(3, 3)
loc_2 = Location(4, 3)
+
self.assertNotEqual(loc_1, loc_2)
self.assertFalse(loc_1 == loc_2)
def test_y_not_equal(self):
loc_1 = Location(4, 4)
loc_2 = Location(4, 3)
+
self.assertNotEqual(loc_1, loc_2)
self.assertFalse(loc_1 == loc_2)
def test_add(self):
loc_1 = Location(1, 2)
loc_2 = Location(3, 4)
+
expected = Location(4, 6)
self.assertEqual(loc_1 + loc_2, expected)
def test_sub(self):
loc_1 = Location(1, 2)
loc_2 = Location(3, 4)
+
expected = Location(-2, -2)
self.assertEqual(loc_1 - loc_2, expected)
def test_hash_equal(self):
loc_1 = Location(3, 3)
loc_2 = Location(3, 3)
+
self.assertEqual(hash(loc_1), hash(loc_2))
def test_serialise(self):
loc = Location(3, 9)
+
expected = {'x': 3, 'y': 9}
self.assertEqual(loc.serialise(), expected)
diff --git a/aimmo-game/tests/test_simulation/test_turn_manager.py b/aimmo-game/tests/test_simulation/test_turn_manager.py
index 074b69139..18f45f583 100644
--- a/aimmo-game/tests/test_simulation/test_turn_manager.py
+++ b/aimmo-game/tests/test_simulation/test_turn_manager.py
@@ -6,6 +6,7 @@
from simulation.game_state import GameState
from simulation.location import Location
from simulation.turn_manager import ConcurrentTurnManager
+from simulation.turn_manager import SequentialTurnManager
from .dummy_avatar import DummyAvatarManager
from .dummy_avatar import MoveEastDummy
from .dummy_avatar import MoveNorthDummy
@@ -32,16 +33,22 @@ class TestTurnManager(unittest.TestCase):
def construct_default_avatar_appearance(self):
return AvatarAppearance("#000", "#ddd", "#777", "#fff")
- def construct_turn_manager(self, avatars, locations):
+ def construct_turn_manager(self, avatars, locations, manager):
self.avatar_manager = DummyAvatarManager(avatars)
self.game_state = MockGameState(InfiniteMap(), self.avatar_manager)
- self.turn_manager = ConcurrentTurnManager(game_state=self.game_state,
+ self.turn_manager = manager(game_state=self.game_state,
end_turn_callback=lambda: None,
completion_url='')
for index, location in enumerate(locations):
self.game_state.add_avatar(index, "", location)
return self.turn_manager
+ def construct_concurrent_turn_manager(self, avatars, locations):
+ return self.construct_turn_manager(avatars, locations, ConcurrentTurnManager)
+
+ def construct_sequential_turn_manager(self, avatars, locations):
+ return self.construct_turn_manager(avatars, locations, SequentialTurnManager)
+
def assert_at(self, avatar, location):
self.assertEqual(avatar.location, location)
cell = self.game_state.world_map.get_cell(location)
@@ -53,33 +60,33 @@ def get_avatar(self, player_id):
def run_turn(self):
self.turn_manager.run_turn()
- def test_run_turn(self):
+ def run_by_manager_turn(self, construct_manager):
'''
Given: > _
(1)
Expect: _ o
'''
- self.construct_turn_manager([MoveEastDummy], [ORIGIN])
+ construct_manager([MoveEastDummy], [ORIGIN])
avatar = self.get_avatar(0)
self.assert_at(avatar, ORIGIN)
self.run_turn()
self.assert_at(avatar, RIGHT_OF_ORIGIN)
- def test_run_several_turns(self):
+ def run_by_manager_several_turns(self, construct_manager):
'''
Given: > _ _ _ _ _
(5)
Expect: _ _ _ _ _ o
'''
- self.construct_turn_manager([MoveEastDummy], [ORIGIN])
+ construct_manager([MoveEastDummy], [ORIGIN])
avatar = self.get_avatar(0)
self.assertEqual(avatar.location, ORIGIN)
[self.run_turn() for _ in range(5)]
self.assertEqual(avatar.location, FIVE_RIGHT_OF_ORIGIN)
- def test_run_several_turns_and_avatars(self):
+ def run_by_manager_several_turns_and_avatars(self, construct_manager):
'''
Given: > _ _ _ _ _
> _ _ _ _ _
@@ -87,7 +94,7 @@ def test_run_several_turns_and_avatars(self):
Expect: _ _ _ _ _ o
_ _ _ _ _ o
'''
- self.construct_turn_manager([MoveEastDummy, MoveEastDummy],
+ construct_manager([MoveEastDummy, MoveEastDummy],
[ORIGIN, ABOVE_ORIGIN])
avatar0 = self.get_avatar(0)
avatar1 = self.get_avatar(1)
@@ -98,13 +105,13 @@ def test_run_several_turns_and_avatars(self):
self.assert_at(avatar0, FIVE_RIGHT_OF_ORIGIN)
self.assert_at(avatar1, FIVE_RIGHT_OF_ORIGIN_AND_ONE_ABOVE)
- def test_move_chain_succeeds(self):
+ def run_by_manager_move_chain_succeeds(self, construct_manager):
'''
Given: > > > > > _
Expect: _ o o o o o
'''
- self.construct_turn_manager([MoveEastDummy for _ in range(5)],
+ construct_manager([MoveEastDummy for _ in range(5)],
[Location(x, 0) for x in range(5)])
avatars = [self.get_avatar(i) for i in range(5)]
@@ -112,13 +119,13 @@ def test_move_chain_succeeds(self):
self.run_turn()
[self.assert_at(avatars[x], Location(x + 1, 0)) for x in range(5)]
- def test_move_chain_fails_occupied(self):
+ def run_by_manager_move_chain_fails_occupied(self, construct_manager):
'''
Given: > > x _
Expect: x x x _
'''
- self.construct_turn_manager([MoveEastDummy, MoveEastDummy, WaitDummy],
+ construct_manager([MoveEastDummy, MoveEastDummy, WaitDummy],
[Location(x, 0) for x in range(3)])
avatars = [self.get_avatar(i) for i in range(3)]
@@ -126,14 +133,14 @@ def test_move_chain_fails_occupied(self):
self.run_turn()
[self.assert_at(avatars[x], Location(x, 0)) for x in range(3)]
- def test_move_chain_fails_collision(self):
+ def run_by_manager_move_chain_fails_collision(self, construct_manager):
'''
Given: > > > _ <
(1)
Expect: x x x _ x
'''
locations = [Location(0, 0), Location(1, 0), Location(2, 0), Location(4, 0)]
- self.construct_turn_manager(
+ construct_manager(
[MoveEastDummy, MoveEastDummy, MoveEastDummy, MoveWestDummy],
locations)
avatars = [self.get_avatar(i) for i in range(4)]
@@ -142,7 +149,7 @@ def test_move_chain_fails_collision(self):
self.run_turn()
[self.assert_at(avatars[i], locations[i]) for i in range(4)]
- def test_move_chain_fails_cycle(self):
+ def run_by_manager_move_chain_fails_cycle(self, construct_manager):
'''
Given: > v
^ <
@@ -151,7 +158,7 @@ def test_move_chain_fails_cycle(self):
x x
'''
locations = [Location(0, 1), Location(1, 1), Location(1, 0), Location(0, 0)]
- self.construct_turn_manager(
+ construct_manager(
[MoveEastDummy, MoveSouthDummy, MoveWestDummy, MoveNorthDummy],
locations)
avatars = [self.get_avatar(i) for i in range(4)]
@@ -160,7 +167,7 @@ def test_move_chain_fails_cycle(self):
self.run_turn()
[self.assert_at(avatars[i], locations[i]) for i in range(4)]
- def test_move_chain_fails_spiral(self):
+ def run_by_manager_move_chain_fails_spiral(self, construct_manager):
'''
Given: > > v
^ <
@@ -173,7 +180,7 @@ def test_move_chain_fails_spiral(self):
Location(2, 1),
Location(2, 0),
Location(1, 0)]
- self.construct_turn_manager(
+ construct_manager(
[MoveEastDummy, MoveEastDummy, MoveSouthDummy, MoveWestDummy, MoveNorthDummy],
locations)
avatars = [self.get_avatar(i) for i in range(5)]
@@ -182,5 +189,59 @@ def test_move_chain_fails_spiral(self):
self.run_turn()
[self.assert_at(avatars[i], locations[i]) for i in range(5)]
+ def build_test_by_constructor(self, constructor):
+ self.run_by_manager_turn(constructor)
+ self.run_by_manager_several_turns_and_avatars(constructor)
+ self.run_by_manager_several_turns(constructor)
+ self.run_by_manager_move_chain_fails_spiral(constructor)
+ self.run_by_manager_move_chain_fails_cycle(constructor)
+ self.run_by_manager_move_chain_fails_occupied(constructor)
+
+ def test_concurrent_turn_manager(self):
+ constructor = lambda x, y: self.construct_concurrent_turn_manager(x, y)
+ self.build_test_by_constructor(constructor)
+ self.run_by_manager_move_chain_fails_collision(constructor)
+ self.run_by_manager_move_chain_succeeds(constructor)
+
+ def sequential_move_chain_consecutive_avatars_fails(self):
+ '''
+ Given: > > > > > _
+ Expect: _ o o o o o
+
+ This should fail for the sequential manager as the first avatar will bump into the second one
+ '''
+ self.construct_sequential_turn_manager([MoveEastDummy for _ in range(5)],
+ [Location(x, 0) for x in range(5)])
+ avatars = [self.get_avatar(i) for i in range(5)]
+
+ [self.assert_at(avatars[x], Location(x, 0)) for x in range(5)]
+ self.run_turn()
+ [self.assert_at(avatars[x], Location(x, 0)) for x in range(4)]
+ self.assert_at(avatars[4], Location(5, 0))
+
+ def sequential_move_chain_fails_collision(self):
+ '''
+ Given: > > > _ <
+ (1)
+ Expect: x x x _ x
+ '''
+ locations = [Location(0, 0), Location(1, 0), Location(2, 0), Location(4, 0)]
+ self.construct_sequential_turn_manager(
+ [MoveEastDummy, MoveEastDummy, MoveEastDummy, MoveWestDummy],
+ locations)
+ avatars = [self.get_avatar(i) for i in range(4)]
+
+ [self.assert_at(avatars[i], locations[i]) for i in range(4)]
+ self.run_turn()
+ [self.assert_at(avatars[i], locations[i]) for i in [0, 1, 3]]
+ self.assert_at(avatars[2], Location(3, 0))
+
+ def test_sequential_turn_manager(self):
+ constructor = lambda x, y: self.construct_sequential_turn_manager(x, y)
+ self.build_test_by_constructor(constructor)
+ self.sequential_move_chain_consecutive_avatars_fails()
+ self.sequential_move_chain_fails_collision()
+
+
if __name__ == '__main__':
unittest.main()
|
ansible-collections__community.aws-1886 | mq_broker: Tagging a broker on creation does not work
### Summary
When creating a new MQ broker using the following task, the broker does not get tagged.
```
- name: create broker with minimal parameters
mq_broker:
broker_name: "{{ broker_name }}"
security_groups: "{{ broker_sg_ids.split(',') }}"
subnet_ids: "{{ broker_subnet_ids.split(',') }}"
tags:
"Foo": "Bar"
"FooBar": "foobar"
```
Actual result:
```
changed: [testhost] => {
"broker": {
"broker_arn": "arn:aws:mq:us-east-1:123456789100:broker:ansible-test-52903175--mq:b-70e0807b-102d-42ae-8805-94ec6395436c",
"broker_id": "b-70e0807b-102d-42ae-8805-94ec6395436c",
"response_metadata": {
"http_headers": {
"access-control-allow-origin": "*",
"access-control-expose-headers": "x-amzn-errortype,x-amzn-requestid,x-amzn-errormessage,x-amzn-trace-id,x-amz-apigw-id,date",
"cache-control": "no-cache; no-store, must-revalidate, private",
"connection": "keep-alive",
"content-length": "191",
"content-type": "application/json",
"date": "Wed, 31 May 2023 13:25:16 GMT",
"expires": "0",
"pragma": "no-cache",
"x-amz-apigw-id": "FyidUFppIAMF1zw=",
"x-amzn-requestid": "12345bcb-5678-890d-972c-26a92712aaeb",
"x-amzn-trace-id": "Root=1-64774abb-2b3bf58a2b0cbf7800afdef6"
},
"http_status_code": 200,
"request_id": "59392bcb-5406-460d-972c-26a92712aaeb",
"retry_attempts": 0
}
},
```
### Issue Type
Bug Report
### Component Name
mq_broker
### Ansible Version
```console (paste below)
$ ansible --version
ansible [core 2.14.3]
```
### Collection Versions
```console (paste below)
$ ansible-galaxy collection list
Collection Version
----------------------------- -------
amazon.aws 6.0.0
community.aws 6.0.0
```
### AWS SDK versions
```console (paste below)
$ pip show boto boto3 botocore
Name: boto3
Version: 1.22.0
Summary: The AWS SDK for Python
Home-page: https://github.com/boto/boto3
Author: Amazon Web Services
Author-email:
License: Apache License 2.0
Location: /Users/alinabuzachis/anaconda3/envs/py310/lib/python3.10/site-packages
Requires: botocore, jmespath, s3transfer
Required-by: gouttelette
---
Name: botocore
Version: 1.25.13
Summary: Low-level, data-driven core of boto 3.
Home-page: https://github.com/boto/botocore
Author: Amazon Web Services
Author-email:
License: Apache License 2.0
Location: /Users/alinabuzachis/anaconda3/envs/py310/lib/python3.10/site-packages
Requires: jmespath, python-dateutil, urllib3
Required-by: aiobotocore, awscli, boto3, s3transfer
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
```
### OS / Environment
_No response_
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Expected Results
Creating an MQ broker with the task I pasted before results in a broker that carries the specified tags.
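For illustration, a minimal boto3 sketch of what tagging on creation implies for the underlying API call (this is not the module's code; all broker settings and resource IDs below are placeholders):
```python
# Hedged sketch: tags passed to the module are expected to reach the
# CreateBroker API call as the CamelCase "Tags" argument.
# Every name/value here is an illustrative placeholder.
import boto3

client = boto3.client("mq", region_name="us-east-1")
client.create_broker(
    BrokerName="ansible-test-mq",
    AutoMinorVersionUpgrade=True,
    DeploymentMode="SINGLE_INSTANCE",
    EngineType="ACTIVEMQ",
    EngineVersion="5.15.13",
    HostInstanceType="mq.t3.micro",
    PubliclyAccessible=False,
    Users=[{"Username": "admin", "Password": "adminPassword123"}],
    SubnetIds=["subnet-aaaabbbb"],
    SecurityGroups=["sg-aaaabbbb"],
    Tags={"Foo": "Bar", "FooBar": "foobar"},
)
```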
### Actual Results
```console (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
| [
{
"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: Contributors to the Ansible project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nDOCUMENTATION = r\"\"\"\n---\nmodule: mq_broker\nversion_added: 6.0.0\nshort_description: MQ broker management\ndescription:\n - Create/update/delete a broker.\n - Reboot a broker.\nauthor:\n - FCO (@fotto)\noptions:\n broker_name:\n description:\n - The Name of the MQ broker to work on.\n type: str\n required: true\n state:\n description:\n - \"C(present): Create/update broker.\"\n - \"C(absent): Delete broker.\"\n - \"C(restarted): Reboot broker.\"\n choices: [ 'present', 'absent', 'restarted' ]\n default: present\n type: str\n deployment_mode:\n description:\n - Set broker deployment type.\n - Can be used only during creation.\n - Defaults to C(SINGLE_INSTANCE).\n choices: [ 'SINGLE_INSTANCE', 'ACTIVE_STANDBY_MULTI_AZ', 'CLUSTER_MULTI_AZ' ]\n type: str\n use_aws_owned_key:\n description:\n - Must be set to C(false) if I(kms_key_id) is provided as well.\n - Can be used only during creation.\n - Defaults to C(true).\n type: bool\n kms_key_id:\n description:\n - Use referenced key to encrypt broker data at rest.\n - Can be used only during creation.\n type: str\n engine_type:\n description:\n - Set broker engine type.\n - Can be used only during creation.\n - Defaults to C(ACTIVEMQ).\n choices: [ 'ACTIVEMQ', 'RABBITMQ' ]\n type: str\n maintenance_window_start_time:\n description:\n - Set maintenance window for automatic minor upgrades.\n - Can be used only during creation.\n - Not providing any value means \"no maintenance window\".\n type: dict\n publicly_accessible:\n description:\n - Allow/disallow public access.\n - Can be used only during creation.\n - Defaults to C(false).\n type: bool\n storage_type:\n description:\n - Set underlying storage type.\n - Can be used only during creation.\n - Defaults to C(EFS).\n choices: [ 'EBS', 'EFS' ]\n type: str\n subnet_ids:\n description:\n - Defines where deploy broker instances to.\n - Minimum required number depends on deployment type.\n - Can be used only during creation.\n type: list\n elements: str\n users:\n description:\n - This parameter allows to use a custom set of initial user(s).\n - M(community.aws.mq_user) is the preferred way to manage (local) users\n however a broker cannot be created without any user.\n - If nothing is specified a default C(admin) user will be created along with brokers.\n - Can be used only during creation. 
Use M(community.aws.mq_user) module for updates.\n type: list\n elements: dict\n tags:\n description:\n - Tag newly created brokers.\n - Can be used only during creation.\n type: dict\n authentication_strategy:\n description: Choose between locally and remotely managed users.\n choices: [ 'SIMPLE', 'LDAP' ]\n type: str\n auto_minor_version_upgrade:\n description: Allow/disallow automatic minor version upgrades.\n type: bool\n default: true\n engine_version:\n description:\n - Set engine version of broker.\n - The special value C(latest) will pick the latest available version.\n - The special value C(latest) is ignored on update.\n type: str\n host_instance_type:\n description: Instance type of broker instances.\n type: str\n enable_audit_log:\n description: Enable/disable to push audit logs to AWS CloudWatch.\n type: bool\n default: false\n enable_general_log:\n description: Enable/disable to push general logs to AWS CloudWatch.\n type: bool\n default: false\n security_groups:\n description:\n - Associate security groups with broker.\n - At least one must be provided during creation.\n type: list\n elements: str\n\nextends_documentation_fragment:\n - amazon.aws.boto3\n - amazon.aws.common.modules\n - amazon.aws.region.modules\n\"\"\"\n\n\nEXAMPLES = r\"\"\"\n- name: create broker (if missing) with minimal required parameters\n community.aws.mq_broker:\n broker_name: \"{{ broker_name }}\"\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n register: result\n\n- set_fact:\n broker_id: \"{{ result.broker['BrokerId'] }}\"\n\n- name: use mq_broker_info to wait until broker is ready\n community.aws.mq_broker_info:\n broker_id: \"{{ broker_id }}\"\n register: result\n until: \"result.broker['BrokerState'] == 'RUNNING'\"\n retries: 15\n delay: 60\n\n- name: create or update broker with almost all parameter set including credentials\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: present\n deployment_mode: 'ACTIVE_STANDBY_MULTI_AZ'\n use_aws_owned_key: false\n kms_key_id: 'my-precreted-key-id'\n engine_type: 'ACTIVEMQ'\n maintenance_window_start_time:\n DayOfWeek: 'MONDAY'\n TimeOfDay: '03:15'\n TimeZone: 'Europe/Berlin'\n publicly_accessible: true\n storage_type: 'EFS'\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n users:\n - Username: 'initial-user'\n Password': 'plain-text-password'\n ConsoleAccess: true\n tags:\n - env: Test\n creator: ansible\n authentication_strategy: 'SIMPLE'\n auto_minor_version_upgrade: true\n engine_version: \"5.15.13\"\n host_instance_type: 'mq.t3.micro'\n enable_audit_log: true\n enable_general_log: true\n\n- name: reboot a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: restarted\n\n- name: delete a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: absent\n\"\"\"\n\nRETURN = r\"\"\"\nbroker:\n description:\n - \"All API responses are converted to snake yaml except 'Tags'\"\n - \"'state=present': API response of create_broker() or update_broker() call\"\n - \"'state=absent': result of describe_broker() call before delete_broker() is triggerd\"\n - \"'state=restarted': result of describe_broker() after reboot has been triggered\"\n type: dict\n returned: success\n\"\"\"\n\ntry:\n import botocore\nexcept ImportError:\n # handled by AnsibleAWSModule\n pass\n\nfrom ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict\nfrom ansible_collections.amazon.aws.plugins.module_utils.modules import 
AnsibleAWSModule\n\n\nPARAMS_MAP = {\n \"authentication_strategy\": \"AuthenticationStrategy\",\n \"auto_minor_version_upgrade\": \"AutoMinorVersionUpgrade\",\n \"broker_name\": \"BrokerName\",\n \"deployment_mode\": \"DeploymentMode\",\n \"use_aws_owned_key\": \"EncryptionOptions/UseAwsOwnedKey\",\n \"kms_key_id\": \"EncryptionOptions/KmsKeyId\",\n \"engine_type\": \"EngineType\",\n \"engine_version\": \"EngineVersion\",\n \"host_instance_type\": \"HostInstanceType\",\n \"enable_audit_log\": \"Logs/Audit\",\n \"enable_general_log\": \"Logs/General\",\n \"maintenance_window_start_time\": \"MaintenanceWindowStartTime\",\n \"publicly_accessible\": \"PubliclyAccessible\",\n \"security_groups\": \"SecurityGroups\",\n \"storage_type\": \"StorageType\",\n \"subnet_ids\": \"SubnetIds\",\n \"users\": \"Users\",\n}\n\n\nDEFAULTS = {\n \"authentication_strategy\": \"SIMPLE\",\n \"auto_minor_version_upgrade\": False,\n \"deployment_mode\": \"SINGLE_INSTANCE\",\n \"use_aws_owned_key\": True,\n \"engine_type\": \"ACTIVEMQ\",\n \"engine_version\": \"latest\",\n \"host_instance_type\": \"mq.t3.micro\",\n \"enable_audit_log\": False,\n \"enable_general_log\": False,\n \"publicly_accessible\": False,\n \"storage_type\": \"EFS\",\n}\n\nCREATE_ONLY_PARAMS = [\n \"deployment_mode\",\n \"use_aws_owned_key\",\n \"kms_key_id\",\n \"engine_type\",\n \"maintenance_window_start_time\",\n \"publicly_accessible\",\n \"storage_type\",\n \"subnet_ids\",\n \"users\",\n \"tags\",\n]\n\n\ndef _set_kwarg(kwargs, key, value):\n mapped_key = PARAMS_MAP[key]\n if \"/\" in mapped_key:\n key_list = mapped_key.split(\"/\")\n key_list.reverse()\n else:\n key_list = [mapped_key]\n data = kwargs\n while len(key_list) > 1:\n this_key = key_list.pop()\n if this_key not in data:\n data[this_key] = {}\n #\n data = data[this_key]\n data[key_list[0]] = value\n\n\ndef _fill_kwargs(module, apply_defaults=True, ignore_create_params=False):\n kwargs = {}\n if apply_defaults:\n for p_name, p_value in DEFAULTS.items():\n _set_kwarg(kwargs, p_name, p_value)\n for p_name in module.params:\n if ignore_create_params and p_name in CREATE_ONLY_PARAMS:\n # silently ignore CREATE_ONLY_PARAMS on update to\n # make playbooks idempotent\n continue\n if p_name in PARAMS_MAP and module.params[p_name] is not None:\n _set_kwarg(kwargs, p_name, module.params[p_name])\n else:\n # ignore\n pass\n return kwargs\n\n\ndef __list_needs_change(current, desired):\n if len(current) != len(desired):\n return True\n # equal length:\n c_sorted = sorted(current)\n d_sorted = sorted(desired)\n for index, value in enumerate(current):\n if value != desired[index]:\n return True\n #\n return False\n\n\ndef __dict_needs_change(current, desired):\n # values contained in 'current' but not specified in 'desired' are ignored\n # value contained in 'desired' but not in 'current' (unsupported attributes) are ignored\n for key in desired:\n if key in current:\n if desired[key] != current[key]:\n return True\n #\n return False\n\n\ndef _needs_change(current, desired):\n needs_change = False\n for key in desired:\n current_value = current[key]\n desired_value = desired[key]\n if isinstance(current_value, (int, str, bool)):\n if current_value != desired_value:\n needs_change = True\n break\n elif isinstance(current_value, list):\n # assumption: all 'list' type settings we allow changes for have scalar values\n if __list_needs_change(current_value, desired_value):\n needs_change = True\n break\n elif isinstance(current_value, dict):\n # assumption: all 'dict' type settings we allow 
changes for have scalar values\n if __dict_needs_change(current_value, desired_value):\n needs_change = True\n break\n else:\n # unexpected type\n needs_change = True\n break\n #\n return needs_change\n\n\ndef get_latest_engine_version(conn, module, engine_type):\n try:\n response = conn.describe_broker_engine_types(EngineType=engine_type)\n return response[\"BrokerEngineTypes\"][0][\"EngineVersions\"][0][\"Name\"]\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list engine versions\")\n\n\ndef get_broker_id(conn, module):\n try:\n broker_name = module.params[\"broker_name\"]\n broker_id = None\n response = conn.list_brokers(MaxResults=100)\n for broker in response[\"BrokerSummaries\"]:\n if broker[\"BrokerName\"] == broker_name:\n broker_id = broker[\"BrokerId\"]\n break\n return broker_id\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list broker brokers.\")\n\n\ndef get_broker_info(conn, module, broker_id):\n try:\n return conn.describe_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't get broker details.\")\n\n\ndef reboot_broker(conn, module, broker_id):\n try:\n return conn.reboot_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't reboot broker.\")\n\n\ndef delete_broker(conn, module, broker_id):\n try:\n return conn.delete_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't delete broker.\")\n\n\ndef create_broker(conn, module):\n kwargs = _fill_kwargs(module)\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = get_latest_engine_version(conn, module, kwargs[\"EngineType\"])\n if kwargs[\"AuthenticationStrategy\"] == \"LDAP\":\n module.fail_json(msg=\"'AuthenticationStrategy=LDAP' not supported, yet\")\n if \"Users\" not in kwargs:\n # add some stupid default (cannot create broker without any users)\n kwargs[\"Users\"] = [{\"Username\": \"admin\", \"Password\": \"adminPassword\", \"ConsoleAccess\": True, \"Groups\": []}]\n if \"EncryptionOptions\" in kwargs and \"UseAwsOwnedKey\" in kwargs[\"EncryptionOptions\"]:\n kwargs[\"EncryptionOptions\"][\"UseAwsOwnedKey\"] = False\n #\n if \"SecurityGroups\" not in kwargs or len(kwargs[\"SecurityGroups\"]) == 0:\n module.fail_json(msg=\"At least one security group must be specified on broker creation\")\n #\n changed = True\n result = conn.create_broker(**kwargs)\n #\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": changed}\n\n\ndef update_broker(conn, module, broker_id):\n kwargs = _fill_kwargs(module, apply_defaults=False, ignore_create_params=True)\n # replace name with id\n broker_name = kwargs[\"BrokerName\"]\n del kwargs[\"BrokerName\"]\n kwargs[\"BrokerId\"] = broker_id\n # get current state for comparison:\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] != \"RUNNING\":\n module.fail_json(\n msg=f\"Cannot trigger update while broker ({broker_id}) is in state {api_result['BrokerState']}\",\n )\n # engine version of 'latest' is taken as \"keep current one\"\n # i.e. 
do not request upgrade on playbook rerun\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = api_result[\"EngineVersion\"]\n result = {\"broker_id\": broker_id, \"broker_name\": broker_name}\n changed = False\n if _needs_change(api_result, kwargs):\n changed = True\n if not module.check_mode:\n api_result = conn.update_broker(**kwargs)\n #\n #\n return {\"broker\": result, \"changed\": changed}\n\n\ndef ensure_absent(conn, module):\n result = {\"broker_name\": module.params[\"broker_name\"], \"broker_id\": None}\n if module.check_mode:\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": True}\n broker_id = get_broker_id(conn, module)\n result[\"broker_id\"] = broker_id\n\n if not broker_id:\n # silently ignore delete of unknown broker (to make it idempotent)\n return {\"broker\": result, \"changed\": False}\n\n try:\n # check for pending delete (small race condition possible here\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] == \"DELETION_IN_PROGRESS\":\n return {\"broker\": result, \"changed\": False}\n delete_broker(conn, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n\n return {\"broker\": result, \"changed\": True}\n\n\ndef ensure_present(conn, module):\n if module.check_mode:\n return {\"broker\": {\"broker_arn\": \"fakeArn\", \"broker_id\": \"fakeId\"}, \"changed\": True}\n\n broker_id = get_broker_id(conn, module)\n if broker_id:\n return update_broker(conn, module, broker_id)\n\n return create_broker(conn, module)\n\n\ndef main():\n argument_spec = dict(\n broker_name=dict(required=True, type=\"str\"),\n state=dict(default=\"present\", choices=[\"present\", \"absent\", \"restarted\"]),\n # parameters only allowed on create\n deployment_mode=dict(choices=[\"SINGLE_INSTANCE\", \"ACTIVE_STANDBY_MULTI_AZ\", \"CLUSTER_MULTI_AZ\"]),\n use_aws_owned_key=dict(type=\"bool\"),\n kms_key_id=dict(type=\"str\"),\n engine_type=dict(choices=[\"ACTIVEMQ\", \"RABBITMQ\"], type=\"str\"),\n maintenance_window_start_time=dict(type=\"dict\"),\n publicly_accessible=dict(type=\"bool\"),\n storage_type=dict(choices=[\"EBS\", \"EFS\"]),\n subnet_ids=dict(type=\"list\", elements=\"str\"),\n users=dict(type=\"list\", elements=\"dict\"),\n tags=dict(type=\"dict\"),\n # parameters allowed on update as well\n authentication_strategy=dict(choices=[\"SIMPLE\", \"LDAP\"]),\n auto_minor_version_upgrade=dict(default=True, type=\"bool\"),\n engine_version=dict(type=\"str\"),\n host_instance_type=dict(type=\"str\"),\n enable_audit_log=dict(default=False, type=\"bool\"),\n enable_general_log=dict(default=False, type=\"bool\"),\n security_groups=dict(type=\"list\", elements=\"str\"),\n )\n\n module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)\n\n connection = module.client(\"mq\")\n\n if module.params[\"state\"] == \"present\":\n try:\n compound_result = ensure_present(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"absent\":\n try:\n compound_result = ensure_absent(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"restarted\":\n broker_id = get_broker_id(connection, module)\n if module.check_mode:\n module.exit_json(broker={\"broker_id\": broker_id if broker_id else 
\"fakeId\"}, changed=True)\n if not broker_id:\n module.fail_json(\n msg=\"Cannot find broker with name {module.params['broker_name']}.\",\n )\n try:\n changed = True\n if not module.check_mode:\n reboot_broker(connection, module, broker_id)\n #\n result = get_broker_info(connection, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n module.exit_json(broker=result, changed=changed)\n\n\nif __name__ == \"__main__\":\n main()\n",
"path": "plugins/modules/mq_broker.py"
}
] | [
{
"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: Contributors to the Ansible project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nDOCUMENTATION = r\"\"\"\n---\nmodule: mq_broker\nversion_added: 6.0.0\nshort_description: MQ broker management\ndescription:\n - Create/update/delete a broker.\n - Reboot a broker.\nauthor:\n - FCO (@fotto)\noptions:\n broker_name:\n description:\n - The Name of the MQ broker to work on.\n type: str\n required: true\n state:\n description:\n - \"C(present): Create/update broker.\"\n - \"C(absent): Delete broker.\"\n - \"C(restarted): Reboot broker.\"\n choices: [ 'present', 'absent', 'restarted' ]\n default: present\n type: str\n deployment_mode:\n description:\n - Set broker deployment type.\n - Can be used only during creation.\n - Defaults to C(SINGLE_INSTANCE).\n choices: [ 'SINGLE_INSTANCE', 'ACTIVE_STANDBY_MULTI_AZ', 'CLUSTER_MULTI_AZ' ]\n type: str\n use_aws_owned_key:\n description:\n - Must be set to C(false) if I(kms_key_id) is provided as well.\n - Can be used only during creation.\n - Defaults to C(true).\n type: bool\n kms_key_id:\n description:\n - Use referenced key to encrypt broker data at rest.\n - Can be used only during creation.\n type: str\n engine_type:\n description:\n - Set broker engine type.\n - Can be used only during creation.\n - Defaults to C(ACTIVEMQ).\n choices: [ 'ACTIVEMQ', 'RABBITMQ' ]\n type: str\n maintenance_window_start_time:\n description:\n - Set maintenance window for automatic minor upgrades.\n - Can be used only during creation.\n - Not providing any value means \"no maintenance window\".\n type: dict\n publicly_accessible:\n description:\n - Allow/disallow public access.\n - Can be used only during creation.\n - Defaults to C(false).\n type: bool\n storage_type:\n description:\n - Set underlying storage type.\n - Can be used only during creation.\n - Defaults to C(EFS).\n choices: [ 'EBS', 'EFS' ]\n type: str\n subnet_ids:\n description:\n - Defines where deploy broker instances to.\n - Minimum required number depends on deployment type.\n - Can be used only during creation.\n type: list\n elements: str\n users:\n description:\n - This parameter allows to use a custom set of initial user(s).\n - M(community.aws.mq_user) is the preferred way to manage (local) users\n however a broker cannot be created without any user.\n - If nothing is specified a default C(admin) user will be created along with brokers.\n - Can be used only during creation. 
Use M(community.aws.mq_user) module for updates.\n type: list\n elements: dict\n tags:\n description:\n - Tag newly created brokers.\n - Can be used only during creation.\n type: dict\n authentication_strategy:\n description: Choose between locally and remotely managed users.\n choices: [ 'SIMPLE', 'LDAP' ]\n type: str\n auto_minor_version_upgrade:\n description: Allow/disallow automatic minor version upgrades.\n type: bool\n default: true\n engine_version:\n description:\n - Set engine version of broker.\n - The special value C(latest) will pick the latest available version.\n - The special value C(latest) is ignored on update.\n type: str\n host_instance_type:\n description: Instance type of broker instances.\n type: str\n enable_audit_log:\n description: Enable/disable to push audit logs to AWS CloudWatch.\n type: bool\n default: false\n enable_general_log:\n description: Enable/disable to push general logs to AWS CloudWatch.\n type: bool\n default: false\n security_groups:\n description:\n - Associate security groups with broker.\n - At least one must be provided during creation.\n type: list\n elements: str\n\nextends_documentation_fragment:\n - amazon.aws.boto3\n - amazon.aws.common.modules\n - amazon.aws.region.modules\n\"\"\"\n\n\nEXAMPLES = r\"\"\"\n- name: create broker (if missing) with minimal required parameters\n community.aws.mq_broker:\n broker_name: \"{{ broker_name }}\"\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n register: result\n\n- set_fact:\n broker_id: \"{{ result.broker['BrokerId'] }}\"\n\n- name: use mq_broker_info to wait until broker is ready\n community.aws.mq_broker_info:\n broker_id: \"{{ broker_id }}\"\n register: result\n until: \"result.broker['BrokerState'] == 'RUNNING'\"\n retries: 15\n delay: 60\n\n- name: create or update broker with almost all parameter set including credentials\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: present\n deployment_mode: 'ACTIVE_STANDBY_MULTI_AZ'\n use_aws_owned_key: false\n kms_key_id: 'my-precreted-key-id'\n engine_type: 'ACTIVEMQ'\n maintenance_window_start_time:\n DayOfWeek: 'MONDAY'\n TimeOfDay: '03:15'\n TimeZone: 'Europe/Berlin'\n publicly_accessible: true\n storage_type: 'EFS'\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n users:\n - Username: 'initial-user'\n Password': 'plain-text-password'\n ConsoleAccess: true\n tags:\n - env: Test\n creator: ansible\n authentication_strategy: 'SIMPLE'\n auto_minor_version_upgrade: true\n engine_version: \"5.15.13\"\n host_instance_type: 'mq.t3.micro'\n enable_audit_log: true\n enable_general_log: true\n\n- name: reboot a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: restarted\n\n- name: delete a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: absent\n\"\"\"\n\nRETURN = r\"\"\"\nbroker:\n description:\n - \"All API responses are converted to snake yaml except 'Tags'\"\n - \"'state=present': API response of create_broker() or update_broker() call\"\n - \"'state=absent': result of describe_broker() call before delete_broker() is triggerd\"\n - \"'state=restarted': result of describe_broker() after reboot has been triggered\"\n type: dict\n returned: success\n\"\"\"\n\ntry:\n import botocore\nexcept ImportError:\n # handled by AnsibleAWSModule\n pass\n\nfrom ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict\nfrom ansible_collections.amazon.aws.plugins.module_utils.modules import 
AnsibleAWSModule\n\n\nPARAMS_MAP = {\n \"authentication_strategy\": \"AuthenticationStrategy\",\n \"auto_minor_version_upgrade\": \"AutoMinorVersionUpgrade\",\n \"broker_name\": \"BrokerName\",\n \"deployment_mode\": \"DeploymentMode\",\n \"use_aws_owned_key\": \"EncryptionOptions/UseAwsOwnedKey\",\n \"kms_key_id\": \"EncryptionOptions/KmsKeyId\",\n \"engine_type\": \"EngineType\",\n \"engine_version\": \"EngineVersion\",\n \"host_instance_type\": \"HostInstanceType\",\n \"enable_audit_log\": \"Logs/Audit\",\n \"enable_general_log\": \"Logs/General\",\n \"maintenance_window_start_time\": \"MaintenanceWindowStartTime\",\n \"publicly_accessible\": \"PubliclyAccessible\",\n \"security_groups\": \"SecurityGroups\",\n \"storage_type\": \"StorageType\",\n \"subnet_ids\": \"SubnetIds\",\n \"users\": \"Users\",\n \"tags\": \"Tags\",\n}\n\n\nDEFAULTS = {\n \"authentication_strategy\": \"SIMPLE\",\n \"auto_minor_version_upgrade\": False,\n \"deployment_mode\": \"SINGLE_INSTANCE\",\n \"use_aws_owned_key\": True,\n \"engine_type\": \"ACTIVEMQ\",\n \"engine_version\": \"latest\",\n \"host_instance_type\": \"mq.t3.micro\",\n \"enable_audit_log\": False,\n \"enable_general_log\": False,\n \"publicly_accessible\": False,\n \"storage_type\": \"EFS\",\n}\n\nCREATE_ONLY_PARAMS = [\n \"deployment_mode\",\n \"use_aws_owned_key\",\n \"kms_key_id\",\n \"engine_type\",\n \"maintenance_window_start_time\",\n \"publicly_accessible\",\n \"storage_type\",\n \"subnet_ids\",\n \"users\",\n \"tags\",\n]\n\n\ndef _set_kwarg(kwargs, key, value):\n mapped_key = PARAMS_MAP[key]\n if \"/\" in mapped_key:\n key_list = mapped_key.split(\"/\")\n key_list.reverse()\n else:\n key_list = [mapped_key]\n data = kwargs\n while len(key_list) > 1:\n this_key = key_list.pop()\n if this_key not in data:\n data[this_key] = {}\n #\n data = data[this_key]\n data[key_list[0]] = value\n\n\ndef _fill_kwargs(module, apply_defaults=True, ignore_create_params=False):\n kwargs = {}\n if apply_defaults:\n for p_name, p_value in DEFAULTS.items():\n _set_kwarg(kwargs, p_name, p_value)\n for p_name in module.params:\n if ignore_create_params and p_name in CREATE_ONLY_PARAMS:\n # silently ignore CREATE_ONLY_PARAMS on update to\n # make playbooks idempotent\n continue\n if p_name in PARAMS_MAP and module.params[p_name] is not None:\n _set_kwarg(kwargs, p_name, module.params[p_name])\n else:\n # ignore\n pass\n return kwargs\n\n\ndef __list_needs_change(current, desired):\n if len(current) != len(desired):\n return True\n # equal length:\n c_sorted = sorted(current)\n d_sorted = sorted(desired)\n for index, value in enumerate(current):\n if value != desired[index]:\n return True\n #\n return False\n\n\ndef __dict_needs_change(current, desired):\n # values contained in 'current' but not specified in 'desired' are ignored\n # value contained in 'desired' but not in 'current' (unsupported attributes) are ignored\n for key in desired:\n if key in current:\n if desired[key] != current[key]:\n return True\n #\n return False\n\n\ndef _needs_change(current, desired):\n needs_change = False\n for key in desired:\n current_value = current[key]\n desired_value = desired[key]\n if isinstance(current_value, (int, str, bool)):\n if current_value != desired_value:\n needs_change = True\n break\n elif isinstance(current_value, list):\n # assumption: all 'list' type settings we allow changes for have scalar values\n if __list_needs_change(current_value, desired_value):\n needs_change = True\n break\n elif isinstance(current_value, dict):\n # assumption: all 'dict' type 
settings we allow changes for have scalar values\n if __dict_needs_change(current_value, desired_value):\n needs_change = True\n break\n else:\n # unexpected type\n needs_change = True\n break\n #\n return needs_change\n\n\ndef get_latest_engine_version(conn, module, engine_type):\n try:\n response = conn.describe_broker_engine_types(EngineType=engine_type)\n return response[\"BrokerEngineTypes\"][0][\"EngineVersions\"][0][\"Name\"]\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list engine versions\")\n\n\ndef get_broker_id(conn, module):\n try:\n broker_name = module.params[\"broker_name\"]\n broker_id = None\n response = conn.list_brokers(MaxResults=100)\n for broker in response[\"BrokerSummaries\"]:\n if broker[\"BrokerName\"] == broker_name:\n broker_id = broker[\"BrokerId\"]\n break\n return broker_id\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list broker brokers.\")\n\n\ndef get_broker_info(conn, module, broker_id):\n try:\n return conn.describe_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't get broker details.\")\n\n\ndef reboot_broker(conn, module, broker_id):\n try:\n return conn.reboot_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't reboot broker.\")\n\n\ndef delete_broker(conn, module, broker_id):\n try:\n return conn.delete_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't delete broker.\")\n\n\ndef create_broker(conn, module):\n kwargs = _fill_kwargs(module)\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = get_latest_engine_version(conn, module, kwargs[\"EngineType\"])\n if kwargs[\"AuthenticationStrategy\"] == \"LDAP\":\n module.fail_json(msg=\"'AuthenticationStrategy=LDAP' not supported, yet\")\n if \"Users\" not in kwargs:\n # add some stupid default (cannot create broker without any users)\n kwargs[\"Users\"] = [{\"Username\": \"admin\", \"Password\": \"adminPassword\", \"ConsoleAccess\": True, \"Groups\": []}]\n if \"EncryptionOptions\" in kwargs and \"UseAwsOwnedKey\" in kwargs[\"EncryptionOptions\"]:\n kwargs[\"EncryptionOptions\"][\"UseAwsOwnedKey\"] = False\n #\n if \"SecurityGroups\" not in kwargs or len(kwargs[\"SecurityGroups\"]) == 0:\n module.fail_json(msg=\"At least one security group must be specified on broker creation\")\n #\n changed = True\n result = conn.create_broker(**kwargs)\n #\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": changed}\n\n\ndef update_broker(conn, module, broker_id):\n kwargs = _fill_kwargs(module, apply_defaults=False, ignore_create_params=True)\n # replace name with id\n broker_name = kwargs[\"BrokerName\"]\n del kwargs[\"BrokerName\"]\n kwargs[\"BrokerId\"] = broker_id\n # get current state for comparison:\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] != \"RUNNING\":\n module.fail_json(\n msg=f\"Cannot trigger update while broker ({broker_id}) is in state {api_result['BrokerState']}\",\n )\n # engine version of 'latest' is taken as \"keep current one\"\n # i.e. 
do not request upgrade on playbook rerun\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = api_result[\"EngineVersion\"]\n result = {\"broker_id\": broker_id, \"broker_name\": broker_name}\n changed = False\n if _needs_change(api_result, kwargs):\n changed = True\n if not module.check_mode:\n api_result = conn.update_broker(**kwargs)\n #\n #\n return {\"broker\": result, \"changed\": changed}\n\n\ndef ensure_absent(conn, module):\n result = {\"broker_name\": module.params[\"broker_name\"], \"broker_id\": None}\n if module.check_mode:\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": True}\n broker_id = get_broker_id(conn, module)\n result[\"broker_id\"] = broker_id\n\n if not broker_id:\n # silently ignore delete of unknown broker (to make it idempotent)\n return {\"broker\": result, \"changed\": False}\n\n try:\n # check for pending delete (small race condition possible here\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] == \"DELETION_IN_PROGRESS\":\n return {\"broker\": result, \"changed\": False}\n delete_broker(conn, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n\n return {\"broker\": result, \"changed\": True}\n\n\ndef ensure_present(conn, module):\n if module.check_mode:\n return {\"broker\": {\"broker_arn\": \"fakeArn\", \"broker_id\": \"fakeId\"}, \"changed\": True}\n\n broker_id = get_broker_id(conn, module)\n if broker_id:\n return update_broker(conn, module, broker_id)\n\n return create_broker(conn, module)\n\n\ndef main():\n argument_spec = dict(\n broker_name=dict(required=True, type=\"str\"),\n state=dict(default=\"present\", choices=[\"present\", \"absent\", \"restarted\"]),\n # parameters only allowed on create\n deployment_mode=dict(choices=[\"SINGLE_INSTANCE\", \"ACTIVE_STANDBY_MULTI_AZ\", \"CLUSTER_MULTI_AZ\"]),\n use_aws_owned_key=dict(type=\"bool\"),\n kms_key_id=dict(type=\"str\"),\n engine_type=dict(choices=[\"ACTIVEMQ\", \"RABBITMQ\"], type=\"str\"),\n maintenance_window_start_time=dict(type=\"dict\"),\n publicly_accessible=dict(type=\"bool\"),\n storage_type=dict(choices=[\"EBS\", \"EFS\"]),\n subnet_ids=dict(type=\"list\", elements=\"str\"),\n users=dict(type=\"list\", elements=\"dict\"),\n tags=dict(type=\"dict\"),\n # parameters allowed on update as well\n authentication_strategy=dict(choices=[\"SIMPLE\", \"LDAP\"]),\n auto_minor_version_upgrade=dict(default=True, type=\"bool\"),\n engine_version=dict(type=\"str\"),\n host_instance_type=dict(type=\"str\"),\n enable_audit_log=dict(default=False, type=\"bool\"),\n enable_general_log=dict(default=False, type=\"bool\"),\n security_groups=dict(type=\"list\", elements=\"str\"),\n )\n\n module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)\n\n connection = module.client(\"mq\")\n\n if module.params[\"state\"] == \"present\":\n try:\n compound_result = ensure_present(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"absent\":\n try:\n compound_result = ensure_absent(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"restarted\":\n broker_id = get_broker_id(connection, module)\n if module.check_mode:\n module.exit_json(broker={\"broker_id\": broker_id if broker_id else 
\"fakeId\"}, changed=True)\n if not broker_id:\n module.fail_json(\n msg=\"Cannot find broker with name {module.params['broker_name']}.\",\n )\n try:\n changed = True\n if not module.check_mode:\n reboot_broker(connection, module, broker_id)\n #\n result = get_broker_info(connection, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n module.exit_json(broker=result, changed=changed)\n\n\nif __name__ == \"__main__\":\n main()\n",
"path": "plugins/modules/mq_broker.py"
}
] | diff --git a/changelogs/fragments/1832-mq_broker_tags.yml b/changelogs/fragments/1832-mq_broker_tags.yml
new file mode 100644
index 00000000000..b2320dd3c71
--- /dev/null
+++ b/changelogs/fragments/1832-mq_broker_tags.yml
@@ -0,0 +1,2 @@
+bugfixes:
+ - mq_broker - ensure broker is created with ``tags`` when passed (https://github.com/ansible-collections/community.aws/issues/1832).
\ No newline at end of file
diff --git a/plugins/modules/mq_broker.py b/plugins/modules/mq_broker.py
index 5fda006b8b0..0453a43b6d0 100644
--- a/plugins/modules/mq_broker.py
+++ b/plugins/modules/mq_broker.py
@@ -237,6 +237,7 @@
"storage_type": "StorageType",
"subnet_ids": "SubnetIds",
"users": "Users",
+ "tags": "Tags",
}
diff --git a/tests/integration/targets/mq/defaults/main.yml b/tests/integration/targets/mq/defaults/main.yml
index 896ba8afa7d..2199c2f637f 100644
--- a/tests/integration/targets/mq/defaults/main.yml
+++ b/tests/integration/targets/mq/defaults/main.yml
@@ -5,3 +5,5 @@ vpc_name: "{{ resource_prefix }}-vpc"
vpc_cidr: "10.0.0.0/16"
subnet_cidr: "10.0.1.0/24"
sg_name: "{{resource_prefix}}-sg"
+tags:
+ workload_type: other
\ No newline at end of file
diff --git a/tests/integration/targets/mq/tasks/broker_tests.yml b/tests/integration/targets/mq/tasks/broker_tests.yml
index 958b80cb205..d4d399da7c1 100644
--- a/tests/integration/targets/mq/tasks/broker_tests.yml
+++ b/tests/integration/targets/mq/tasks/broker_tests.yml
@@ -3,6 +3,7 @@
broker_name: "{{ broker_name }}"
security_groups: "{{ broker_sg_ids.split(',') }}"
subnet_ids: "{{ broker_subnet_ids.split(',') }}"
+ tags: "{{ tags }}"
register: result
- set_fact:
broker_id: "{{ result.broker['broker_id'] }}"
@@ -20,6 +21,7 @@
- result_c1.broker['broker_name'] == broker_name
- result_c1.broker['broker_state'] == 'CREATION_IN_PROGRESS'
- ( result_c1.broker['storage_type'] | upper ) == 'EFS'
+ - result_c1.broker['tags'] == tags
when: not ansible_check_mode
- debug:
msg: "Wait until broker {{ broker_name }} ({{ broker_id }}) enters running state. This may take several minutes"
diff --git a/tests/integration/targets/mq/tasks/main.yml b/tests/integration/targets/mq/tasks/main.yml
index 2055700480b..e84367a76c2 100644
--- a/tests/integration/targets/mq/tasks/main.yml
+++ b/tests/integration/targets/mq/tasks/main.yml
@@ -32,4 +32,4 @@
- name: cleanup broker
include_tasks: broker_cleanup.yml
- - include_tasks: env_cleanup.yml
\ No newline at end of file
+ - include_tasks: env_cleanup.yml
|
pytorch__torchdynamo-1539 | Remove the triton dependency of Inductor CPU codegen
We import the triton library even when we compile the CPU codegen, e.g.:
```
from ctypes import c_void_p, c_long
import torch
import random
from torch import empty_strided, as_strided, device
from torchinductor.codecache import AsyncCompile
aten = torch.ops.aten
async_compile = AsyncCompile()
import triton
import triton.language as tl
from torchinductor.triton_ops.autotune import grid
from torch._C import _cuda_getCurrentRawStream as get_cuda_stream
kernel0 = async_compile.cpp('''
#include "/tmp/torchinductor_ybliang/i5/ci5zbqbzeij2usetynv7oczewshegubkvtpswwuumpp6xjync55y.h"
extern "C" void kernel(const float* __restrict__ in_ptr0,
const float* __restrict__ in_ptr1,
float* __restrict__ out_ptr0,
const long ks0)
{
#pragma GCC ivdep
for(long i0=0; i0<ks0*ks0; ++i0)
{
{
{
auto tmp0 = in_ptr0[i0];
auto tmp1 = in_ptr1[i0];
auto tmp2 = tmp0 + tmp1;
out_ptr0[i0] = tmp2;
}
}
}
}
''')
```
This will cause a dependency issue for users who just want to use Inductor on CPU, so I think we should remove this dependency. Looking at the code [here](https://github.com/pytorch/torchdynamo/blob/main/torchinductor/codegen/wrapper.py#L198), we currently add these headers based on whether ```has_triton``` is true; maybe we should define a better criterion.
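One possible criterion, sketched below under the assumption that triton only matters when a CUDA device is actually available (a sketch of the idea, not necessarily the final implementation):
```python
import functools

import torch


@functools.lru_cache(None)
def has_triton():
    # Never report triton as available for a pure CPU compile; this keeps the
    # generated wrapper code free of `import triton` when CUDA is not in use.
    if not torch.cuda.is_available():
        return False
    try:
        import triton

        return triton is not None
    except (ImportError, ModuleNotFoundError):
        return False
```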
| [
{
"content": "import collections\nimport functools\nimport operator\nimport time\nfrom importlib import import_module\nfrom typing import Any\nfrom typing import Dict\nfrom typing import List\n\nimport numpy as np\nimport sympy\nimport torch\nfrom torch.fx.immutable_collections import immutable_dict\nfrom torch.fx.immutable_collections import immutable_list\n\nfrom . import config\n\nVarRanges = Dict[sympy.Expr, sympy.Expr]\n\n# We import torchdynamo modules indirectly to allow a future rename to torch.dynamo\ndynamo_config = import_module(f\"{config.dynamo_import}.config\")\ndynamo_debug_utils = import_module(f\"{config.dynamo_import}.debug_utils\")\ndynamo_logging = import_module(f\"{config.dynamo_import}.logging\")\ndynamo_optimizations = import_module(f\"{config.dynamo_import}.optimizations\")\ndynamo_testing = import_module(f\"{config.dynamo_import}.testing\")\ndynamo_utils = import_module(f\"{config.dynamo_import}.utils\")\n\n\[email protected]_cache(None)\ndef has_triton():\n try:\n import triton\n\n return triton is not None\n except (ImportError, ModuleNotFoundError):\n return False\n\n\[email protected]_cache(None)\ndef has_torchvision_roi_align():\n try:\n from torchvision.ops import roi_align # noqa\n\n return roi_align is not None and hasattr(\n getattr(torch.ops, \"torchvision\", None), \"roi_align\"\n )\n except (ImportError, ModuleNotFoundError):\n return False\n\n\ndef conditional_product(*args):\n return functools.reduce(operator.mul, [x for x in args if x])\n\n\ndef sympy_product(it):\n return functools.reduce(operator.mul, it, sympy.Integer(1))\n\n\ndef sympy_dot(seq1, seq2):\n assert len(seq1) == len(seq2)\n return sympy.expand(sum(a * b for a, b in zip(seq1, seq2)))\n\n\ndef unique(it):\n return {id(x): x for x in it}.values()\n\n\ndef ceildiv(numer: int, denom: int):\n assert isinstance(numer, int) and isinstance(denom, int)\n return -(numer // -denom)\n\n\ndef gen_gm_and_inputs(target, args, kwargs):\n g = torch.fx.Graph()\n g_args = []\n a_args = []\n for n, arg in enumerate(args):\n if isinstance(arg, torch.Tensor):\n g_args.append(g.placeholder(f\"arg{n}\"))\n a_args.append(arg)\n else:\n g_args.append(arg)\n assert all(not isinstance(x, torch.Tensor) for x in kwargs.values())\n node = g.call_function(target, tuple(g_args), kwargs)\n if (\n len(target._schema.returns) == 1\n and str(target._schema.returns[0].type) == \"Tensor\"\n ):\n node = (node,)\n g.output(node)\n\n gm = torch.fx.GraphModule({}, g)\n return gm, a_args\n\n\ndef synchronize():\n if torch.cuda.is_available():\n torch.cuda.synchronize()\n\n\ndef timed(model, example_inputs, times=1):\n synchronize()\n torch.manual_seed(1337)\n t0 = time.perf_counter()\n for _ in range(times):\n result = model(*example_inputs)\n synchronize()\n t1 = time.perf_counter()\n # GC the result after timing\n assert result is not None\n return t1 - t0\n\n\ndef print_performance(fn, args=(), times=10, repeat=10, baseline=1.0):\n timings = [timed(fn, args, times) for _ in range(repeat)]\n took = np.median(timings)\n print(f\"{took/baseline:.6f}\")\n return took\n\n\nimmutable_dict.__hash__ = lambda self: hash(tuple(self.items()))\nimmutable_list.__hash__ = lambda self: hash(tuple(self))\n\n\ndef freeze_inputs(f):\n \"\"\"\n Useful for wrapping lists in tuples for caching purposes\n \"\"\"\n\n def freeze_value(x):\n if isinstance(x, (immutable_dict, immutable_list)):\n return x\n if isinstance(x, list):\n return immutable_list(x)\n if isinstance(x, dict):\n return immutable_dict(x)\n return x\n\n @functools.wraps(f)\n def 
wrapped(*args):\n args = [freeze_value(x) for x in args]\n return f(*args)\n\n wrapped.cache_info = f.cache_info\n return wrapped\n\n\ndef precompute_method(obj: Any, method: str):\n \"\"\"Replace obj.method() with a new method that returns a precomputed constant.\"\"\"\n result = getattr(obj, method)()\n setattr(obj, method, lambda: result)\n\n\ndef precompute_methods(obj: Any, methods: List[str]):\n \"\"\"Replace methods with new methods that returns a precomputed constants.\"\"\"\n for method in methods:\n precompute_method(obj, method)\n\n\ndef cmp(a, b):\n return int(a > b) - int(a < b)\n\n\ndef cache_on_self(fn):\n key = f\"__{fn.__name__}_cache\"\n\n @functools.wraps(fn)\n def wrapper(self):\n if not hasattr(self, key):\n setattr(self, key, fn(self))\n return getattr(self, key)\n\n return wrapper\n\n\ndef sympy_str(expr: sympy.Expr):\n \"\"\"\n Normal sympy str is very slow, this is a lot faster. The result are\n somewhat worse, as it doesn't do as much simplification. So don't\n use this for final codegen.\n \"\"\"\n if isinstance(expr, sympy.Symbol):\n return expr.name\n if isinstance(expr, sympy.Add):\n return \" + \".join(map(sympy_str, expr.args))\n if isinstance(expr, sympy.Mul):\n return \" * \".join(map(sympy_str, expr.args))\n\n from .ir import CleanDiv\n from .ir import IndexingDiv\n from .ir import ModularIndexing\n\n if isinstance(expr, (ModularIndexing, CleanDiv, IndexingDiv)):\n return f\"{expr.func.__name__}({', '.join(map(sympy_str, expr.args))})\"\n return str(expr)\n\n\ndef sympy_subs(expr: sympy.Expr, replacements: Dict[Any, Any]):\n \"\"\"\n xreplace is faster than subs, but is way more picky\n \"\"\"\n\n def promote_strings(key):\n if isinstance(key, str):\n return sympy.Symbol(key)\n return key\n\n return expr.xreplace(\n {promote_strings(k): promote_strings(v) for k, v in replacements.items()}\n )\n\n\ndef free_symbol_startswith(index: sympy.Expr, prefix: str):\n return any(v.name.startswith(prefix) for v in index.free_symbols)\n\n\ndef has_incompatible_cudagraph_ops(gm):\n forbidden_list = set(\n [\n \"aten._fused_moving_avg_obs_fq_helper.default\",\n \"aten._fused_moving_avg_obs_fq_helper_functional.default\",\n \"fbgemm.dense_to_jagged.default\",\n \"fbgemm.jagged_to_padded_dense.default\",\n ]\n )\n for node in gm.graph.nodes:\n if str(node.target) in forbidden_list:\n return True\n return False\n\n\ninstance_descriptor = collections.namedtuple(\n \"instance_descriptor\", [\"divisible_by_16\", \"equal_to_1\"]\n)\n",
"path": "torchinductor/utils.py"
}
] | [
{
"content": "import collections\nimport functools\nimport operator\nimport time\nfrom importlib import import_module\nfrom typing import Any\nfrom typing import Dict\nfrom typing import List\n\nimport numpy as np\nimport sympy\nimport torch\nfrom torch.fx.immutable_collections import immutable_dict\nfrom torch.fx.immutable_collections import immutable_list\n\nfrom . import config\n\nVarRanges = Dict[sympy.Expr, sympy.Expr]\n\n# We import torchdynamo modules indirectly to allow a future rename to torch.dynamo\ndynamo_debug_utils = import_module(f\"{config.dynamo_import}.debug_utils\")\ndynamo_optimizations = import_module(f\"{config.dynamo_import}.optimizations\")\ndynamo_testing = import_module(f\"{config.dynamo_import}.testing\")\ndynamo_utils = import_module(f\"{config.dynamo_import}.utils\")\n\n\[email protected]_cache(None)\ndef has_triton():\n if not torch.cuda.is_available():\n return False\n try:\n import triton\n\n return triton is not None\n except (ImportError, ModuleNotFoundError):\n return False\n\n\[email protected]_cache(None)\ndef has_torchvision_roi_align():\n try:\n from torchvision.ops import roi_align # noqa\n\n return roi_align is not None and hasattr(\n getattr(torch.ops, \"torchvision\", None), \"roi_align\"\n )\n except (ImportError, ModuleNotFoundError):\n return False\n\n\[email protected]_cache(None)\ndef has_triton_libdevice():\n try:\n from triton.language import libdevice\n\n return libdevice is not None\n except (ImportError, ModuleNotFoundError):\n return False\n\n\ndef conditional_product(*args):\n return functools.reduce(operator.mul, [x for x in args if x])\n\n\ndef sympy_product(it):\n return functools.reduce(operator.mul, it, sympy.Integer(1))\n\n\ndef sympy_dot(seq1, seq2):\n assert len(seq1) == len(seq2)\n return sympy.expand(sum(a * b for a, b in zip(seq1, seq2)))\n\n\ndef unique(it):\n return {id(x): x for x in it}.values()\n\n\ndef ceildiv(numer: int, denom: int):\n assert isinstance(numer, int) and isinstance(denom, int)\n return -(numer // -denom)\n\n\ndef gen_gm_and_inputs(target, args, kwargs):\n g = torch.fx.Graph()\n g_args = []\n a_args = []\n for n, arg in enumerate(args):\n if isinstance(arg, torch.Tensor):\n g_args.append(g.placeholder(f\"arg{n}\"))\n a_args.append(arg)\n else:\n g_args.append(arg)\n assert all(not isinstance(x, torch.Tensor) for x in kwargs.values())\n node = g.call_function(target, tuple(g_args), kwargs)\n if (\n len(target._schema.returns) == 1\n and str(target._schema.returns[0].type) == \"Tensor\"\n ):\n node = (node,)\n g.output(node)\n\n gm = torch.fx.GraphModule({}, g)\n return gm, a_args\n\n\ndef synchronize():\n if torch.cuda.is_available():\n torch.cuda.synchronize()\n\n\ndef timed(model, example_inputs, times=1):\n synchronize()\n torch.manual_seed(1337)\n t0 = time.perf_counter()\n for _ in range(times):\n result = model(*example_inputs)\n synchronize()\n t1 = time.perf_counter()\n # GC the result after timing\n assert result is not None\n return t1 - t0\n\n\ndef print_performance(fn, args=(), times=10, repeat=10, baseline=1.0):\n timings = [timed(fn, args, times) for _ in range(repeat)]\n took = np.median(timings)\n print(f\"{took/baseline:.6f}\")\n return took\n\n\nimmutable_dict.__hash__ = lambda self: hash(tuple(self.items()))\nimmutable_list.__hash__ = lambda self: hash(tuple(self))\n\n\ndef freeze_inputs(f):\n \"\"\"\n Useful for wrapping lists in tuples for caching purposes\n \"\"\"\n\n def freeze_value(x):\n if isinstance(x, (immutable_dict, immutable_list)):\n return x\n if isinstance(x, list):\n 
return immutable_list(x)\n if isinstance(x, dict):\n return immutable_dict(x)\n return x\n\n @functools.wraps(f)\n def wrapped(*args):\n args = [freeze_value(x) for x in args]\n return f(*args)\n\n wrapped.cache_info = f.cache_info\n return wrapped\n\n\ndef precompute_method(obj: Any, method: str):\n \"\"\"Replace obj.method() with a new method that returns a precomputed constant.\"\"\"\n result = getattr(obj, method)()\n setattr(obj, method, lambda: result)\n\n\ndef precompute_methods(obj: Any, methods: List[str]):\n \"\"\"Replace methods with new methods that returns a precomputed constants.\"\"\"\n for method in methods:\n precompute_method(obj, method)\n\n\ndef cmp(a, b):\n return int(a > b) - int(a < b)\n\n\ndef cache_on_self(fn):\n key = f\"__{fn.__name__}_cache\"\n\n @functools.wraps(fn)\n def wrapper(self):\n if not hasattr(self, key):\n setattr(self, key, fn(self))\n return getattr(self, key)\n\n return wrapper\n\n\ndef sympy_str(expr: sympy.Expr):\n \"\"\"\n Normal sympy str is very slow, this is a lot faster. The result are\n somewhat worse, as it doesn't do as much simplification. So don't\n use this for final codegen.\n \"\"\"\n if isinstance(expr, sympy.Symbol):\n return expr.name\n if isinstance(expr, sympy.Add):\n return \" + \".join(map(sympy_str, expr.args))\n if isinstance(expr, sympy.Mul):\n return \" * \".join(map(sympy_str, expr.args))\n\n from .ir import CleanDiv\n from .ir import IndexingDiv\n from .ir import ModularIndexing\n\n if isinstance(expr, (ModularIndexing, CleanDiv, IndexingDiv)):\n return f\"{expr.func.__name__}({', '.join(map(sympy_str, expr.args))})\"\n return str(expr)\n\n\ndef sympy_subs(expr: sympy.Expr, replacements: Dict[Any, Any]):\n \"\"\"\n xreplace is faster than subs, but is way more picky\n \"\"\"\n\n def promote_strings(key):\n if isinstance(key, str):\n return sympy.Symbol(key)\n return key\n\n return expr.xreplace(\n {promote_strings(k): promote_strings(v) for k, v in replacements.items()}\n )\n\n\ndef free_symbol_startswith(index: sympy.Expr, prefix: str):\n return any(v.name.startswith(prefix) for v in index.free_symbols)\n\n\ndef has_incompatible_cudagraph_ops(gm):\n forbidden_list = set(\n [\n \"aten._fused_moving_avg_obs_fq_helper.default\",\n \"aten._fused_moving_avg_obs_fq_helper_functional.default\",\n \"fbgemm.dense_to_jagged.default\",\n \"fbgemm.jagged_to_padded_dense.default\",\n ]\n )\n for node in gm.graph.nodes:\n if str(node.target) in forbidden_list:\n return True\n return False\n\n\ninstance_descriptor = collections.namedtuple(\n \"instance_descriptor\", [\"divisible_by_16\", \"equal_to_1\"]\n)\n",
"path": "torchinductor/utils.py"
}
] | diff --git a/torchinductor/utils.py b/torchinductor/utils.py
index 4716dd4daa..37722807f5 100644
--- a/torchinductor/utils.py
+++ b/torchinductor/utils.py
@@ -26,6 +26,8 @@
@functools.lru_cache(None)
def has_triton():
+ if not torch.cuda.is_available():
+ return False
try:
import triton
|
microsoft__superbenchmark-209 | V0.3.0 Release Plan
# Release Manager
@TobeyQin
# Endgame
Code freeze: 9/1/2021
Bug Bash date: 9/2/2021
Release date: 9/17/2021
# Main Features
## SuperBench Framework
### SB Runner -- @abuccts
- [x] MPI mode implementation
PR: #146
### SB Benchmarks -- @guoshzhao
- [x] Docker Base
PR: #179 and #180
## Single-node Validation
### Micro-benchmarks -- @guoshzhao @yukirora
1. - [x] Memory (Tool: Nvidia Bandwidth Test Tool) -- @yukirora ETA: 5/28/2021
PR: #114
| Metrics | Unit | Description |
|---|---|---|
| H2D_Mem_BW_\<GPU ID> | GB/s | host-to-GPU bandwidth for each GPU |
| D2H_Mem_BW_\<GPU ID> | GB/s | GPU-to-host bandwidth for each GPU |
2. - [ ] Device P2P Bandwidth (Tool: Nvidia p2pBandwidthLatencyTest Tool) -- Delayed
| Metrics | Unit | Description |
|---|---|---|
| P2P_BW_Max | GB/s | The maximum bandwidth in Bidirectional P2P=Enabled Bandwidth Matrix for all GPUs |
| P2P_BW_Min | GB/s | The minimum bandwidth in Bidirectional P2P=Enabled Bandwidth Matrix for all GPUs |
| P2P_BW_Avg | GB/s | The average bandwidth in Bidirectional P2P=Enabled Bandwidth Matrix for all GPUs |
3. - [x] IBLoopback (Tool: PerfTest – Standard RDMA Test Tool) -- @yukirora ETA: 7/30/2021
PR: #112 and #129
| Metrics | Unit | Description |
|---|---|---|
| IB_Write | MB/s | The IB write loopback throughput with different message size |
| IB_Read | MB/s | The IB read loopback throughput with different message size |
| IB_Send | MB/s | The IB send loopback throughput with different message size |
4. - [x] NCCL (Tool: Nvidia NCCL Test) -- @yukirora ETA: 7/30/2021
PR: #113 and #128
| Metrics | Unit | Description |
|---|---|---|
| NCCL_AllReduce | GB/s | The NCCL AllReduce performance with different message size |
| NCCL_AllGather | GB/s | The NCCL AllGather performance with different message size |
| NCCL_broadcast | GB/s | The NCCL Broadcast performance with different message size |
| NCCL_reduce | GB/s | The NCCL Reduce performance with different message size |
| NCCL_reduce_scatter | GB/s | The NCCL ReduceScatter performance with different message size |
5. - [x] Disk (Tool: FIO – Standard Disk Performance Tool) -- @yzygitzh ETA: 7/30/2021
PR: #127 and #132 and #161
| Metrics | Unit | Description |
|---|---|---|
| Seq_Read | MB/s | Sequential read performance |
| Seq_Write | MB/s | Sequential write performance |
| Rand_Read | MB/s | Random read performance |
| Rand_Write | MB/s | Random write performance |
| Seq_R/W_Read | MB/s | Read performance in sequential read/write, fixed measurement (read:write = 4:1)|
| Seq_R/W_Write | MB/s | Write performance in sequential read/write (read:write = 4:1)|
| Rand_R/W_Read | MB/s | Read performance in random read/write (read:write = 4:1)|
| Rand_R/W_Write | MB/s | Write performance in random read/write (read:write = 4:1)|
6. - [x] H2D/D2H SM Transmission Bandwidth (Tool: MSR-A build) -- @yzygitzh ETA: 8/6/2021 (see the bandwidth sketch after this list)
PR: #162 and #169
| Metrics | Unit | Description |
|---|---|---|
| H2D_SM_BW_\<GPU ID>| GB/s | host-to-GPU bandwidth using GPU kernel for each GPU |
| D2H_SM_BW_\<GPU ID> | GB/s | GPU-to-host bandwidth using GPU kernel for each GPU |
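
For intuition only, the host-to-GPU bandwidth metrics above (items 1 and 6) can be approximated with a few lines of PyTorch. This is a rough sketch using pinned host memory, not the NVIDIA or MSR-A tools the benchmarks actually use, and the buffer size and iteration count are arbitrary:

```python
# Rough H2D copy-bandwidth sketch (illustrative only; not the benchmark tool itself).
import time
import torch

def h2d_bandwidth_gbps(size_mb: int = 256, iters: int = 20) -> float:
    # Pinned host memory gives a realistic host-to-device copy bandwidth
    host = torch.empty(size_mb * 1024 * 1024, dtype=torch.uint8, pin_memory=True)
    dev = torch.empty(host.numel(), dtype=torch.uint8, device="cuda")
    torch.cuda.synchronize()
    start = time.perf_counter()
    for _ in range(iters):
        dev.copy_(host, non_blocking=True)
    torch.cuda.synchronize()
    elapsed = time.perf_counter() - start
    return (size_mb / 1024) * iters / elapsed  # GB transferred per second

if __name__ == "__main__":
    print(f"H2D_Mem_BW ~ {h2d_bandwidth_gbps():.1f} GB/s")
```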
## Support AMD
### Docker Image Support -- @guoshzhao ETA: 7/16/2021
- [x] ROCm 4.2 PyTorch 1.7 PR: #164
- [x] ROCm 4.0 PyTorch 1.7 PR: #164
### Micro Benchmarks
1. - [x] Kernel Launch (Tool: MSR-A build) -- @yukirora ETA: 7/30/2021
PR: #137 and #136
| Metrics | Unit | Description |
|---|---|---|
| Kernel_Launch_Event_Time | Time (ms) | Dispatch latency measured in GPU time using hipEventRecord() |
| Kernel_Launch_Wall_Time | Time (ms) | Dispatch latency measured in CPU time |
2. - [x] RCCL (Tool: AMD RCCL Test) -- @yukirora ETA: 7/30/2021
PR: #139 and #143
| Metrics | Unit | Description |
|---|---|---|
| RCCL_AllReduce | GB/s | The RCCL AllReduce performance with different message size |
| RCCL_AllGather | GB/s | The RCCL AllGather performance with different message size |
| RCCL_broadcast | GB/s | The RCCL Broadcast performance with different message size |
| RCCL_reduce | GB/s | The RCCL Reduce performance with different message size |
| RCCL_reduce_scatter | GB/s | The RCCL ReduceScatter performance with different message size |
3. - [x] GEMM FLOPS (Tool: AMD rocblas-bench Tool) -- @yukirora ETA: 8/27/2021
PR: #144 and #165
| Metrics | Unit | Description |
|---|---|---|
| FP64 | GFLOPS | FP64 FLOPS without MatrixCore |
| FP32 | GFLOPS | FP32 FLOPS without MatrixCore |
| FP16 | GFLOPS | FP16 FLOPS without MatrixCore |
| FP32(MC) | GFLOPS | TF32 FLOPS with MatrixCore |
| FP16(MC) | GFLOPS | FP16 FLOPS with MatrixCore |
| BF16(MC) | GFLOPS | BF16 FLOPS with MatrixCore |
| INT8(MC) | GOPS | INT8 FLOPS with MatrixCore |
| INT4(MC) | GOPS | INT4 FLOPS with MatrixCore |
4. - [x] Memory (Tool: HIP Bandwidth Test Tool) -- @yukirora ETA: 8/27/2021
PR: #159 and #153
| Metrics | Unit | Description |
|---|---|---|
| H2D_Mem_BW_\<GPU ID> | GB/s | host-to-GPU bandwidth for each GPU |
| D2H_Mem_BW_\<GPU ID> | GB/s | GPU-to-host bandwidth for each GPU |
### E2E Benchmarks -- @guoshzhao ETA: 7/16/2021
1. - [x] CNN models -- Use PyTorch TORCHVISION.MODELS sub-package (see the loading sketch after this list)
- ResNet: ResNet-50, ResNet-101, ResNet-152
- DenseNet: DenseNet-169, DenseNet-201
- VGG: VGG-11, VGG-13, VGG-16, VGG-19
2. - [x] BERT -- Use huggingface Transformers
- BERT
- BERT LARGE
3. - [x] LSTM -- Use PyTorch TORCH.NN sub-package
4. - [x] GPT-2 -- Use huggingface Transformers
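
As a concrete illustration of the model sources listed above, a minimal sketch of pulling one CNN model from torchvision and running a single training step (batch size and input shape are arbitrary; SuperBench's own benchmark wrappers are not shown):

```python
# Minimal E2E-model loading sketch (illustrative only).
import torch
import torchvision.models as models

model = models.resnet50().cuda().train()
inputs = torch.randn(32, 3, 224, 224, device="cuda")  # arbitrary batch of ImageNet-sized inputs
outputs = model(inputs)
loss = outputs.sum()   # placeholder loss just to exercise the backward pass
loss.backward()
```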
## Result Summary -- @cp5555
- [x] Generate a report to summarize the results -- @guoshzhao ETA: 7/30/2021
PR: #147, #149, and #157
- [ ] Support basic analysis feature (boxplot figure, outlier detection, etc.)
## Bug Fix
- [x] VGG models failed on A100 GPU with batch_size=128 #115
PR: #134
## Other Improvement
1. Contribution related -- @lynex
- [x] Contribution rules (#131)
- [x] System information collection (#160)
2. Document -- @TobeyQin
- [x] Add release process doc (#130)
- [x] Add design documents (#125)
- [x] Add developer guide doc for coding style (#155)
- [x] Add contribution rules (#131)
- [x] Add docker image list (#154)
- [x] Add initial validation results
- [x] ~~Add metric reasoning doc -- @cp5555 @guoshzhao~~
3. Process monitor
- [ ] Add heartbeat to monitor process health
- [ ] Auto kill all processes on all nodes
4. Coding style -- @abuccts
- [x] Add vscode online
------------
## Backlogs
### Multi-Node Benchmarks
- Mellanox ClusterKit
- GPCNeT
### UI Design
| [
{
"content": "# Copyright (c) Microsoft Corporation.\n# Licensed under the MIT License.\n\n\"\"\"SuperBench Python module.\n\nProvide hardware and software benchmarks for AI systems.\n\"\"\"\n\n__version__ = '0.2.1'\n__author__ = 'Microsoft'\n",
"path": "superbench/__init__.py"
}
] | [
{
"content": "# Copyright (c) Microsoft Corporation.\n# Licensed under the MIT License.\n\n\"\"\"SuperBench Python module.\n\nProvide hardware and software benchmarks for AI systems.\n\"\"\"\n\n__version__ = '0.3.0'\n__author__ = 'Microsoft'\n",
"path": "superbench/__init__.py"
}
] | diff --git a/README.md b/README.md
index 320ecca21..eaefbf308 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@
__SuperBench__ is a validation and profiling tool for AI infrastructure.
-📢 [v0.2.1](https://github.com/microsoft/superbenchmark/releases/tag/v0.2.1) has been released!
+📢 [v0.3.0](https://github.com/microsoft/superbenchmark/releases/tag/v0.3.0) has been released!
## _Check [aka.ms/superbench](https://aka.ms/superbench) for more details._
diff --git a/docs/developer-guides/using-docker.mdx b/docs/developer-guides/using-docker.mdx
index a27d88dd2..2c3ab02f0 100644
--- a/docs/developer-guides/using-docker.mdx
+++ b/docs/developer-guides/using-docker.mdx
@@ -36,7 +36,10 @@ docker buildx build \
<TabItem value='rocm'>
```bash
-# coming soon
+export DOCKER_BUILDKIT=1
+docker buildx build \
+ --platform linux/amd64 --cache-to type=inline,mode=max \
+ --tag superbench-dev --file dockerfile/rocm4.2-pytorch1.7.0.dockerfile .
```
</TabItem>
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index 53697e087..673901aac 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -57,7 +57,7 @@ You can clone the source from GitHub and build it.
:::note Note
You should checkout corresponding tag to use release version, for example,
-`git clone -b v0.2.1 https://github.com/microsoft/superbenchmark`
+`git clone -b v0.3.0 https://github.com/microsoft/superbenchmark`
:::
```bash
diff --git a/docs/getting-started/run-superbench.md b/docs/getting-started/run-superbench.md
index 661b914dc..0d88fa4f8 100644
--- a/docs/getting-started/run-superbench.md
+++ b/docs/getting-started/run-superbench.md
@@ -27,7 +27,7 @@ sb deploy -f remote.ini --host-password [password]
:::note Note
You should deploy corresponding Docker image to use release version, for example,
-`sb deploy -f local.ini -i superbench/superbench:v0.2.1-cuda11.1.1`
+`sb deploy -f local.ini -i superbench/superbench:v0.3.0-cuda11.1.1`
:::
## Run
diff --git a/docs/superbench-config.mdx b/docs/superbench-config.mdx
index b4bab07e4..4ef87f03d 100644
--- a/docs/superbench-config.mdx
+++ b/docs/superbench-config.mdx
@@ -66,7 +66,7 @@ superbench:
<TabItem value='example'>
```yaml
-version: v0.2
+version: v0.3
superbench:
enable: benchmark_1
var:
diff --git a/docs/tutorial/container-images.mdx b/docs/tutorial/container-images.mdx
index 9af332a89..d3ea643ef 100644
--- a/docs/tutorial/container-images.mdx
+++ b/docs/tutorial/container-images.mdx
@@ -29,13 +29,17 @@ available tags are listed below for all stable versions.
| Tag | Description |
| ----------------- | ---------------------------------- |
+| v0.3.0-cuda11.1.1 | SuperBench v0.3.0 with CUDA 11.1.1 |
| v0.2.1-cuda11.1.1 | SuperBench v0.2.1 with CUDA 11.1.1 |
| v0.2.0-cuda11.1.1 | SuperBench v0.2.0 with CUDA 11.1.1 |
</TabItem>
<TabItem value='rocm'>
- Coming soon.
+| Tag | Description |
+| --------------------------- | ---------------------------------------------- |
+| v0.3.0-rocm4.2-pytorch1.7.0 | SuperBench v0.3.0 with ROCm 4.2, PyTorch 1.7.0 |
+| v0.3.0-rocm4.0-pytorch1.7.0 | SuperBench v0.3.0 with ROCm 4.0, PyTorch 1.7.0 |
</TabItem>
</Tabs>
diff --git a/superbench/__init__.py b/superbench/__init__.py
index 0a7d95e9a..60cedd82e 100644
--- a/superbench/__init__.py
+++ b/superbench/__init__.py
@@ -6,5 +6,5 @@
Provide hardware and software benchmarks for AI systems.
"""
-__version__ = '0.2.1'
+__version__ = '0.3.0'
__author__ = 'Microsoft'
diff --git a/superbench/config/amd_mi100_hpe.yaml b/superbench/config/amd_mi100_hpe.yaml
index a54217eaf..f2c894314 100644
--- a/superbench/config/amd_mi100_hpe.yaml
+++ b/superbench/config/amd_mi100_hpe.yaml
@@ -3,7 +3,7 @@
# Server:
# - Product: HPE Apollo 6500
-version: v0.2
+version: v0.3
superbench:
enable: null
var:
@@ -52,8 +52,8 @@ superbench:
gemm-flops:
<<: *default_local_mode
parameters:
- m: 7680
- n: 8192
+ m: 7680
+ n: 8192
k: 8192
ib-loopback:
enable: true
diff --git a/superbench/config/amd_mi100_z53.yaml b/superbench/config/amd_mi100_z53.yaml
index 2ba94b61e..99f197bbf 100644
--- a/superbench/config/amd_mi100_z53.yaml
+++ b/superbench/config/amd_mi100_z53.yaml
@@ -4,7 +4,7 @@
# - Product: G482-Z53
# - Link: https://www.gigabyte.cn/FileUpload/Global/MicroSite/553/G482-Z53.html
-version: v0.2
+version: v0.3
superbench:
enable: null
var:
diff --git a/superbench/config/azure_ndv4.yaml b/superbench/config/azure_ndv4.yaml
index 633870ff5..aed971ca7 100644
--- a/superbench/config/azure_ndv4.yaml
+++ b/superbench/config/azure_ndv4.yaml
@@ -1,5 +1,5 @@
# SuperBench Config
-version: v0.2
+version: v0.3
superbench:
enable: null
var:
diff --git a/superbench/config/default.yaml b/superbench/config/default.yaml
index 98ee7fbbd..d3194a54c 100644
--- a/superbench/config/default.yaml
+++ b/superbench/config/default.yaml
@@ -1,5 +1,5 @@
# SuperBench Config
-version: v0.2
+version: v0.3
superbench:
enable: null
var:
diff --git a/website/blog/2021-09-22-release-0-3.md b/website/blog/2021-09-22-release-0-3.md
new file mode 100644
index 000000000..931434da0
--- /dev/null
+++ b/website/blog/2021-09-22-release-0-3.md
@@ -0,0 +1,132 @@
+---
+slug: release-sb-v0.3
+title: Releasing SuperBench v0.3
+author: Peng Cheng
+author_title: SuperBench Team
+author_url: https://github.com/cp5555
+author_image_url: https://github.com/cp5555.png
+tags: [superbench, announcement, release]
+---
+
+We are very happy to announce that **SuperBench 0.3.0 version** is officially released today!
+
+You can install and try superbench by following [Getting Started Tutorial](https://microsoft.github.io/superbenchmark/docs/getting-started/installation).
+
+## SuperBench 0.3.0 Release Notes
+
+### SuperBench Framework
+
+#### Runner
+
+- Implement MPI mode.
+
+#### Benchmarks
+
+- Support Docker benchmark.
+
+### Single-node Validation
+
+#### Micro Benchmarks
+
+1. Memory (Tool: NVIDIA/AMD Bandwidth Test Tool)
+
+ | Metrics | Unit | Description |
+ |----------------|------|-------------------------------------|
+ | H2D_Mem_BW_GPU | GB/s | host-to-GPU bandwidth for each GPU |
+ | D2H_Mem_BW_GPU | GB/s | GPU-to-host bandwidth for each GPU |
+
+2. IBLoopback (Tool: PerfTest – Standard RDMA Test Tool)
+
+ | Metrics | Unit | Description |
+ |----------|------|---------------------------------------------------------------|
+ | IB_Write | MB/s | The IB write loopback throughput with different message sizes |
+ | IB_Read | MB/s | The IB read loopback throughput with different message sizes |
+ | IB_Send | MB/s | The IB send loopback throughput with different message sizes |
+
+3. NCCL/RCCL (Tool: NCCL/RCCL Tests)
+
+ | Metrics | Unit | Description |
+ |---------------------|------|-----------------------------------------------------------------|
+ | NCCL_AllReduce | GB/s | The NCCL AllReduce performance with different message sizes |
+ | NCCL_AllGather | GB/s | The NCCL AllGather performance with different message sizes |
+ | NCCL_broadcast | GB/s | The NCCL Broadcast performance with different message sizes |
+ | NCCL_reduce | GB/s | The NCCL Reduce performance with different message sizes |
+ | NCCL_reduce_scatter | GB/s | The NCCL ReduceScatter performance with different message sizes |
+
+4. Disk (Tool: FIO – Standard Disk Performance Tool)
+
+ | Metrics | Unit | Description |
+ |----------------|------|---------------------------------------------------------------------------------|
+ | Seq_Read | MB/s | Sequential read performance |
+ | Seq_Write | MB/s | Sequential write performance |
+ | Rand_Read | MB/s | Random read performance |
+ | Rand_Write | MB/s | Random write performance |
+ | Seq_R/W_Read | MB/s | Read performance in sequential read/write, fixed measurement (read:write = 4:1) |
+ | Seq_R/W_Write | MB/s | Write performance in sequential read/write (read:write = 4:1) |
+ | Rand_R/W_Read | MB/s | Read performance in random read/write (read:write = 4:1) |
+ | Rand_R/W_Write | MB/s | Write performance in random read/write (read:write = 4:1) |
+
+5. H2D/D2H SM Transmission Bandwidth (Tool: MSR-A build)
+
+ | Metrics | Unit | Description |
+ |---------------|------|-----------------------------------------------------|
+ | H2D_SM_BW_GPU | GB/s | host-to-GPU bandwidth using GPU kernel for each GPU |
+ | D2H_SM_BW_GPU | GB/s | GPU-to-host bandwidth using GPU kernel for each GPU |
+
+### AMD GPU Support
+
+#### Docker Image Support
+
+- ROCm 4.2 PyTorch 1.7.0
+- ROCm 4.0 PyTorch 1.7.0
+
+#### Micro Benchmarks
+
+1. Kernel Launch (Tool: MSR-A build)
+
+ | Metrics | Unit | Description |
+ |--------------------------|-----------|--------------------------------------------------------------|
+ | Kernel_Launch_Event_Time | Time (ms) | Dispatch latency measured in GPU time using hipEventRecord() |
+ | Kernel_Launch_Wall_Time | Time (ms) | Dispatch latency measured in CPU time |
+
+2. GEMM FLOPS (Tool: AMD rocblas-bench Tool)
+
+ | Metrics | Unit | Description |
+ |----------|--------|-------------------------------|
+ | FP64 | GFLOPS | FP64 FLOPS without MatrixCore |
+ | FP32(MC) | GFLOPS | TF32 FLOPS with MatrixCore |
+ | FP16(MC) | GFLOPS | FP16 FLOPS with MatrixCore |
+ | BF16(MC) | GFLOPS | BF16 FLOPS with MatrixCore |
+ | INT8(MC) | GOPS | INT8 FLOPS with MatrixCore |
+
+#### E2E Benchmarks
+
+1. CNN models -- Use PyTorch torchvision models
+ - ResNet: ResNet-50, ResNet-101, ResNet-152
+ - DenseNet: DenseNet-169, DenseNet-201
+ - VGG: VGG-11, VGG-13, VGG-16, VGG-19
+
+2. BERT -- Use huggingface Transformers
+ - BERT
+ - BERT Large
+
+3. LSTM -- Use PyTorch
+4. GPT-2 -- Use huggingface Transformers
+
+### Bug Fix
+
+- VGG models failed on A100 GPU with batch_size=128
+
+### Other Improvement
+
+1. Contribution related
+ - Contribute rule
+ - System information collection
+
+2. Document
+ - Add release process doc
+ - Add design documents
+ - Add developer guide doc for coding style
+ - Add contribution rules
+ - Add docker image list
+ - Add initial validation results
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
index 6c6fdbd5b..51982f062 100644
--- a/website/docusaurus.config.js
+++ b/website/docusaurus.config.js
@@ -101,7 +101,7 @@ module.exports = {
announcementBar: {
id: 'supportus',
content:
- '📢 <a href="https://microsoft.github.io/superbenchmark/blog/release-sb-v0.2">v0.2.1</a> has been released! ' +
+ '📢 <a href="https://microsoft.github.io/superbenchmark/blog/release-sb-v0.3">v0.3.0</a> has been released! ' +
'⭐️ If you like SuperBench, give it a star on <a target="_blank" rel="noopener noreferrer" href="https://github.com/microsoft/superbenchmark">GitHub</a>! ⭐️',
},
algolia: {
diff --git a/website/package-lock.json b/website/package-lock.json
index d04d66d26..50c6fc31b 100644
--- a/website/package-lock.json
+++ b/website/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "superbench-website",
- "version": "0.2.1",
+ "version": "0.3.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
diff --git a/website/package.json b/website/package.json
index cf6874ebf..d2ed79ae4 100644
--- a/website/package.json
+++ b/website/package.json
@@ -1,6 +1,6 @@
{
"name": "superbench-website",
- "version": "0.2.1",
+ "version": "0.3.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
|
ansible-collections__community.aws-1971 | mq_broker: Tagging a broker on creation does not work
### Summary
When creating a new MQ broker using the following task, the broker does not get tagged.
```
- name: create broker with minimal parameters
mq_broker:
broker_name: "{{ broker_name }}"
security_groups: "{{ broker_sg_ids.split(',') }}"
subnet_ids: "{{ broker_subnet_ids.split(',') }}"
tags:
"Foo": "Bar"
"FooBar": "foobar"
```
Actual result:
```
changed: [testhost] => {
"broker": {
"broker_arn": "arn:aws:mq:us-east-1:123456789100:broker:ansible-test-52903175--mq:b-70e0807b-102d-42ae-8805-94ec6395436c",
"broker_id": "b-70e0807b-102d-42ae-8805-94ec6395436c",
"response_metadata": {
"http_headers": {
"access-control-allow-origin": "*",
"access-control-expose-headers": "x-amzn-errortype,x-amzn-requestid,x-amzn-errormessage,x-amzn-trace-id,x-amz-apigw-id,date",
"cache-control": "no-cache; no-store, must-revalidate, private",
"connection": "keep-alive",
"content-length": "191",
"content-type": "application/json",
"date": "Wed, 31 May 2023 13:25:16 GMT",
"expires": "0",
"pragma": "no-cache",
"x-amz-apigw-id": "FyidUFppIAMF1zw=",
"x-amzn-requestid": "12345bcb-5678-890d-972c-26a92712aaeb",
"x-amzn-trace-id": "Root=1-64774abb-2b3bf58a2b0cbf7800afdef6"
},
"http_status_code": 200,
"request_id": "59392bcb-5406-460d-972c-26a92712aaeb",
"retry_attempts": 0
}
},
```
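For reference, the underlying Amazon MQ CreateBroker API does accept a tag map at creation time, so the expectation is that tags passed to the module are forwarded to that call. A minimal boto3 sketch of such a call is shown below; parameter values are placeholders for illustration and are not the module's internals or defaults:
```python
# Illustrative only: shows that CreateBroker accepts Tags at creation time.
import boto3

client = boto3.client("mq", region_name="us-east-1")

response = client.create_broker(
    BrokerName="ansible-test-broker",        # placeholder name
    EngineType="ACTIVEMQ",
    EngineVersion="5.15.13",
    HostInstanceType="mq.t3.micro",
    DeploymentMode="SINGLE_INSTANCE",
    PubliclyAccessible=False,
    AutoMinorVersionUpgrade=False,
    SubnetIds=["subnet-xxx"],                 # placeholder subnet
    SecurityGroups=["sg-xxx"],                # placeholder security group
    Users=[{"Username": "admin", "Password": "adminPassword"}],
    Tags={"Foo": "Bar", "FooBar": "foobar"},  # tags applied on creation
)
print(response["BrokerArn"])
```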
### Issue Type
Bug Report
### Component Name
mq_broker
### Ansible Version
```console (paste below)
$ ansible --version
ansible [core 2.14.3]
```
### Collection Versions
```console (paste below)
$ ansible-galaxy collection list
Collection Version
----------------------------- -------
amazon.aws 6.0.0
community.aws 6.0.0
```
### AWS SDK versions
```console (paste below)
$ pip show boto boto3 botocore
Name: boto3
Version: 1.22.0
Summary: The AWS SDK for Python
Home-page: https://github.com/boto/boto3
Author: Amazon Web Services
Author-email:
License: Apache License 2.0
Location: /Users/alinabuzachis/anaconda3/envs/py310/lib/python3.10/site-packages
Requires: botocore, jmespath, s3transfer
Required-by: gouttelette
---
Name: botocore
Version: 1.25.13
Summary: Low-level, data-driven core of boto 3.
Home-page: https://github.com/boto/botocore
Author: Amazon Web Services
Author-email:
License: Apache License 2.0
Location: /Users/alinabuzachis/anaconda3/envs/py310/lib/python3.10/site-packages
Requires: jmespath, python-dateutil, urllib3
Required-by: aiobotocore, awscli, boto3, s3transfer
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
```
### OS / Environment
_No response_
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Expected Results
The task above creates the MQ broker and applies the specified tags (`Foo` and `FooBar`) to it.
### Actual Results
```console (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
| [
{
"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: Contributors to the Ansible project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nDOCUMENTATION = r\"\"\"\n---\nmodule: mq_broker\nversion_added: 6.0.0\nshort_description: MQ broker management\ndescription:\n - Create/update/delete a broker.\n - Reboot a broker.\nauthor:\n - FCO (@fotto)\noptions:\n broker_name:\n description:\n - The Name of the MQ broker to work on.\n type: str\n required: true\n state:\n description:\n - \"C(present): Create/update broker.\"\n - \"C(absent): Delete broker.\"\n - \"C(restarted): Reboot broker.\"\n choices: [ 'present', 'absent', 'restarted' ]\n default: present\n type: str\n deployment_mode:\n description:\n - Set broker deployment type.\n - Can be used only during creation.\n - Defaults to C(SINGLE_INSTANCE).\n choices: [ 'SINGLE_INSTANCE', 'ACTIVE_STANDBY_MULTI_AZ', 'CLUSTER_MULTI_AZ' ]\n type: str\n use_aws_owned_key:\n description:\n - Must be set to C(false) if I(kms_key_id) is provided as well.\n - Can be used only during creation.\n - Defaults to C(true).\n type: bool\n kms_key_id:\n description:\n - Use referenced key to encrypt broker data at rest.\n - Can be used only during creation.\n type: str\n engine_type:\n description:\n - Set broker engine type.\n - Can be used only during creation.\n - Defaults to C(ACTIVEMQ).\n choices: [ 'ACTIVEMQ', 'RABBITMQ' ]\n type: str\n maintenance_window_start_time:\n description:\n - Set maintenance window for automatic minor upgrades.\n - Can be used only during creation.\n - Not providing any value means \"no maintenance window\".\n type: dict\n publicly_accessible:\n description:\n - Allow/disallow public access.\n - Can be used only during creation.\n - Defaults to C(false).\n type: bool\n storage_type:\n description:\n - Set underlying storage type.\n - Can be used only during creation.\n - Defaults to C(EFS).\n choices: [ 'EBS', 'EFS' ]\n type: str\n subnet_ids:\n description:\n - Defines where deploy broker instances to.\n - Minimum required number depends on deployment type.\n - Can be used only during creation.\n type: list\n elements: str\n users:\n description:\n - This parameter allows to use a custom set of initial user(s).\n - M(community.aws.mq_user) is the preferred way to manage (local) users\n however a broker cannot be created without any user.\n - If nothing is specified a default C(admin) user will be created along with brokers.\n - Can be used only during creation. 
Use M(community.aws.mq_user) module for updates.\n type: list\n elements: dict\n tags:\n description:\n - Tag newly created brokers.\n - Can be used only during creation.\n type: dict\n authentication_strategy:\n description: Choose between locally and remotely managed users.\n choices: [ 'SIMPLE', 'LDAP' ]\n type: str\n auto_minor_version_upgrade:\n description: Allow/disallow automatic minor version upgrades.\n type: bool\n default: true\n engine_version:\n description:\n - Set engine version of broker.\n - The special value C(latest) will pick the latest available version.\n - The special value C(latest) is ignored on update.\n type: str\n host_instance_type:\n description: Instance type of broker instances.\n type: str\n enable_audit_log:\n description: Enable/disable to push audit logs to AWS CloudWatch.\n type: bool\n default: false\n enable_general_log:\n description: Enable/disable to push general logs to AWS CloudWatch.\n type: bool\n default: false\n security_groups:\n description:\n - Associate security groups with broker.\n - At least one must be provided during creation.\n type: list\n elements: str\n\nextends_documentation_fragment:\n - amazon.aws.boto3\n - amazon.aws.common.modules\n - amazon.aws.region.modules\n\"\"\"\n\n\nEXAMPLES = r\"\"\"\n- name: create broker (if missing) with minimal required parameters\n community.aws.mq_broker:\n broker_name: \"{{ broker_name }}\"\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n register: result\n\n- set_fact:\n broker_id: \"{{ result.broker['BrokerId'] }}\"\n\n- name: use mq_broker_info to wait until broker is ready\n community.aws.mq_broker_info:\n broker_id: \"{{ broker_id }}\"\n register: result\n until: \"result.broker['BrokerState'] == 'RUNNING'\"\n retries: 15\n delay: 60\n\n- name: create or update broker with almost all parameter set including credentials\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: present\n deployment_mode: 'ACTIVE_STANDBY_MULTI_AZ'\n use_aws_owned_key: false\n kms_key_id: 'my-precreted-key-id'\n engine_type: 'ACTIVEMQ'\n maintenance_window_start_time:\n DayOfWeek: 'MONDAY'\n TimeOfDay: '03:15'\n TimeZone: 'Europe/Berlin'\n publicly_accessible: true\n storage_type: 'EFS'\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n users:\n - Username: 'initial-user'\n Password: 'plain-text-password'\n ConsoleAccess: true\n tags:\n - env: Test\n creator: ansible\n authentication_strategy: 'SIMPLE'\n auto_minor_version_upgrade: true\n engine_version: \"5.15.13\"\n host_instance_type: 'mq.t3.micro'\n enable_audit_log: true\n enable_general_log: true\n\n- name: reboot a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: restarted\n\n- name: delete a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: absent\n\"\"\"\n\nRETURN = r\"\"\"\nbroker:\n description:\n - \"All API responses are converted to snake yaml except 'Tags'\"\n - \"'state=present': API response of create_broker() or update_broker() call\"\n - \"'state=absent': result of describe_broker() call before delete_broker() is triggerd\"\n - \"'state=restarted': result of describe_broker() after reboot has been triggered\"\n type: dict\n returned: success\n\"\"\"\n\ntry:\n import botocore\nexcept ImportError:\n # handled by AnsibleAWSModule\n pass\n\nfrom ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict\nfrom ansible_collections.amazon.aws.plugins.module_utils.modules import 
AnsibleAWSModule\n\n\nPARAMS_MAP = {\n \"authentication_strategy\": \"AuthenticationStrategy\",\n \"auto_minor_version_upgrade\": \"AutoMinorVersionUpgrade\",\n \"broker_name\": \"BrokerName\",\n \"deployment_mode\": \"DeploymentMode\",\n \"use_aws_owned_key\": \"EncryptionOptions/UseAwsOwnedKey\",\n \"kms_key_id\": \"EncryptionOptions/KmsKeyId\",\n \"engine_type\": \"EngineType\",\n \"engine_version\": \"EngineVersion\",\n \"host_instance_type\": \"HostInstanceType\",\n \"enable_audit_log\": \"Logs/Audit\",\n \"enable_general_log\": \"Logs/General\",\n \"maintenance_window_start_time\": \"MaintenanceWindowStartTime\",\n \"publicly_accessible\": \"PubliclyAccessible\",\n \"security_groups\": \"SecurityGroups\",\n \"storage_type\": \"StorageType\",\n \"subnet_ids\": \"SubnetIds\",\n \"users\": \"Users\",\n}\n\n\nDEFAULTS = {\n \"authentication_strategy\": \"SIMPLE\",\n \"auto_minor_version_upgrade\": False,\n \"deployment_mode\": \"SINGLE_INSTANCE\",\n \"use_aws_owned_key\": True,\n \"engine_type\": \"ACTIVEMQ\",\n \"engine_version\": \"latest\",\n \"host_instance_type\": \"mq.t3.micro\",\n \"enable_audit_log\": False,\n \"enable_general_log\": False,\n \"publicly_accessible\": False,\n \"storage_type\": \"EFS\",\n}\n\nCREATE_ONLY_PARAMS = [\n \"deployment_mode\",\n \"use_aws_owned_key\",\n \"kms_key_id\",\n \"engine_type\",\n \"maintenance_window_start_time\",\n \"publicly_accessible\",\n \"storage_type\",\n \"subnet_ids\",\n \"users\",\n \"tags\",\n]\n\n\ndef _set_kwarg(kwargs, key, value):\n mapped_key = PARAMS_MAP[key]\n if \"/\" in mapped_key:\n key_list = mapped_key.split(\"/\")\n key_list.reverse()\n else:\n key_list = [mapped_key]\n data = kwargs\n while len(key_list) > 1:\n this_key = key_list.pop()\n if this_key not in data:\n data[this_key] = {}\n #\n data = data[this_key]\n data[key_list[0]] = value\n\n\ndef _fill_kwargs(module, apply_defaults=True, ignore_create_params=False):\n kwargs = {}\n if apply_defaults:\n for p_name, p_value in DEFAULTS.items():\n _set_kwarg(kwargs, p_name, p_value)\n for p_name in module.params:\n if ignore_create_params and p_name in CREATE_ONLY_PARAMS:\n # silently ignore CREATE_ONLY_PARAMS on update to\n # make playbooks idempotent\n continue\n if p_name in PARAMS_MAP and module.params[p_name] is not None:\n _set_kwarg(kwargs, p_name, module.params[p_name])\n else:\n # ignore\n pass\n return kwargs\n\n\ndef __list_needs_change(current, desired):\n if len(current) != len(desired):\n return True\n # equal length:\n c_sorted = sorted(current)\n d_sorted = sorted(desired)\n for index, value in enumerate(current):\n if value != desired[index]:\n return True\n #\n return False\n\n\ndef __dict_needs_change(current, desired):\n # values contained in 'current' but not specified in 'desired' are ignored\n # value contained in 'desired' but not in 'current' (unsupported attributes) are ignored\n for key in desired:\n if key in current:\n if desired[key] != current[key]:\n return True\n #\n return False\n\n\ndef _needs_change(current, desired):\n needs_change = False\n for key in desired:\n current_value = current[key]\n desired_value = desired[key]\n if isinstance(current_value, (int, str, bool)):\n if current_value != desired_value:\n needs_change = True\n break\n elif isinstance(current_value, list):\n # assumption: all 'list' type settings we allow changes for have scalar values\n if __list_needs_change(current_value, desired_value):\n needs_change = True\n break\n elif isinstance(current_value, dict):\n # assumption: all 'dict' type settings we allow 
changes for have scalar values\n if __dict_needs_change(current_value, desired_value):\n needs_change = True\n break\n else:\n # unexpected type\n needs_change = True\n break\n #\n return needs_change\n\n\ndef get_latest_engine_version(conn, module, engine_type):\n try:\n response = conn.describe_broker_engine_types(EngineType=engine_type)\n return response[\"BrokerEngineTypes\"][0][\"EngineVersions\"][0][\"Name\"]\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list engine versions\")\n\n\ndef get_broker_id(conn, module):\n try:\n broker_name = module.params[\"broker_name\"]\n broker_id = None\n response = conn.list_brokers(MaxResults=100)\n for broker in response[\"BrokerSummaries\"]:\n if broker[\"BrokerName\"] == broker_name:\n broker_id = broker[\"BrokerId\"]\n break\n return broker_id\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list broker brokers.\")\n\n\ndef get_broker_info(conn, module, broker_id):\n try:\n return conn.describe_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't get broker details.\")\n\n\ndef reboot_broker(conn, module, broker_id):\n try:\n return conn.reboot_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't reboot broker.\")\n\n\ndef delete_broker(conn, module, broker_id):\n try:\n return conn.delete_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't delete broker.\")\n\n\ndef create_broker(conn, module):\n kwargs = _fill_kwargs(module)\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = get_latest_engine_version(conn, module, kwargs[\"EngineType\"])\n if kwargs[\"AuthenticationStrategy\"] == \"LDAP\":\n module.fail_json(msg=\"'AuthenticationStrategy=LDAP' not supported, yet\")\n if \"Users\" not in kwargs:\n # add some stupid default (cannot create broker without any users)\n kwargs[\"Users\"] = [{\"Username\": \"admin\", \"Password\": \"adminPassword\", \"ConsoleAccess\": True, \"Groups\": []}]\n if \"EncryptionOptions\" in kwargs and \"UseAwsOwnedKey\" in kwargs[\"EncryptionOptions\"]:\n kwargs[\"EncryptionOptions\"][\"UseAwsOwnedKey\"] = False\n #\n if \"SecurityGroups\" not in kwargs or len(kwargs[\"SecurityGroups\"]) == 0:\n module.fail_json(msg=\"At least one security group must be specified on broker creation\")\n #\n changed = True\n result = conn.create_broker(**kwargs)\n #\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": changed}\n\n\ndef update_broker(conn, module, broker_id):\n kwargs = _fill_kwargs(module, apply_defaults=False, ignore_create_params=True)\n # replace name with id\n broker_name = kwargs[\"BrokerName\"]\n del kwargs[\"BrokerName\"]\n kwargs[\"BrokerId\"] = broker_id\n # get current state for comparison:\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] != \"RUNNING\":\n module.fail_json(\n msg=f\"Cannot trigger update while broker ({broker_id}) is in state {api_result['BrokerState']}\",\n )\n # engine version of 'latest' is taken as \"keep current one\"\n # i.e. 
do not request upgrade on playbook rerun\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = api_result[\"EngineVersion\"]\n result = {\"broker_id\": broker_id, \"broker_name\": broker_name}\n changed = False\n if _needs_change(api_result, kwargs):\n changed = True\n if not module.check_mode:\n api_result = conn.update_broker(**kwargs)\n #\n #\n return {\"broker\": result, \"changed\": changed}\n\n\ndef ensure_absent(conn, module):\n result = {\"broker_name\": module.params[\"broker_name\"], \"broker_id\": None}\n if module.check_mode:\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": True}\n broker_id = get_broker_id(conn, module)\n result[\"broker_id\"] = broker_id\n\n if not broker_id:\n # silently ignore delete of unknown broker (to make it idempotent)\n return {\"broker\": result, \"changed\": False}\n\n try:\n # check for pending delete (small race condition possible here\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] == \"DELETION_IN_PROGRESS\":\n return {\"broker\": result, \"changed\": False}\n delete_broker(conn, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n\n return {\"broker\": result, \"changed\": True}\n\n\ndef ensure_present(conn, module):\n if module.check_mode:\n return {\"broker\": {\"broker_arn\": \"fakeArn\", \"broker_id\": \"fakeId\"}, \"changed\": True}\n\n broker_id = get_broker_id(conn, module)\n if broker_id:\n return update_broker(conn, module, broker_id)\n\n return create_broker(conn, module)\n\n\ndef main():\n argument_spec = dict(\n broker_name=dict(required=True, type=\"str\"),\n state=dict(default=\"present\", choices=[\"present\", \"absent\", \"restarted\"]),\n # parameters only allowed on create\n deployment_mode=dict(choices=[\"SINGLE_INSTANCE\", \"ACTIVE_STANDBY_MULTI_AZ\", \"CLUSTER_MULTI_AZ\"]),\n use_aws_owned_key=dict(type=\"bool\"),\n kms_key_id=dict(type=\"str\"),\n engine_type=dict(choices=[\"ACTIVEMQ\", \"RABBITMQ\"], type=\"str\"),\n maintenance_window_start_time=dict(type=\"dict\"),\n publicly_accessible=dict(type=\"bool\"),\n storage_type=dict(choices=[\"EBS\", \"EFS\"]),\n subnet_ids=dict(type=\"list\", elements=\"str\"),\n users=dict(type=\"list\", elements=\"dict\"),\n tags=dict(type=\"dict\"),\n # parameters allowed on update as well\n authentication_strategy=dict(choices=[\"SIMPLE\", \"LDAP\"]),\n auto_minor_version_upgrade=dict(default=True, type=\"bool\"),\n engine_version=dict(type=\"str\"),\n host_instance_type=dict(type=\"str\"),\n enable_audit_log=dict(default=False, type=\"bool\"),\n enable_general_log=dict(default=False, type=\"bool\"),\n security_groups=dict(type=\"list\", elements=\"str\"),\n )\n\n module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)\n\n connection = module.client(\"mq\")\n\n if module.params[\"state\"] == \"present\":\n try:\n compound_result = ensure_present(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"absent\":\n try:\n compound_result = ensure_absent(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"restarted\":\n broker_id = get_broker_id(connection, module)\n if module.check_mode:\n module.exit_json(broker={\"broker_id\": broker_id if broker_id else 
\"fakeId\"}, changed=True)\n if not broker_id:\n module.fail_json(\n msg=\"Cannot find broker with name {module.params['broker_name']}.\",\n )\n try:\n changed = True\n if not module.check_mode:\n reboot_broker(connection, module, broker_id)\n #\n result = get_broker_info(connection, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n module.exit_json(broker=result, changed=changed)\n\n\nif __name__ == \"__main__\":\n main()\n",
"path": "plugins/modules/mq_broker.py"
}
] | [
{
"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: Contributors to the Ansible project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nDOCUMENTATION = r\"\"\"\n---\nmodule: mq_broker\nversion_added: 6.0.0\nshort_description: MQ broker management\ndescription:\n - Create/update/delete a broker.\n - Reboot a broker.\nauthor:\n - FCO (@fotto)\noptions:\n broker_name:\n description:\n - The Name of the MQ broker to work on.\n type: str\n required: true\n state:\n description:\n - \"C(present): Create/update broker.\"\n - \"C(absent): Delete broker.\"\n - \"C(restarted): Reboot broker.\"\n choices: [ 'present', 'absent', 'restarted' ]\n default: present\n type: str\n deployment_mode:\n description:\n - Set broker deployment type.\n - Can be used only during creation.\n - Defaults to C(SINGLE_INSTANCE).\n choices: [ 'SINGLE_INSTANCE', 'ACTIVE_STANDBY_MULTI_AZ', 'CLUSTER_MULTI_AZ' ]\n type: str\n use_aws_owned_key:\n description:\n - Must be set to C(false) if I(kms_key_id) is provided as well.\n - Can be used only during creation.\n - Defaults to C(true).\n type: bool\n kms_key_id:\n description:\n - Use referenced key to encrypt broker data at rest.\n - Can be used only during creation.\n type: str\n engine_type:\n description:\n - Set broker engine type.\n - Can be used only during creation.\n - Defaults to C(ACTIVEMQ).\n choices: [ 'ACTIVEMQ', 'RABBITMQ' ]\n type: str\n maintenance_window_start_time:\n description:\n - Set maintenance window for automatic minor upgrades.\n - Can be used only during creation.\n - Not providing any value means \"no maintenance window\".\n type: dict\n publicly_accessible:\n description:\n - Allow/disallow public access.\n - Can be used only during creation.\n - Defaults to C(false).\n type: bool\n storage_type:\n description:\n - Set underlying storage type.\n - Can be used only during creation.\n - Defaults to C(EFS).\n choices: [ 'EBS', 'EFS' ]\n type: str\n subnet_ids:\n description:\n - Defines where deploy broker instances to.\n - Minimum required number depends on deployment type.\n - Can be used only during creation.\n type: list\n elements: str\n users:\n description:\n - This parameter allows to use a custom set of initial user(s).\n - M(community.aws.mq_user) is the preferred way to manage (local) users\n however a broker cannot be created without any user.\n - If nothing is specified a default C(admin) user will be created along with brokers.\n - Can be used only during creation. 
Use M(community.aws.mq_user) module for updates.\n type: list\n elements: dict\n tags:\n description:\n - Tag newly created brokers.\n - Can be used only during creation.\n type: dict\n authentication_strategy:\n description: Choose between locally and remotely managed users.\n choices: [ 'SIMPLE', 'LDAP' ]\n type: str\n auto_minor_version_upgrade:\n description: Allow/disallow automatic minor version upgrades.\n type: bool\n default: true\n engine_version:\n description:\n - Set engine version of broker.\n - The special value C(latest) will pick the latest available version.\n - The special value C(latest) is ignored on update.\n type: str\n host_instance_type:\n description: Instance type of broker instances.\n type: str\n enable_audit_log:\n description: Enable/disable to push audit logs to AWS CloudWatch.\n type: bool\n default: false\n enable_general_log:\n description: Enable/disable to push general logs to AWS CloudWatch.\n type: bool\n default: false\n security_groups:\n description:\n - Associate security groups with broker.\n - At least one must be provided during creation.\n type: list\n elements: str\n\nextends_documentation_fragment:\n - amazon.aws.boto3\n - amazon.aws.common.modules\n - amazon.aws.region.modules\n\"\"\"\n\n\nEXAMPLES = r\"\"\"\n- name: create broker (if missing) with minimal required parameters\n community.aws.mq_broker:\n broker_name: \"{{ broker_name }}\"\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n register: result\n\n- set_fact:\n broker_id: \"{{ result.broker['BrokerId'] }}\"\n\n- name: use mq_broker_info to wait until broker is ready\n community.aws.mq_broker_info:\n broker_id: \"{{ broker_id }}\"\n register: result\n until: \"result.broker['BrokerState'] == 'RUNNING'\"\n retries: 15\n delay: 60\n\n- name: create or update broker with almost all parameter set including credentials\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: present\n deployment_mode: 'ACTIVE_STANDBY_MULTI_AZ'\n use_aws_owned_key: false\n kms_key_id: 'my-precreted-key-id'\n engine_type: 'ACTIVEMQ'\n maintenance_window_start_time:\n DayOfWeek: 'MONDAY'\n TimeOfDay: '03:15'\n TimeZone: 'Europe/Berlin'\n publicly_accessible: true\n storage_type: 'EFS'\n security_groups:\n - sg_xxxxxxx\n subnet_ids:\n - subnet_xxx\n - subnet_yyy\n users:\n - Username: 'initial-user'\n Password: 'plain-text-password'\n ConsoleAccess: true\n tags:\n - env: Test\n creator: ansible\n authentication_strategy: 'SIMPLE'\n auto_minor_version_upgrade: true\n engine_version: \"5.15.13\"\n host_instance_type: 'mq.t3.micro'\n enable_audit_log: true\n enable_general_log: true\n\n- name: reboot a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: restarted\n\n- name: delete a broker\n community.aws.mq_broker:\n broker_name: \"my_broker_2\"\n state: absent\n\"\"\"\n\nRETURN = r\"\"\"\nbroker:\n description:\n - \"All API responses are converted to snake yaml except 'Tags'\"\n - \"'state=present': API response of create_broker() or update_broker() call\"\n - \"'state=absent': result of describe_broker() call before delete_broker() is triggerd\"\n - \"'state=restarted': result of describe_broker() after reboot has been triggered\"\n type: dict\n returned: success\n\"\"\"\n\ntry:\n import botocore\nexcept ImportError:\n # handled by AnsibleAWSModule\n pass\n\nfrom ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict\nfrom ansible_collections.amazon.aws.plugins.module_utils.modules import 
AnsibleAWSModule\n\n\nPARAMS_MAP = {\n \"authentication_strategy\": \"AuthenticationStrategy\",\n \"auto_minor_version_upgrade\": \"AutoMinorVersionUpgrade\",\n \"broker_name\": \"BrokerName\",\n \"deployment_mode\": \"DeploymentMode\",\n \"use_aws_owned_key\": \"EncryptionOptions/UseAwsOwnedKey\",\n \"kms_key_id\": \"EncryptionOptions/KmsKeyId\",\n \"engine_type\": \"EngineType\",\n \"engine_version\": \"EngineVersion\",\n \"host_instance_type\": \"HostInstanceType\",\n \"enable_audit_log\": \"Logs/Audit\",\n \"enable_general_log\": \"Logs/General\",\n \"maintenance_window_start_time\": \"MaintenanceWindowStartTime\",\n \"publicly_accessible\": \"PubliclyAccessible\",\n \"security_groups\": \"SecurityGroups\",\n \"storage_type\": \"StorageType\",\n \"subnet_ids\": \"SubnetIds\",\n \"users\": \"Users\",\n \"tags\": \"Tags\",\n}\n\n\nDEFAULTS = {\n \"authentication_strategy\": \"SIMPLE\",\n \"auto_minor_version_upgrade\": False,\n \"deployment_mode\": \"SINGLE_INSTANCE\",\n \"use_aws_owned_key\": True,\n \"engine_type\": \"ACTIVEMQ\",\n \"engine_version\": \"latest\",\n \"host_instance_type\": \"mq.t3.micro\",\n \"enable_audit_log\": False,\n \"enable_general_log\": False,\n \"publicly_accessible\": False,\n \"storage_type\": \"EFS\",\n}\n\nCREATE_ONLY_PARAMS = [\n \"deployment_mode\",\n \"use_aws_owned_key\",\n \"kms_key_id\",\n \"engine_type\",\n \"maintenance_window_start_time\",\n \"publicly_accessible\",\n \"storage_type\",\n \"subnet_ids\",\n \"users\",\n \"tags\",\n]\n\n\ndef _set_kwarg(kwargs, key, value):\n mapped_key = PARAMS_MAP[key]\n if \"/\" in mapped_key:\n key_list = mapped_key.split(\"/\")\n key_list.reverse()\n else:\n key_list = [mapped_key]\n data = kwargs\n while len(key_list) > 1:\n this_key = key_list.pop()\n if this_key not in data:\n data[this_key] = {}\n #\n data = data[this_key]\n data[key_list[0]] = value\n\n\ndef _fill_kwargs(module, apply_defaults=True, ignore_create_params=False):\n kwargs = {}\n if apply_defaults:\n for p_name, p_value in DEFAULTS.items():\n _set_kwarg(kwargs, p_name, p_value)\n for p_name in module.params:\n if ignore_create_params and p_name in CREATE_ONLY_PARAMS:\n # silently ignore CREATE_ONLY_PARAMS on update to\n # make playbooks idempotent\n continue\n if p_name in PARAMS_MAP and module.params[p_name] is not None:\n _set_kwarg(kwargs, p_name, module.params[p_name])\n else:\n # ignore\n pass\n return kwargs\n\n\ndef __list_needs_change(current, desired):\n if len(current) != len(desired):\n return True\n # equal length:\n c_sorted = sorted(current)\n d_sorted = sorted(desired)\n for index, value in enumerate(current):\n if value != desired[index]:\n return True\n #\n return False\n\n\ndef __dict_needs_change(current, desired):\n # values contained in 'current' but not specified in 'desired' are ignored\n # value contained in 'desired' but not in 'current' (unsupported attributes) are ignored\n for key in desired:\n if key in current:\n if desired[key] != current[key]:\n return True\n #\n return False\n\n\ndef _needs_change(current, desired):\n needs_change = False\n for key in desired:\n current_value = current[key]\n desired_value = desired[key]\n if isinstance(current_value, (int, str, bool)):\n if current_value != desired_value:\n needs_change = True\n break\n elif isinstance(current_value, list):\n # assumption: all 'list' type settings we allow changes for have scalar values\n if __list_needs_change(current_value, desired_value):\n needs_change = True\n break\n elif isinstance(current_value, dict):\n # assumption: all 'dict' type 
settings we allow changes for have scalar values\n if __dict_needs_change(current_value, desired_value):\n needs_change = True\n break\n else:\n # unexpected type\n needs_change = True\n break\n #\n return needs_change\n\n\ndef get_latest_engine_version(conn, module, engine_type):\n try:\n response = conn.describe_broker_engine_types(EngineType=engine_type)\n return response[\"BrokerEngineTypes\"][0][\"EngineVersions\"][0][\"Name\"]\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list engine versions\")\n\n\ndef get_broker_id(conn, module):\n try:\n broker_name = module.params[\"broker_name\"]\n broker_id = None\n response = conn.list_brokers(MaxResults=100)\n for broker in response[\"BrokerSummaries\"]:\n if broker[\"BrokerName\"] == broker_name:\n broker_id = broker[\"BrokerId\"]\n break\n return broker_id\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't list broker brokers.\")\n\n\ndef get_broker_info(conn, module, broker_id):\n try:\n return conn.describe_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't get broker details.\")\n\n\ndef reboot_broker(conn, module, broker_id):\n try:\n return conn.reboot_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't reboot broker.\")\n\n\ndef delete_broker(conn, module, broker_id):\n try:\n return conn.delete_broker(BrokerId=broker_id)\n except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:\n module.fail_json_aws(e, msg=\"Couldn't delete broker.\")\n\n\ndef create_broker(conn, module):\n kwargs = _fill_kwargs(module)\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = get_latest_engine_version(conn, module, kwargs[\"EngineType\"])\n if kwargs[\"AuthenticationStrategy\"] == \"LDAP\":\n module.fail_json(msg=\"'AuthenticationStrategy=LDAP' not supported, yet\")\n if \"Users\" not in kwargs:\n # add some stupid default (cannot create broker without any users)\n kwargs[\"Users\"] = [{\"Username\": \"admin\", \"Password\": \"adminPassword\", \"ConsoleAccess\": True, \"Groups\": []}]\n if \"EncryptionOptions\" in kwargs and \"UseAwsOwnedKey\" in kwargs[\"EncryptionOptions\"]:\n kwargs[\"EncryptionOptions\"][\"UseAwsOwnedKey\"] = False\n #\n if \"SecurityGroups\" not in kwargs or len(kwargs[\"SecurityGroups\"]) == 0:\n module.fail_json(msg=\"At least one security group must be specified on broker creation\")\n #\n changed = True\n result = conn.create_broker(**kwargs)\n #\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": changed}\n\n\ndef update_broker(conn, module, broker_id):\n kwargs = _fill_kwargs(module, apply_defaults=False, ignore_create_params=True)\n # replace name with id\n broker_name = kwargs[\"BrokerName\"]\n del kwargs[\"BrokerName\"]\n kwargs[\"BrokerId\"] = broker_id\n # get current state for comparison:\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] != \"RUNNING\":\n module.fail_json(\n msg=f\"Cannot trigger update while broker ({broker_id}) is in state {api_result['BrokerState']}\",\n )\n # engine version of 'latest' is taken as \"keep current one\"\n # i.e. 
do not request upgrade on playbook rerun\n if \"EngineVersion\" in kwargs and kwargs[\"EngineVersion\"] == \"latest\":\n kwargs[\"EngineVersion\"] = api_result[\"EngineVersion\"]\n result = {\"broker_id\": broker_id, \"broker_name\": broker_name}\n changed = False\n if _needs_change(api_result, kwargs):\n changed = True\n if not module.check_mode:\n api_result = conn.update_broker(**kwargs)\n #\n #\n return {\"broker\": result, \"changed\": changed}\n\n\ndef ensure_absent(conn, module):\n result = {\"broker_name\": module.params[\"broker_name\"], \"broker_id\": None}\n if module.check_mode:\n return {\"broker\": camel_dict_to_snake_dict(result, ignore_list=[\"Tags\"]), \"changed\": True}\n broker_id = get_broker_id(conn, module)\n result[\"broker_id\"] = broker_id\n\n if not broker_id:\n # silently ignore delete of unknown broker (to make it idempotent)\n return {\"broker\": result, \"changed\": False}\n\n try:\n # check for pending delete (small race condition possible here\n api_result = get_broker_info(conn, module, broker_id)\n if api_result[\"BrokerState\"] == \"DELETION_IN_PROGRESS\":\n return {\"broker\": result, \"changed\": False}\n delete_broker(conn, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n\n return {\"broker\": result, \"changed\": True}\n\n\ndef ensure_present(conn, module):\n if module.check_mode:\n return {\"broker\": {\"broker_arn\": \"fakeArn\", \"broker_id\": \"fakeId\"}, \"changed\": True}\n\n broker_id = get_broker_id(conn, module)\n if broker_id:\n return update_broker(conn, module, broker_id)\n\n return create_broker(conn, module)\n\n\ndef main():\n argument_spec = dict(\n broker_name=dict(required=True, type=\"str\"),\n state=dict(default=\"present\", choices=[\"present\", \"absent\", \"restarted\"]),\n # parameters only allowed on create\n deployment_mode=dict(choices=[\"SINGLE_INSTANCE\", \"ACTIVE_STANDBY_MULTI_AZ\", \"CLUSTER_MULTI_AZ\"]),\n use_aws_owned_key=dict(type=\"bool\"),\n kms_key_id=dict(type=\"str\"),\n engine_type=dict(choices=[\"ACTIVEMQ\", \"RABBITMQ\"], type=\"str\"),\n maintenance_window_start_time=dict(type=\"dict\"),\n publicly_accessible=dict(type=\"bool\"),\n storage_type=dict(choices=[\"EBS\", \"EFS\"]),\n subnet_ids=dict(type=\"list\", elements=\"str\"),\n users=dict(type=\"list\", elements=\"dict\"),\n tags=dict(type=\"dict\"),\n # parameters allowed on update as well\n authentication_strategy=dict(choices=[\"SIMPLE\", \"LDAP\"]),\n auto_minor_version_upgrade=dict(default=True, type=\"bool\"),\n engine_version=dict(type=\"str\"),\n host_instance_type=dict(type=\"str\"),\n enable_audit_log=dict(default=False, type=\"bool\"),\n enable_general_log=dict(default=False, type=\"bool\"),\n security_groups=dict(type=\"list\", elements=\"str\"),\n )\n\n module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)\n\n connection = module.client(\"mq\")\n\n if module.params[\"state\"] == \"present\":\n try:\n compound_result = ensure_present(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"absent\":\n try:\n compound_result = ensure_absent(connection, module)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n #\n module.exit_json(**compound_result)\n\n if module.params[\"state\"] == \"restarted\":\n broker_id = get_broker_id(connection, module)\n if module.check_mode:\n module.exit_json(broker={\"broker_id\": broker_id if broker_id else 
\"fakeId\"}, changed=True)\n if not broker_id:\n module.fail_json(\n msg=\"Cannot find broker with name {module.params['broker_name']}.\",\n )\n try:\n changed = True\n if not module.check_mode:\n reboot_broker(connection, module, broker_id)\n #\n result = get_broker_info(connection, module, broker_id)\n except botocore.exceptions.ClientError as e:\n module.fail_json_aws(e)\n module.exit_json(broker=result, changed=changed)\n\n\nif __name__ == \"__main__\":\n main()\n",
"path": "plugins/modules/mq_broker.py"
}
] | diff --git a/changelogs/fragments/1832-mq_broker_tags.yml b/changelogs/fragments/1832-mq_broker_tags.yml
new file mode 100644
index 00000000000..b2320dd3c71
--- /dev/null
+++ b/changelogs/fragments/1832-mq_broker_tags.yml
@@ -0,0 +1,2 @@
+bugfixes:
+ - mq_broker - ensure broker is created with ``tags`` when passed (https://github.com/ansible-collections/community.aws/issues/1832).
\ No newline at end of file
diff --git a/plugins/modules/mq_broker.py b/plugins/modules/mq_broker.py
index b4e4593f866..ecc5b8acb15 100644
--- a/plugins/modules/mq_broker.py
+++ b/plugins/modules/mq_broker.py
@@ -237,6 +237,7 @@
"storage_type": "StorageType",
"subnet_ids": "SubnetIds",
"users": "Users",
+ "tags": "Tags",
}
diff --git a/tests/integration/targets/mq/defaults/main.yml b/tests/integration/targets/mq/defaults/main.yml
index 896ba8afa7d..2199c2f637f 100644
--- a/tests/integration/targets/mq/defaults/main.yml
+++ b/tests/integration/targets/mq/defaults/main.yml
@@ -5,3 +5,5 @@ vpc_name: "{{ resource_prefix }}-vpc"
vpc_cidr: "10.0.0.0/16"
subnet_cidr: "10.0.1.0/24"
sg_name: "{{resource_prefix}}-sg"
+tags:
+ workload_type: other
\ No newline at end of file
diff --git a/tests/integration/targets/mq/tasks/broker_tests.yml b/tests/integration/targets/mq/tasks/broker_tests.yml
index 958b80cb205..d4d399da7c1 100644
--- a/tests/integration/targets/mq/tasks/broker_tests.yml
+++ b/tests/integration/targets/mq/tasks/broker_tests.yml
@@ -3,6 +3,7 @@
broker_name: "{{ broker_name }}"
security_groups: "{{ broker_sg_ids.split(',') }}"
subnet_ids: "{{ broker_subnet_ids.split(',') }}"
+ tags: "{{ tags }}"
register: result
- set_fact:
broker_id: "{{ result.broker['broker_id'] }}"
@@ -20,6 +21,7 @@
- result_c1.broker['broker_name'] == broker_name
- result_c1.broker['broker_state'] == 'CREATION_IN_PROGRESS'
- ( result_c1.broker['storage_type'] | upper ) == 'EFS'
+ - result_c1.broker['tags'] == tags
when: not ansible_check_mode
- debug:
msg: "Wait until broker {{ broker_name }} ({{ broker_id }}) enters running state. This may take several minutes"
diff --git a/tests/integration/targets/mq/tasks/main.yml b/tests/integration/targets/mq/tasks/main.yml
index 2055700480b..e84367a76c2 100644
--- a/tests/integration/targets/mq/tasks/main.yml
+++ b/tests/integration/targets/mq/tasks/main.yml
@@ -32,4 +32,4 @@
- name: cleanup broker
include_tasks: broker_cleanup.yml
- - include_tasks: env_cleanup.yml
\ No newline at end of file
+ - include_tasks: env_cleanup.yml
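Note on the fix above: in the module, `_fill_kwargs()` only forwards parameters that have an entry in `PARAMS_MAP`, so before this change a `tags` value passed to the module was silently dropped and never reached `create_broker()`. A minimal sketch of that mapping step, with simplified stand-ins (the real helper also handles nested keys such as `EncryptionOptions/UseAwsOwnedKey` and applies defaults):

```python
# Simplified sketch of how PARAMS_MAP drives the boto3 kwargs in mq_broker.
PARAMS_MAP = {
    "broker_name": "BrokerName",
    "tags": "Tags",  # the entry added by this fix
}

def fill_kwargs(module_params):
    """Translate Ansible module params into create_broker() keyword arguments."""
    kwargs = {}
    for name, value in module_params.items():
        # Parameters without a PARAMS_MAP entry are dropped here, which is
        # why `tags` never reached the API before the fix.
        if name in PARAMS_MAP and value is not None:
            kwargs[PARAMS_MAP[name]] = value
    return kwargs

print(fill_kwargs({"broker_name": "my_broker", "tags": {"workload_type": "other"}}))
# -> {'BrokerName': 'my_broker', 'Tags': {'workload_type': 'other'}}
```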
|
chainer__chainer-239 | Add type check to Identity Function
Related to #123
| [
{
"content": "from chainer import function\n\n\nclass Identity(function.Function):\n\n \"\"\"Identity function.\"\"\"\n\n def forward(self, xs):\n return xs\n\n def backward(self, xs, gys):\n return gys\n\n\ndef identity(*inputs):\n \"\"\"Just returns input variables.\"\"\"\n return Identity()(*inputs)\n",
"path": "chainer/functions/identity.py"
}
] | [
{
"content": "from chainer import function\n\n\nclass Identity(function.Function):\n\n \"\"\"Identity function.\"\"\"\n\n def check_type_forward(self, in_types):\n pass\n\n def forward(self, xs):\n return xs\n\n def backward(self, xs, gys):\n return gys\n\n\ndef identity(*inputs):\n \"\"\"Just returns input variables.\"\"\"\n return Identity()(*inputs)\n",
"path": "chainer/functions/identity.py"
}
] | diff --git a/chainer/functions/identity.py b/chainer/functions/identity.py
index 300f5ddb223e..80938c408e48 100644
--- a/chainer/functions/identity.py
+++ b/chainer/functions/identity.py
@@ -5,6 +5,9 @@ class Identity(function.Function):
"""Identity function."""
+ def check_type_forward(self, in_types):
+ pass
+
def forward(self, xs):
return xs
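Note: the new `check_type_forward` body is deliberately empty, since `Identity` just returns its inputs and therefore accepts any number of inputs of any type; the override exists to give the function an explicit type check like the rest of the codebase (presumably the effort tracked in #123). For contrast, here is a sketch of what a non-trivial check looks like for a single-input function; the constraints below are illustrative, not taken from this PR:

```python
import numpy

from chainer import function
from chainer.utils import type_check


class SquareExample(function.Function):
    """Illustrative function whose type check actually constrains its input."""

    def check_type_forward(self, in_types):
        # Exactly one input, and it must be float32.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype == numpy.float32)

    def forward(self, inputs):
        x, = inputs
        return x * x,
```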
|
oppia__oppia-3843 | AssertionError in controllers/base.py
Bug found when doing a sanity test pass on oppiatestserver for the 2.5.4 release.
Steps to reproduce:
- Go to https://oppiatestserver.appspot.com
- Ensure you're logged out
- From the splash screen, click 'Create your own lesson'
- Observe 'Error Communicating with Server' snackbar
Error info:
```
Traceback (most recent call last): (/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/core/controllers/base.py:438)
File "/base/data/home/runtimes/python27/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 570, in dispatch
return method(*args, **kwargs)
File "/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/core/domain/acl_decorators.py", line 247, in test_can_create
return handler(self, **kwargs)
File "/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/core/controllers/creator_dashboard.py", line 279, in post
new_exploration_id = exp_services.get_new_exploration_id()
File "/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/core/domain/exp_services.py", line 325, in get_new_exploration_id
return exp_models.ExplorationModel.get_new_id('')
File "/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/core/storage/base_model/gae_models.py", line 178, in get_new_id
'%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),
File "/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/utils.py", line 218, in get_random_int
assert upper_bound >= 0 and isinstance(upper_bound, int)
Exception raised: (/base/data/home/apps/s~oppiatestserver/2-5-4.403832684503391573/core/controllers/base.py:439)
```
This is consistently reproducible just by visiting https://oppiatestserver.appspot.com/creator_dashboard?mode=create.
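Note: the failing assertion points at the likely root cause — `utils.get_random_int` requires `isinstance(upper_bound, int)`, and on the 32-bit Python 2.7 runtime used by App Engine `sys.maxint` is 2**31 - 1, so a constant as large as `(1 << 60) - 1` is promoted to `long` and the check fails. A minimal sketch of that behaviour, assuming a 32-bit Python 2.7 interpreter (the two constants are the old and new `RAND_RANGE` values from the fix):

```python
# Python 2.7 on a 32-bit build (e.g. the App Engine standard runtime).
import sys

print(sys.maxint)                  # 2147483647 on a 32-bit build

old_range = (1 << 60) - 1          # exceeds sys.maxint -> promoted to `long`
new_range = (1 << 30) - 1          # fits in a machine int -> stays an `int`

print(isinstance(old_range, int))  # False: this is what trips the assert
print(isinstance(new_range, int))  # True: the assert passes after the fix
```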
| [
{
"content": "# Copyright 2014 The Oppia Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS-IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Base model class.\"\"\"\n\nfrom core.platform import models\nimport utils\n\nfrom google.appengine.datastore import datastore_query\nfrom google.appengine.ext import ndb\n\ntransaction_services = models.Registry.import_transaction_services()\n\n# The delimiter used to separate the version number from the model instance\n# id. To get the instance id from a snapshot id, use Python's rfind()\n# method to find the location of this delimiter.\n_VERSION_DELIMITER = '-'\n\n# Constants used for generating ids.\nMAX_RETRIES = 10\nRAND_RANGE = (1 << 60) - 1\nID_LENGTH = 12\n\n\nclass BaseModel(ndb.Model):\n \"\"\"Base model for all persistent object storage classes.\"\"\"\n\n # When this entity was first created. This can be overwritten and\n # set explicitly.\n created_on = ndb.DateTimeProperty(auto_now_add=True, indexed=True)\n # When this entity was last updated. This cannot be set directly.\n last_updated = ndb.DateTimeProperty(auto_now=True, indexed=True)\n # Whether the current version of the model instance is deleted.\n deleted = ndb.BooleanProperty(indexed=True, default=False)\n\n @property\n def id(self):\n \"\"\"A unique id for this model instance.\"\"\"\n return self.key.id()\n\n def _pre_put_hook(self):\n \"\"\"This is run before model instances are saved to the datastore.\n\n Subclasses of BaseModel should override this method.\n \"\"\"\n pass\n\n class EntityNotFoundError(Exception):\n \"\"\"Raised when no entity for a given id exists in the datastore.\"\"\"\n pass\n\n @classmethod\n def get(cls, entity_id, strict=True):\n \"\"\"Gets an entity by id.\n\n Args:\n entity_id: str.\n strict: bool. Whether to fail noisily if no entity with the given id\n exists in the datastore. Default is True.\n\n Returns:\n None, if strict == False and no undeleted entity with the given id\n exists in the datastore. Otherwise, the entity instance that\n corresponds to the given id.\n\n Raises:\n base_models.BaseModel.EntityNotFoundError: if strict == True and\n no undeleted entity with the given id exists in the datastore.\n \"\"\"\n entity = cls.get_by_id(entity_id)\n if entity and entity.deleted:\n entity = None\n\n if strict and entity is None:\n raise cls.EntityNotFoundError(\n 'Entity for class %s with id %s not found' %\n (cls.__name__, entity_id))\n return entity\n\n @classmethod\n def get_multi(cls, entity_ids, include_deleted=False):\n \"\"\"Gets list of entities by list of ids.\n\n Args:\n entity_ids: list(str).\n include_deleted: bool. Whether to include deleted entities in the\n return list. Default is False.\n\n Returns:\n list(*|None). A list that contains model instances that match\n the corresponding entity_ids in the input list. 
If an instance is\n not found, or it has been deleted and include_deleted is False,\n then the corresponding entry is None.\n \"\"\"\n entity_keys = []\n none_argument_indices = []\n for index, entity_id in enumerate(entity_ids):\n if entity_id:\n entity_keys.append(ndb.Key(cls, entity_id))\n else:\n none_argument_indices.append(index)\n\n entities = ndb.get_multi(entity_keys)\n for index in none_argument_indices:\n entities.insert(index, None)\n\n if not include_deleted:\n for i in xrange(len(entities)):\n if entities[i] and entities[i].deleted:\n entities[i] = None\n return entities\n\n @classmethod\n def put_multi(cls, entities):\n \"\"\"Stores the given ndb.Model instances.\n\n Args:\n entities: list(ndb.Model).\n \"\"\"\n ndb.put_multi(entities)\n\n def delete(self):\n \"\"\"Deletes this instance.\"\"\"\n super(BaseModel, self).key.delete()\n\n @classmethod\n def get_all(cls, include_deleted=False):\n \"\"\"Gets iterable of all entities of this class.\n\n Args:\n include_deleted: bool. If True, then entities that have been marked\n deleted are returned as well. Defaults to False.\n\n Returns:\n iterable. Filterable iterable of all entities of this class.\n \"\"\"\n query = cls.query()\n if not include_deleted:\n query = query.filter(cls.deleted == False) # pylint: disable=singleton-comparison\n return query\n\n @classmethod\n def get_new_id(cls, entity_name):\n \"\"\"Gets a new id for an entity, based on its name.\n\n The returned id is guaranteed to be unique among all instances of this\n entity.\n\n Args:\n entity_name: The name of the entity. Coerced to a utf-8 encoded\n string. Defaults to ''.\n\n Returns:\n str. New unique id for this entity class.\n\n Raises:\n Exception: An ID cannot be generated within a reasonable number\n of attempts.\n \"\"\"\n try:\n entity_name = unicode(entity_name).encode('utf-8')\n except Exception:\n entity_name = ''\n\n for _ in range(MAX_RETRIES):\n new_id = utils.convert_to_hash(\n '%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),\n ID_LENGTH)\n if not cls.get_by_id(new_id):\n return new_id\n\n raise Exception('New id generator is producing too many collisions.')\n\n @classmethod\n def _fetch_page_sorted_by_last_updated(\n cls, query, page_size, urlsafe_start_cursor):\n \"\"\"Fetches a page of entities sorted by their last_updated attribute in\n descending order (newly updated first).\n\n Args:\n query: ndb.Query.\n page_size: int. The maximum number of entities to be returned.\n urlsafe_start_cursor: str or None. If provided, the list of returned\n entities starts from this datastore cursor. Otherwise,\n the returned entities start from the beginning of the full\n list of entities.\n\n Returns:\n 3-tuple of (results, cursor, more) as described in fetch_page() at:\n https://developers.google.com/appengine/docs/python/ndb/queryclass,\n where:\n results: List of query results.\n cursor: str or None. A query cursor pointing to the next batch\n of results. If there are no more results, this will be None.\n more: bool. If True, there are (probably) more results after\n this batch. 
If False, there are no further results after\n this batch.\n \"\"\"\n if urlsafe_start_cursor:\n start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor)\n else:\n start_cursor = None\n\n result = query.order(-cls.last_updated).fetch_page(\n page_size, start_cursor=start_cursor)\n return (\n result[0],\n (result[1].urlsafe() if result[1] else None),\n result[2])\n\n\nclass VersionedModel(BaseModel):\n \"\"\"Model that handles storage of the version history of model instances.\n\n To use this class, you must declare a SNAPSHOT_METADATA_CLASS and a\n SNAPSHOT_CONTENT_CLASS. The former must contain the String fields\n 'committer_id', 'commit_type' and 'commit_message', and a JSON field for\n the Python list of dicts, 'commit_cmds'. The latter must contain the JSON\n field 'content'. The item that is being versioned must be serializable to a\n JSON blob.\n\n Note that commit() should be used for VersionedModels, as opposed to put()\n for direct subclasses of BaseModel.\n \"\"\"\n # The class designated as the snapshot model. This should be a subclass of\n # BaseSnapshotMetadataModel.\n SNAPSHOT_METADATA_CLASS = None\n # The class designated as the snapshot content model. This should be a\n # subclass of BaseSnapshotContentModel.\n SNAPSHOT_CONTENT_CLASS = None\n # Whether reverting is allowed. Default is False.\n ALLOW_REVERT = False\n\n ### IMPORTANT: Subclasses should only overwrite things above this line. ###\n\n # The possible commit types.\n _COMMIT_TYPE_CREATE = 'create'\n _COMMIT_TYPE_REVERT = 'revert'\n _COMMIT_TYPE_EDIT = 'edit'\n _COMMIT_TYPE_DELETE = 'delete'\n # A list containing the possible commit types.\n COMMIT_TYPE_CHOICES = [\n _COMMIT_TYPE_CREATE, _COMMIT_TYPE_REVERT, _COMMIT_TYPE_EDIT,\n _COMMIT_TYPE_DELETE\n ]\n # The reserved prefix for keys that are automatically inserted into a\n # commit_cmd dict by this model.\n _AUTOGENERATED_PREFIX = 'AUTO'\n # The current version number of this instance. In each PUT operation,\n # this number is incremented and a snapshot of the modified instance is\n # stored in the snapshot metadata and content models. The snapshot\n # version number starts at 1 when the model instance is first created.\n # All data in this instance represents the version at HEAD; data about the\n # previous versions is stored in the snapshot models.\n version = ndb.IntegerProperty(default=0)\n\n def _require_not_marked_deleted(self):\n if self.deleted:\n raise Exception('This model instance has been deleted.')\n\n def _compute_snapshot(self):\n \"\"\"Generates a snapshot (dict) from the model property values.\"\"\"\n return self.to_dict(exclude=['created_on', 'last_updated'])\n\n def _reconstitute(self, snapshot_dict):\n self.populate(**snapshot_dict)\n return self\n\n def _reconstitute_from_snapshot_id(self, snapshot_id):\n \"\"\"Gets a reconstituted instance of this model class, based on the given\n snapshot id.\n\n Args:\n snapshot_id: str.\n\n Returns:\n VersionedModel. Reconstituted instance.\n \"\"\"\n snapshot_model = self.SNAPSHOT_CONTENT_CLASS.get(snapshot_id)\n snapshot_dict = snapshot_model.content\n reconstituted_model = self._reconstitute(snapshot_dict)\n # TODO(sll): The 'created_on' and 'last_updated' values here will be\n # slightly different from the values the entity model would have had,\n # since they correspond to the corresponding fields for the snapshot\n # content model instead. 
Figure out whether this is a problem or not,\n # and whether we need to record the contents of those fields in the\n # actual entity model (in which case we also need a way to deal with\n # old snapshots that don't have this information).\n reconstituted_model.created_on = snapshot_model.created_on\n reconstituted_model.last_updated = snapshot_model.last_updated\n return reconstituted_model\n\n @classmethod\n def _get_snapshot_id(cls, instance_id, version_number):\n \"\"\"Gets a unique snapshot id for this instance and version.\n\n Args:\n instance_id: str.\n version_number: int.\n\n Returns:\n str. The unique snapshot id corresponding to the given instance and\n version.\n \"\"\"\n return '%s%s%s' % (\n instance_id, _VERSION_DELIMITER, version_number)\n\n def _trusted_commit(\n self, committer_id, commit_type, commit_message, commit_cmds):\n \"\"\"Evaluates and executes commit. Main function for all commit types.\n\n Args:\n committer_id: str. The user_id of the user who committed the change.\n commit_type: str. Unique identifier of commit type. Possible values\n are in COMMIT_TYPE_CHOICES.\n commit_message: str.\n commit_cmds: list(dict). A list of commands, describing changes\n made in this model, should give sufficient information to\n reconstruct the commit. Dict always contains:\n cmd: str. Unique command.\n And then additional arguments for that command. For example:\n\n {'cmd': 'AUTO_revert_version_number'\n 'version_number': 4}\n\n Raises:\n Exception: No snapshot metadata class has been defined.\n Exception: No snapshot content class has been defined.\n Exception: commit_cmds is not a list of dicts.\n \"\"\"\n if self.SNAPSHOT_METADATA_CLASS is None:\n raise Exception('No snapshot metadata class defined.')\n if self.SNAPSHOT_CONTENT_CLASS is None:\n raise Exception('No snapshot content class defined.')\n if not isinstance(commit_cmds, list):\n raise Exception(\n 'Expected commit_cmds to be a list of dicts, received %s'\n % commit_cmds)\n for item in commit_cmds:\n if not isinstance(item, dict):\n raise Exception(\n 'Expected commit_cmds to be a list of dicts, received %s'\n % commit_cmds)\n\n self.version += 1\n\n snapshot = self._compute_snapshot()\n snapshot_id = self._get_snapshot_id(self.id, self.version)\n\n snapshot_metadata_instance = self.SNAPSHOT_METADATA_CLASS( # pylint: disable=not-callable\n id=snapshot_id, committer_id=committer_id, commit_type=commit_type,\n commit_message=commit_message, commit_cmds=commit_cmds)\n snapshot_content_instance = self.SNAPSHOT_CONTENT_CLASS( # pylint: disable=not-callable\n id=snapshot_id, content=snapshot)\n\n transaction_services.run_in_transaction(\n ndb.put_multi,\n [snapshot_metadata_instance, snapshot_content_instance, self])\n\n def delete(self, committer_id, commit_message, force_deletion=False):\n \"\"\"Deletes this model instance.\n\n Args:\n committer_id: str. The user_id of the user who committed the change.\n commit_message: str.\n force_deletion: bool. 
If True this model is deleted\n completely from storage, otherwise it is only marked as deleted.\n Default is False.\n\n Raises:\n Exception: This model instance has been already deleted.\n \"\"\"\n if force_deletion:\n current_version = self.version\n\n version_numbers = [str(num + 1) for num in range(current_version)]\n snapshot_ids = [\n self._get_snapshot_id(self.id, version_number)\n for version_number in version_numbers]\n\n metadata_keys = [\n ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)\n for snapshot_id in snapshot_ids]\n ndb.delete_multi(metadata_keys)\n\n content_keys = [\n ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)\n for snapshot_id in snapshot_ids]\n ndb.delete_multi(content_keys)\n\n super(VersionedModel, self).delete()\n else:\n self._require_not_marked_deleted() # pylint: disable=protected-access\n self.deleted = True\n\n commit_cmds = [{\n 'cmd': '%s_mark_deleted' % self._AUTOGENERATED_PREFIX\n }]\n\n self._trusted_commit(\n committer_id, self._COMMIT_TYPE_DELETE, commit_message,\n commit_cmds)\n\n def put(self, *args, **kwargs):\n \"\"\"For VersionedModels, this method is replaced with commit().\"\"\"\n raise NotImplementedError\n\n def commit(self, committer_id, commit_message, commit_cmds):\n \"\"\"Saves a version snapshot and updates the model.\n\n Args:\n committer_id: str. The user_id of the user who committed the change.\n commit_message: str.\n commit_cmds: list(dict). A list of commands, describing changes\n made in this model, should give sufficient information to\n reconstruct the commit. Dict always contains:\n cmd: str. Unique command.\n And then additional arguments for that command. For example:\n\n {'cmd': 'AUTO_revert_version_number'\n 'version_number': 4}\n\n Raises:\n Exception: This model instance has been already deleted.\n Exception: commit_cmd is in invalid format.\n \"\"\"\n self._require_not_marked_deleted()\n\n for commit_cmd in commit_cmds:\n if 'cmd' not in commit_cmd:\n raise Exception(\n 'Invalid commit_cmd: %s. Expected a \\'cmd\\' key.'\n % commit_cmd)\n if commit_cmd['cmd'].startswith(self._AUTOGENERATED_PREFIX):\n raise Exception(\n 'Invalid change list command: ' % commit_cmd['cmd'])\n\n commit_type = (\n self._COMMIT_TYPE_CREATE if self.version == 0 else\n self._COMMIT_TYPE_EDIT)\n\n self._trusted_commit(\n committer_id, commit_type, commit_message, commit_cmds)\n\n @classmethod\n def revert(cls, model, committer_id, commit_message, version_number):\n \"\"\"Reverts model to previous version.\n\n Args:\n model: VersionedModel.\n committer_id: str. The user_id of the user who committed the change.\n commit_message: str.\n version_number: int. Version to revert to.\n\n Raises:\n Exception: This model instance has been deleted.\n Exception: Reverting is not allowed on this model.\n \"\"\"\n model._require_not_marked_deleted() # pylint: disable=protected-access\n\n if not model.ALLOW_REVERT:\n raise Exception(\n 'Reverting of objects of type %s is not allowed.'\n % model.__class__.__name__)\n\n commit_cmds = [{\n 'cmd': (\n '%s_revert_version_number' %\n model._AUTOGENERATED_PREFIX), # pylint: disable=protected-access\n 'version_number': version_number\n }]\n\n # Do not overwrite the version number.\n current_version = model.version\n\n # If a new property is introduced after a certain version of a model,\n # the property should be its default value when an old snapshot of the\n # model is applied during reversion. E.g. states_schema_version in\n # ExplorationModel may be added after some version of a saved\n # exploration. 
If that exploration is reverted to a version that does\n # not have a states_schema_version property, it should revert to the\n # default states_schema_version value rather than taking the\n # states_schema_version value from the latest exploration version.\n\n # pylint: disable=protected-access\n snapshot_id = model._get_snapshot_id(model.id, version_number)\n new_model = cls(id=model.id)\n new_model._reconstitute_from_snapshot_id(snapshot_id)\n new_model.version = current_version\n\n new_model._trusted_commit(\n committer_id, cls._COMMIT_TYPE_REVERT, commit_message,\n commit_cmds)\n # pylint: enable=protected-access\n\n @classmethod\n def get_version(cls, entity_id, version_number):\n \"\"\"Gets model instance representing the given version.\n\n The snapshot content is used to populate this model instance. The\n snapshot metadata is not used.\n\n Args:\n entity_id: str.\n version_number: int.\n\n Returns:\n VersionedModel. Model instance representing given version.\n\n Raises:\n Exception: This model instance has been deleted.\n \"\"\"\n # pylint: disable=protected-access\n cls.get(entity_id)._require_not_marked_deleted()\n\n snapshot_id = cls._get_snapshot_id(entity_id, version_number)\n return cls(id=entity_id)._reconstitute_from_snapshot_id(\n snapshot_id)\n # pylint: enable=protected-access\n\n @classmethod\n def get(cls, entity_id, strict=True, version=None):\n \"\"\"Gets model instance.\n\n Args:\n entity_id: str.\n strict: bool. Whether to fail noisily if no entity with the given id\n exists in the datastore. Default is True.\n version: int. Version we want to get. Default is None.\n\n Returns:\n VersionedModel. If version is None, get the newest version of the\n model. Otherwise, get the specified version.\n \"\"\"\n if version is None:\n return super(VersionedModel, cls).get(entity_id, strict=strict)\n else:\n return cls.get_version(entity_id, version)\n\n @classmethod\n def get_snapshots_metadata(\n cls, model_instance_id, version_numbers, allow_deleted=False):\n \"\"\"Gets a list of dicts, each representing a model snapshot.\n\n One dict is returned for each version number in the list of version\n numbers requested. If any of the version numbers does not exist, an\n error is raised.\n\n Args:\n model_instance_id: str. Id of requested model.\n version_numbers: list(int). List of version numbers.\n allow_deleted: bool. If is False, an error is raised if the current\n model has been deleted. Default is False.\n\n Returns:\n list(dict). Each dict contains metadata for a particular snapshot.\n It has the following keys:\n committer_id: str. The user_id of the user who committed the\n change.\n commit_message: str.\n commit_cmds: list(dict). A list of commands, describing changes\n made in this model, should give sufficient information to\n reconstruct the commit. Dict always contains:\n cmd: str. Unique command.\n And then additional arguments for that command. For example:\n\n {'cmd': 'AUTO_revert_version_number'\n 'version_number': 4}\n\n commit_type: str. Unique identifier of commit type. Possible\n values are in COMMIT_TYPE_CHOICES.\n version_number: int.\n created_on_ms: float. 
Snapshot creation time in milliseconds\n since the Epoch.\n\n Raises:\n Exception: There is no model instance corresponding to at least one\n of the given version numbers.\n \"\"\"\n # pylint: disable=protected-access\n if not allow_deleted:\n cls.get(model_instance_id)._require_not_marked_deleted()\n\n snapshot_ids = [\n cls._get_snapshot_id(model_instance_id, version_number)\n for version_number in version_numbers]\n # pylint: enable=protected-access\n metadata_keys = [\n ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id)\n for snapshot_id in snapshot_ids]\n returned_models = ndb.get_multi(metadata_keys)\n\n for ind, model in enumerate(returned_models):\n if model is None:\n raise Exception(\n 'Invalid version number %s for model %s with id %s'\n % (version_numbers[ind], cls.__name__, model_instance_id))\n\n return [{\n 'committer_id': model.committer_id,\n 'commit_message': model.commit_message,\n 'commit_cmds': model.commit_cmds,\n 'commit_type': model.commit_type,\n 'version_number': version_numbers[ind],\n 'created_on_ms': utils.get_time_in_millisecs(model.created_on),\n } for (ind, model) in enumerate(returned_models)]\n\n\nclass BaseSnapshotMetadataModel(BaseModel):\n \"\"\"Base class for snapshot metadata classes.\n\n The id of this model is computed using VersionedModel.get_snapshot_id().\n \"\"\"\n\n # The id of the user who committed this revision.\n committer_id = ndb.StringProperty(required=True)\n # The type of the commit associated with this snapshot.\n commit_type = ndb.StringProperty(\n required=True, choices=VersionedModel.COMMIT_TYPE_CHOICES)\n # The commit message associated with this snapshot.\n commit_message = ndb.TextProperty(indexed=False)\n # A sequence of commands that can be used to describe this commit.\n # Represented as a list of dicts.\n commit_cmds = ndb.JsonProperty(indexed=False)\n\n def get_unversioned_instance_id(self):\n \"\"\"Gets the instance id from the snapshot id.\n\n Returns:\n str. Instance id part of snapshot id.\n \"\"\"\n return self.id[:self.id.rfind(_VERSION_DELIMITER)]\n\n def get_version_string(self):\n \"\"\"Gets the version number from the snapshot id.\n\n Returns:\n str. Version number part of snapshot id.\n \"\"\"\n return self.id[self.id.rfind(_VERSION_DELIMITER) + 1:]\n\n\nclass BaseSnapshotContentModel(BaseModel):\n \"\"\"Base class for snapshot content classes.\n\n The id of this model is computed using VersionedModel.get_snapshot_id().\n \"\"\"\n\n # The snapshot content, as a JSON blob.\n content = ndb.JsonProperty(indexed=False)\n\n def get_unversioned_instance_id(self):\n \"\"\"Gets the instance id from the snapshot id.\n\n Returns:\n str. Instance id part of snapshot id.\n \"\"\"\n return self.id[:self.id.rfind(_VERSION_DELIMITER)]\n\n def get_version_string(self):\n \"\"\"Gets the version number from the snapshot id.\n\n Returns:\n str. Version number part of snapshot id.\n \"\"\"\n return self.id[self.id.rfind(_VERSION_DELIMITER) + 1:]\n\n\nclass BaseMapReduceBatchResultsModel(BaseModel):\n \"\"\"Base model for batch storage for MR jobs.\n\n This model turns off caching, because this results in stale data being\n shown after each MapReduce job run. Classes which are used by a MR job to\n store its batch results should subclass this class.\n \"\"\"\n _use_cache = False\n _use_memcache = False\n",
"path": "core/storage/base_model/gae_models.py"
}
] | [
{
"content": "# Copyright 2014 The Oppia Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS-IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Base model class.\"\"\"\n\nfrom core.platform import models\nimport utils\n\nfrom google.appengine.datastore import datastore_query\nfrom google.appengine.ext import ndb\n\ntransaction_services = models.Registry.import_transaction_services()\n\n# The delimiter used to separate the version number from the model instance\n# id. To get the instance id from a snapshot id, use Python's rfind()\n# method to find the location of this delimiter.\n_VERSION_DELIMITER = '-'\n\n# Constants used for generating ids.\nMAX_RETRIES = 10\nRAND_RANGE = (1 << 30) - 1\nID_LENGTH = 12\n\n\nclass BaseModel(ndb.Model):\n \"\"\"Base model for all persistent object storage classes.\"\"\"\n\n # When this entity was first created. This can be overwritten and\n # set explicitly.\n created_on = ndb.DateTimeProperty(auto_now_add=True, indexed=True)\n # When this entity was last updated. This cannot be set directly.\n last_updated = ndb.DateTimeProperty(auto_now=True, indexed=True)\n # Whether the current version of the model instance is deleted.\n deleted = ndb.BooleanProperty(indexed=True, default=False)\n\n @property\n def id(self):\n \"\"\"A unique id for this model instance.\"\"\"\n return self.key.id()\n\n def _pre_put_hook(self):\n \"\"\"This is run before model instances are saved to the datastore.\n\n Subclasses of BaseModel should override this method.\n \"\"\"\n pass\n\n class EntityNotFoundError(Exception):\n \"\"\"Raised when no entity for a given id exists in the datastore.\"\"\"\n pass\n\n @classmethod\n def get(cls, entity_id, strict=True):\n \"\"\"Gets an entity by id.\n\n Args:\n entity_id: str.\n strict: bool. Whether to fail noisily if no entity with the given id\n exists in the datastore. Default is True.\n\n Returns:\n None, if strict == False and no undeleted entity with the given id\n exists in the datastore. Otherwise, the entity instance that\n corresponds to the given id.\n\n Raises:\n base_models.BaseModel.EntityNotFoundError: if strict == True and\n no undeleted entity with the given id exists in the datastore.\n \"\"\"\n entity = cls.get_by_id(entity_id)\n if entity and entity.deleted:\n entity = None\n\n if strict and entity is None:\n raise cls.EntityNotFoundError(\n 'Entity for class %s with id %s not found' %\n (cls.__name__, entity_id))\n return entity\n\n @classmethod\n def get_multi(cls, entity_ids, include_deleted=False):\n \"\"\"Gets list of entities by list of ids.\n\n Args:\n entity_ids: list(str).\n include_deleted: bool. Whether to include deleted entities in the\n return list. Default is False.\n\n Returns:\n list(*|None). A list that contains model instances that match\n the corresponding entity_ids in the input list. 
If an instance is\n not found, or it has been deleted and include_deleted is False,\n then the corresponding entry is None.\n \"\"\"\n entity_keys = []\n none_argument_indices = []\n for index, entity_id in enumerate(entity_ids):\n if entity_id:\n entity_keys.append(ndb.Key(cls, entity_id))\n else:\n none_argument_indices.append(index)\n\n entities = ndb.get_multi(entity_keys)\n for index in none_argument_indices:\n entities.insert(index, None)\n\n if not include_deleted:\n for i in xrange(len(entities)):\n if entities[i] and entities[i].deleted:\n entities[i] = None\n return entities\n\n @classmethod\n def put_multi(cls, entities):\n \"\"\"Stores the given ndb.Model instances.\n\n Args:\n entities: list(ndb.Model).\n \"\"\"\n ndb.put_multi(entities)\n\n def delete(self):\n \"\"\"Deletes this instance.\"\"\"\n super(BaseModel, self).key.delete()\n\n @classmethod\n def get_all(cls, include_deleted=False):\n \"\"\"Gets iterable of all entities of this class.\n\n Args:\n include_deleted: bool. If True, then entities that have been marked\n deleted are returned as well. Defaults to False.\n\n Returns:\n iterable. Filterable iterable of all entities of this class.\n \"\"\"\n query = cls.query()\n if not include_deleted:\n query = query.filter(cls.deleted == False) # pylint: disable=singleton-comparison\n return query\n\n @classmethod\n def get_new_id(cls, entity_name):\n \"\"\"Gets a new id for an entity, based on its name.\n\n The returned id is guaranteed to be unique among all instances of this\n entity.\n\n Args:\n entity_name: The name of the entity. Coerced to a utf-8 encoded\n string. Defaults to ''.\n\n Returns:\n str. New unique id for this entity class.\n\n Raises:\n Exception: An ID cannot be generated within a reasonable number\n of attempts.\n \"\"\"\n try:\n entity_name = unicode(entity_name).encode('utf-8')\n except Exception:\n entity_name = ''\n\n for _ in range(MAX_RETRIES):\n new_id = utils.convert_to_hash(\n '%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),\n ID_LENGTH)\n if not cls.get_by_id(new_id):\n return new_id\n\n raise Exception('New id generator is producing too many collisions.')\n\n @classmethod\n def _fetch_page_sorted_by_last_updated(\n cls, query, page_size, urlsafe_start_cursor):\n \"\"\"Fetches a page of entities sorted by their last_updated attribute in\n descending order (newly updated first).\n\n Args:\n query: ndb.Query.\n page_size: int. The maximum number of entities to be returned.\n urlsafe_start_cursor: str or None. If provided, the list of returned\n entities starts from this datastore cursor. Otherwise,\n the returned entities start from the beginning of the full\n list of entities.\n\n Returns:\n 3-tuple of (results, cursor, more) as described in fetch_page() at:\n https://developers.google.com/appengine/docs/python/ndb/queryclass,\n where:\n results: List of query results.\n cursor: str or None. A query cursor pointing to the next batch\n of results. If there are no more results, this will be None.\n more: bool. If True, there are (probably) more results after\n this batch. 
If False, there are no further results after\n this batch.\n \"\"\"\n if urlsafe_start_cursor:\n start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor)\n else:\n start_cursor = None\n\n result = query.order(-cls.last_updated).fetch_page(\n page_size, start_cursor=start_cursor)\n return (\n result[0],\n (result[1].urlsafe() if result[1] else None),\n result[2])\n\n\nclass VersionedModel(BaseModel):\n \"\"\"Model that handles storage of the version history of model instances.\n\n To use this class, you must declare a SNAPSHOT_METADATA_CLASS and a\n SNAPSHOT_CONTENT_CLASS. The former must contain the String fields\n 'committer_id', 'commit_type' and 'commit_message', and a JSON field for\n the Python list of dicts, 'commit_cmds'. The latter must contain the JSON\n field 'content'. The item that is being versioned must be serializable to a\n JSON blob.\n\n Note that commit() should be used for VersionedModels, as opposed to put()\n for direct subclasses of BaseModel.\n \"\"\"\n # The class designated as the snapshot model. This should be a subclass of\n # BaseSnapshotMetadataModel.\n SNAPSHOT_METADATA_CLASS = None\n # The class designated as the snapshot content model. This should be a\n # subclass of BaseSnapshotContentModel.\n SNAPSHOT_CONTENT_CLASS = None\n # Whether reverting is allowed. Default is False.\n ALLOW_REVERT = False\n\n ### IMPORTANT: Subclasses should only overwrite things above this line. ###\n\n # The possible commit types.\n _COMMIT_TYPE_CREATE = 'create'\n _COMMIT_TYPE_REVERT = 'revert'\n _COMMIT_TYPE_EDIT = 'edit'\n _COMMIT_TYPE_DELETE = 'delete'\n # A list containing the possible commit types.\n COMMIT_TYPE_CHOICES = [\n _COMMIT_TYPE_CREATE, _COMMIT_TYPE_REVERT, _COMMIT_TYPE_EDIT,\n _COMMIT_TYPE_DELETE\n ]\n # The reserved prefix for keys that are automatically inserted into a\n # commit_cmd dict by this model.\n _AUTOGENERATED_PREFIX = 'AUTO'\n # The current version number of this instance. In each PUT operation,\n # this number is incremented and a snapshot of the modified instance is\n # stored in the snapshot metadata and content models. The snapshot\n # version number starts at 1 when the model instance is first created.\n # All data in this instance represents the version at HEAD; data about the\n # previous versions is stored in the snapshot models.\n version = ndb.IntegerProperty(default=0)\n\n def _require_not_marked_deleted(self):\n if self.deleted:\n raise Exception('This model instance has been deleted.')\n\n def _compute_snapshot(self):\n \"\"\"Generates a snapshot (dict) from the model property values.\"\"\"\n return self.to_dict(exclude=['created_on', 'last_updated'])\n\n def _reconstitute(self, snapshot_dict):\n self.populate(**snapshot_dict)\n return self\n\n def _reconstitute_from_snapshot_id(self, snapshot_id):\n \"\"\"Gets a reconstituted instance of this model class, based on the given\n snapshot id.\n\n Args:\n snapshot_id: str.\n\n Returns:\n VersionedModel. Reconstituted instance.\n \"\"\"\n snapshot_model = self.SNAPSHOT_CONTENT_CLASS.get(snapshot_id)\n snapshot_dict = snapshot_model.content\n reconstituted_model = self._reconstitute(snapshot_dict)\n # TODO(sll): The 'created_on' and 'last_updated' values here will be\n # slightly different from the values the entity model would have had,\n # since they correspond to the corresponding fields for the snapshot\n # content model instead. 
Figure out whether this is a problem or not,\n # and whether we need to record the contents of those fields in the\n # actual entity model (in which case we also need a way to deal with\n # old snapshots that don't have this information).\n reconstituted_model.created_on = snapshot_model.created_on\n reconstituted_model.last_updated = snapshot_model.last_updated\n return reconstituted_model\n\n @classmethod\n def _get_snapshot_id(cls, instance_id, version_number):\n \"\"\"Gets a unique snapshot id for this instance and version.\n\n Args:\n instance_id: str.\n version_number: int.\n\n Returns:\n str. The unique snapshot id corresponding to the given instance and\n version.\n \"\"\"\n return '%s%s%s' % (\n instance_id, _VERSION_DELIMITER, version_number)\n\n def _trusted_commit(\n self, committer_id, commit_type, commit_message, commit_cmds):\n \"\"\"Evaluates and executes commit. Main function for all commit types.\n\n Args:\n committer_id: str. The user_id of the user who committed the change.\n commit_type: str. Unique identifier of commit type. Possible values\n are in COMMIT_TYPE_CHOICES.\n commit_message: str.\n commit_cmds: list(dict). A list of commands, describing changes\n made in this model, should give sufficient information to\n reconstruct the commit. Dict always contains:\n cmd: str. Unique command.\n And then additional arguments for that command. For example:\n\n {'cmd': 'AUTO_revert_version_number'\n 'version_number': 4}\n\n Raises:\n Exception: No snapshot metadata class has been defined.\n Exception: No snapshot content class has been defined.\n Exception: commit_cmds is not a list of dicts.\n \"\"\"\n if self.SNAPSHOT_METADATA_CLASS is None:\n raise Exception('No snapshot metadata class defined.')\n if self.SNAPSHOT_CONTENT_CLASS is None:\n raise Exception('No snapshot content class defined.')\n if not isinstance(commit_cmds, list):\n raise Exception(\n 'Expected commit_cmds to be a list of dicts, received %s'\n % commit_cmds)\n for item in commit_cmds:\n if not isinstance(item, dict):\n raise Exception(\n 'Expected commit_cmds to be a list of dicts, received %s'\n % commit_cmds)\n\n self.version += 1\n\n snapshot = self._compute_snapshot()\n snapshot_id = self._get_snapshot_id(self.id, self.version)\n\n snapshot_metadata_instance = self.SNAPSHOT_METADATA_CLASS( # pylint: disable=not-callable\n id=snapshot_id, committer_id=committer_id, commit_type=commit_type,\n commit_message=commit_message, commit_cmds=commit_cmds)\n snapshot_content_instance = self.SNAPSHOT_CONTENT_CLASS( # pylint: disable=not-callable\n id=snapshot_id, content=snapshot)\n\n transaction_services.run_in_transaction(\n ndb.put_multi,\n [snapshot_metadata_instance, snapshot_content_instance, self])\n\n def delete(self, committer_id, commit_message, force_deletion=False):\n \"\"\"Deletes this model instance.\n\n Args:\n committer_id: str. The user_id of the user who committed the change.\n commit_message: str.\n force_deletion: bool. 
If True this model is deleted\n completely from storage, otherwise it is only marked as deleted.\n Default is False.\n\n Raises:\n Exception: This model instance has been already deleted.\n \"\"\"\n if force_deletion:\n current_version = self.version\n\n version_numbers = [str(num + 1) for num in range(current_version)]\n snapshot_ids = [\n self._get_snapshot_id(self.id, version_number)\n for version_number in version_numbers]\n\n metadata_keys = [\n ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)\n for snapshot_id in snapshot_ids]\n ndb.delete_multi(metadata_keys)\n\n content_keys = [\n ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)\n for snapshot_id in snapshot_ids]\n ndb.delete_multi(content_keys)\n\n super(VersionedModel, self).delete()\n else:\n self._require_not_marked_deleted() # pylint: disable=protected-access\n self.deleted = True\n\n commit_cmds = [{\n 'cmd': '%s_mark_deleted' % self._AUTOGENERATED_PREFIX\n }]\n\n self._trusted_commit(\n committer_id, self._COMMIT_TYPE_DELETE, commit_message,\n commit_cmds)\n\n def put(self, *args, **kwargs):\n \"\"\"For VersionedModels, this method is replaced with commit().\"\"\"\n raise NotImplementedError\n\n def commit(self, committer_id, commit_message, commit_cmds):\n \"\"\"Saves a version snapshot and updates the model.\n\n Args:\n committer_id: str. The user_id of the user who committed the change.\n commit_message: str.\n commit_cmds: list(dict). A list of commands, describing changes\n made in this model, should give sufficient information to\n reconstruct the commit. Dict always contains:\n cmd: str. Unique command.\n And then additional arguments for that command. For example:\n\n {'cmd': 'AUTO_revert_version_number'\n 'version_number': 4}\n\n Raises:\n Exception: This model instance has been already deleted.\n Exception: commit_cmd is in invalid format.\n \"\"\"\n self._require_not_marked_deleted()\n\n for commit_cmd in commit_cmds:\n if 'cmd' not in commit_cmd:\n raise Exception(\n 'Invalid commit_cmd: %s. Expected a \\'cmd\\' key.'\n % commit_cmd)\n if commit_cmd['cmd'].startswith(self._AUTOGENERATED_PREFIX):\n raise Exception(\n 'Invalid change list command: ' % commit_cmd['cmd'])\n\n commit_type = (\n self._COMMIT_TYPE_CREATE if self.version == 0 else\n self._COMMIT_TYPE_EDIT)\n\n self._trusted_commit(\n committer_id, commit_type, commit_message, commit_cmds)\n\n @classmethod\n def revert(cls, model, committer_id, commit_message, version_number):\n \"\"\"Reverts model to previous version.\n\n Args:\n model: VersionedModel.\n committer_id: str. The user_id of the user who committed the change.\n commit_message: str.\n version_number: int. Version to revert to.\n\n Raises:\n Exception: This model instance has been deleted.\n Exception: Reverting is not allowed on this model.\n \"\"\"\n model._require_not_marked_deleted() # pylint: disable=protected-access\n\n if not model.ALLOW_REVERT:\n raise Exception(\n 'Reverting of objects of type %s is not allowed.'\n % model.__class__.__name__)\n\n commit_cmds = [{\n 'cmd': (\n '%s_revert_version_number' %\n model._AUTOGENERATED_PREFIX), # pylint: disable=protected-access\n 'version_number': version_number\n }]\n\n # Do not overwrite the version number.\n current_version = model.version\n\n # If a new property is introduced after a certain version of a model,\n # the property should be its default value when an old snapshot of the\n # model is applied during reversion. E.g. states_schema_version in\n # ExplorationModel may be added after some version of a saved\n # exploration. 
If that exploration is reverted to a version that does\n # not have a states_schema_version property, it should revert to the\n # default states_schema_version value rather than taking the\n # states_schema_version value from the latest exploration version.\n\n # pylint: disable=protected-access\n snapshot_id = model._get_snapshot_id(model.id, version_number)\n new_model = cls(id=model.id)\n new_model._reconstitute_from_snapshot_id(snapshot_id)\n new_model.version = current_version\n\n new_model._trusted_commit(\n committer_id, cls._COMMIT_TYPE_REVERT, commit_message,\n commit_cmds)\n # pylint: enable=protected-access\n\n @classmethod\n def get_version(cls, entity_id, version_number):\n \"\"\"Gets model instance representing the given version.\n\n The snapshot content is used to populate this model instance. The\n snapshot metadata is not used.\n\n Args:\n entity_id: str.\n version_number: int.\n\n Returns:\n VersionedModel. Model instance representing given version.\n\n Raises:\n Exception: This model instance has been deleted.\n \"\"\"\n # pylint: disable=protected-access\n cls.get(entity_id)._require_not_marked_deleted()\n\n snapshot_id = cls._get_snapshot_id(entity_id, version_number)\n return cls(id=entity_id)._reconstitute_from_snapshot_id(\n snapshot_id)\n # pylint: enable=protected-access\n\n @classmethod\n def get(cls, entity_id, strict=True, version=None):\n \"\"\"Gets model instance.\n\n Args:\n entity_id: str.\n strict: bool. Whether to fail noisily if no entity with the given id\n exists in the datastore. Default is True.\n version: int. Version we want to get. Default is None.\n\n Returns:\n VersionedModel. If version is None, get the newest version of the\n model. Otherwise, get the specified version.\n \"\"\"\n if version is None:\n return super(VersionedModel, cls).get(entity_id, strict=strict)\n else:\n return cls.get_version(entity_id, version)\n\n @classmethod\n def get_snapshots_metadata(\n cls, model_instance_id, version_numbers, allow_deleted=False):\n \"\"\"Gets a list of dicts, each representing a model snapshot.\n\n One dict is returned for each version number in the list of version\n numbers requested. If any of the version numbers does not exist, an\n error is raised.\n\n Args:\n model_instance_id: str. Id of requested model.\n version_numbers: list(int). List of version numbers.\n allow_deleted: bool. If is False, an error is raised if the current\n model has been deleted. Default is False.\n\n Returns:\n list(dict). Each dict contains metadata for a particular snapshot.\n It has the following keys:\n committer_id: str. The user_id of the user who committed the\n change.\n commit_message: str.\n commit_cmds: list(dict). A list of commands, describing changes\n made in this model, should give sufficient information to\n reconstruct the commit. Dict always contains:\n cmd: str. Unique command.\n And then additional arguments for that command. For example:\n\n {'cmd': 'AUTO_revert_version_number'\n 'version_number': 4}\n\n commit_type: str. Unique identifier of commit type. Possible\n values are in COMMIT_TYPE_CHOICES.\n version_number: int.\n created_on_ms: float. 
Snapshot creation time in milliseconds\n since the Epoch.\n\n Raises:\n Exception: There is no model instance corresponding to at least one\n of the given version numbers.\n \"\"\"\n # pylint: disable=protected-access\n if not allow_deleted:\n cls.get(model_instance_id)._require_not_marked_deleted()\n\n snapshot_ids = [\n cls._get_snapshot_id(model_instance_id, version_number)\n for version_number in version_numbers]\n # pylint: enable=protected-access\n metadata_keys = [\n ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id)\n for snapshot_id in snapshot_ids]\n returned_models = ndb.get_multi(metadata_keys)\n\n for ind, model in enumerate(returned_models):\n if model is None:\n raise Exception(\n 'Invalid version number %s for model %s with id %s'\n % (version_numbers[ind], cls.__name__, model_instance_id))\n\n return [{\n 'committer_id': model.committer_id,\n 'commit_message': model.commit_message,\n 'commit_cmds': model.commit_cmds,\n 'commit_type': model.commit_type,\n 'version_number': version_numbers[ind],\n 'created_on_ms': utils.get_time_in_millisecs(model.created_on),\n } for (ind, model) in enumerate(returned_models)]\n\n\nclass BaseSnapshotMetadataModel(BaseModel):\n \"\"\"Base class for snapshot metadata classes.\n\n The id of this model is computed using VersionedModel.get_snapshot_id().\n \"\"\"\n\n # The id of the user who committed this revision.\n committer_id = ndb.StringProperty(required=True)\n # The type of the commit associated with this snapshot.\n commit_type = ndb.StringProperty(\n required=True, choices=VersionedModel.COMMIT_TYPE_CHOICES)\n # The commit message associated with this snapshot.\n commit_message = ndb.TextProperty(indexed=False)\n # A sequence of commands that can be used to describe this commit.\n # Represented as a list of dicts.\n commit_cmds = ndb.JsonProperty(indexed=False)\n\n def get_unversioned_instance_id(self):\n \"\"\"Gets the instance id from the snapshot id.\n\n Returns:\n str. Instance id part of snapshot id.\n \"\"\"\n return self.id[:self.id.rfind(_VERSION_DELIMITER)]\n\n def get_version_string(self):\n \"\"\"Gets the version number from the snapshot id.\n\n Returns:\n str. Version number part of snapshot id.\n \"\"\"\n return self.id[self.id.rfind(_VERSION_DELIMITER) + 1:]\n\n\nclass BaseSnapshotContentModel(BaseModel):\n \"\"\"Base class for snapshot content classes.\n\n The id of this model is computed using VersionedModel.get_snapshot_id().\n \"\"\"\n\n # The snapshot content, as a JSON blob.\n content = ndb.JsonProperty(indexed=False)\n\n def get_unversioned_instance_id(self):\n \"\"\"Gets the instance id from the snapshot id.\n\n Returns:\n str. Instance id part of snapshot id.\n \"\"\"\n return self.id[:self.id.rfind(_VERSION_DELIMITER)]\n\n def get_version_string(self):\n \"\"\"Gets the version number from the snapshot id.\n\n Returns:\n str. Version number part of snapshot id.\n \"\"\"\n return self.id[self.id.rfind(_VERSION_DELIMITER) + 1:]\n\n\nclass BaseMapReduceBatchResultsModel(BaseModel):\n \"\"\"Base model for batch storage for MR jobs.\n\n This model turns off caching, because this results in stale data being\n shown after each MapReduce job run. Classes which are used by a MR job to\n store its batch results should subclass this class.\n \"\"\"\n _use_cache = False\n _use_memcache = False\n",
"path": "core/storage/base_model/gae_models.py"
}
] | diff --git a/core/storage/base_model/gae_models.py b/core/storage/base_model/gae_models.py
index bbc9a7ae46287..ce1fab8f8b139 100644
--- a/core/storage/base_model/gae_models.py
+++ b/core/storage/base_model/gae_models.py
@@ -29,7 +29,7 @@
# Constants used for generating ids.
MAX_RETRIES = 10
-RAND_RANGE = (1 << 60) - 1
+RAND_RANGE = (1 << 30) - 1
ID_LENGTH = 12
|
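The snapshot-id helpers in the `gae_models.py` content above recover the entity id and the version number by splitting the snapshot id on the last occurrence of a delimiter. A minimal sketch of that parsing, assuming a `_VERSION_DELIMITER` value of `"-"` (the real constant is defined earlier in the file and is not shown here):

```python
_VERSION_DELIMITER = "-"  # assumption; the actual value lives earlier in gae_models.py

def get_unversioned_instance_id(snapshot_id: str) -> str:
    # Everything before the last delimiter is the entity id.
    return snapshot_id[:snapshot_id.rfind(_VERSION_DELIMITER)]

def get_version_string(snapshot_id: str) -> str:
    # Everything after the last delimiter is the version number, as a string.
    return snapshot_id[snapshot_id.rfind(_VERSION_DELIMITER) + 1:]

print(get_unversioned_instance_id("exploration-abc-12"))  # exploration-abc
print(get_version_string("exploration-abc-12"))           # 12
```

Splitting on the last delimiter rather than the first is what lets entity ids themselves contain the delimiter character.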
litestar-org__litestar-2648 | Bug: Schema generation partially broken since litestar version 2.3.0
### Description
2.2.1 is the last version of litestar that works for me.
Before:
<img width="467" alt="image" src="https://github.com/litestar-org/litestar/assets/85191795/dc9594b1-4b09-4607-9061-dcd65bf0a09f">
After:
I get this `internal server error` the first time I go to my Swagger URL:
<img width="436" alt="image" src="https://github.com/litestar-org/litestar/assets/85191795/90112884-907e-4ee0-a14c-a92c338ef761">
When I refresh once more, it loads my Swagger page, but only about 2/3 of it renders.
<img width="217" alt="image" src="https://github.com/litestar-org/litestar/assets/85191795/74f16208-e80a-46de-b580-3dd566e0f14b">
With no changes in my code, the problems start at version 2.3.0 and beyond. I just wanted to bring attention to this, as I will be sticking to litestar 2.2.1 until it is resolved.
### URL to code causing the issue
_No response_
### MCVE
```python
# How my app code looks when passing in my controllers:
app = Litestar(
route_handlers=[
read_root,
refresh_templates,
LinuxPXEController,
WindowsPXEController,
ESXiPXEController
],
...
```
### Steps to reproduce
_No response_
### Screenshots
```bash
""
```
### Logs
_No response_
### Litestar Version
2.3.0
### Platform
- [X] Linux
- [X] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
<!-- POLAR PLEDGE BADGE START -->
---
> [!NOTE]
> While we are open for sponsoring on [GitHub Sponsors](https://github.com/sponsors/litestar-org/) and
> [OpenCollective](https://opencollective.com/litestar), we also utilize [Polar.sh](https://polar.sh/) to engage in pledge-based sponsorship.
>
> Check out all issues funded or available for funding [on our Polar.sh Litestar dashboard](https://polar.sh/litestar-org)
> * If you would like to see an issue prioritized, make a pledge towards it!
> * We receive the pledge once the issue is completed & verified
> * This, along with engagement in the community, helps us know which features are a priority to our users.
<a href="https://polar.sh/litestar-org/litestar/issues/2635">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://polar.sh/api/github/litestar-org/litestar/issues/2635/pledge.svg?darkmode=1">
<img alt="Fund with Polar" src="https://polar.sh/api/github/litestar-org/litestar/issues/2635/pledge.svg">
</picture>
</a>
<!-- POLAR PLEDGE BADGE END -->
| [
{
"content": "from __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Any\n\nfrom typing_extensions import get_type_hints\n\nfrom litestar.types import Empty\nfrom litestar.utils import is_class_and_subclass\nfrom litestar.utils.predicates import is_generic\nfrom litestar.utils.typing import (\n _substitute_typevars,\n get_origin_or_inner_type,\n get_type_hints_with_generics_resolved,\n instantiable_type_mapping,\n)\n\n# isort: off\ntry:\n from pydantic import v1 as pydantic_v1\n import pydantic as pydantic_v2\n from pydantic.fields import PydanticUndefined as Pydantic2Undefined # type: ignore[attr-defined]\n from pydantic.v1.fields import Undefined as Pydantic1Undefined\n\n PYDANTIC_UNDEFINED_SENTINELS = {Pydantic1Undefined, Pydantic2Undefined}\nexcept ImportError:\n try:\n import pydantic as pydantic_v1 # type: ignore[no-redef]\n from pydantic.fields import Undefined as Pydantic1Undefined # type: ignore[attr-defined, no-redef]\n\n pydantic_v2 = Empty # type: ignore[assignment]\n PYDANTIC_UNDEFINED_SENTINELS = {Pydantic1Undefined}\n\n except ImportError: # pyright: ignore\n pydantic_v1 = Empty # type: ignore[assignment]\n pydantic_v2 = Empty # type: ignore[assignment]\n PYDANTIC_UNDEFINED_SENTINELS = set()\n# isort: on\n\n\nif TYPE_CHECKING:\n from typing_extensions import TypeGuard\n\n\ndef is_pydantic_model_class(\n annotation: Any,\n) -> TypeGuard[type[pydantic_v1.BaseModel | pydantic_v2.BaseModel]]: # pyright: ignore\n \"\"\"Given a type annotation determine if the annotation is a subclass of pydantic's BaseModel.\n\n Args:\n annotation: A type.\n\n Returns:\n A typeguard determining whether the type is :data:`BaseModel pydantic.BaseModel>`.\n \"\"\"\n if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return False\n\n if pydantic_v2 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return is_class_and_subclass(annotation, pydantic_v1.BaseModel)\n\n return is_class_and_subclass(annotation, (pydantic_v1.BaseModel, pydantic_v2.BaseModel))\n\n\ndef is_pydantic_model_instance(\n annotation: Any,\n) -> TypeGuard[pydantic_v1.BaseModel | pydantic_v2.BaseModel]: # pyright: ignore\n \"\"\"Given a type annotation determine if the annotation is an instance of pydantic's BaseModel.\n\n Args:\n annotation: A type.\n\n Returns:\n A typeguard determining whether the type is :data:`BaseModel pydantic.BaseModel>`.\n \"\"\"\n if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return False\n\n if pydantic_v2 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return isinstance(annotation, pydantic_v1.BaseModel)\n\n return isinstance(annotation, (pydantic_v1.BaseModel, pydantic_v2.BaseModel))\n\n\ndef is_pydantic_constrained_field(annotation: Any) -> bool:\n \"\"\"Check if the given annotation is a constrained pydantic type.\n\n Args:\n annotation: A type annotation\n\n Returns:\n True if pydantic is installed and the type is a constrained type, otherwise False.\n \"\"\"\n if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return False\n\n return any(\n is_class_and_subclass(annotation, constrained_type) # pyright: ignore\n for constrained_type in (\n pydantic_v1.ConstrainedBytes,\n pydantic_v1.ConstrainedDate,\n pydantic_v1.ConstrainedDecimal,\n pydantic_v1.ConstrainedFloat,\n pydantic_v1.ConstrainedFrozenSet,\n pydantic_v1.ConstrainedInt,\n pydantic_v1.ConstrainedList,\n pydantic_v1.ConstrainedSet,\n pydantic_v1.ConstrainedStr,\n )\n )\n\n\ndef 
pydantic_unwrap_and_get_origin(annotation: Any) -> Any | None:\n if pydantic_v2 is Empty or is_class_and_subclass(annotation, pydantic_v1.BaseModel): # type: ignore[comparison-overlap]\n return get_origin_or_inner_type(annotation)\n\n origin = annotation.__pydantic_generic_metadata__[\"origin\"]\n return instantiable_type_mapping.get(origin, origin)\n\n\ndef pydantic_get_type_hints_with_generics_resolved(\n annotation: Any,\n globalns: dict[str, Any] | None = None,\n localns: dict[str, Any] | None = None,\n include_extras: bool = False,\n) -> dict[str, Any]:\n if pydantic_v2 is Empty or is_class_and_subclass(annotation, pydantic_v1.BaseModel): # type: ignore[comparison-overlap]\n return get_type_hints_with_generics_resolved(annotation)\n\n origin = pydantic_unwrap_and_get_origin(annotation)\n if origin is None:\n type_hints = get_type_hints(annotation, globalns=globalns, localns=localns, include_extras=include_extras)\n typevar_map = {p: p for p in annotation.__pydantic_generic_metadata__[\"parameters\"]}\n else:\n type_hints = get_type_hints(origin, globalns=globalns, localns=localns, include_extras=include_extras)\n args = annotation.__pydantic_generic_metadata__[\"args\"]\n parameters = origin.__pydantic_generic_metadata__[\"parameters\"]\n typevar_map = dict(zip(parameters, args))\n\n return {n: _substitute_typevars(type_, typevar_map) for n, type_ in type_hints.items()}\n\n\ndef pydantic_get_unwrapped_annotation_and_type_hints(annotation: Any) -> tuple[Any, dict[str, Any]]:\n \"\"\"Get the unwrapped annotation and the type hints after resolving generics.\n\n Args:\n annotation: A type annotation.\n\n Returns:\n A tuple containing the unwrapped annotation and the type hints.\n \"\"\"\n\n if is_generic(annotation):\n origin = pydantic_unwrap_and_get_origin(annotation)\n return origin or annotation, pydantic_get_type_hints_with_generics_resolved(annotation, include_extras=True)\n return annotation, get_type_hints(annotation, include_extras=True)\n\n\ndef is_pydantic_2_model(\n obj: type[pydantic_v1.BaseModel | pydantic_v2.BaseModel], # pyright: ignore\n) -> TypeGuard[pydantic_v2.BaseModel]: # pyright: ignore\n return issubclass(obj, pydantic_v2.BaseModel) # pyright: ignore\n\n\ndef is_pydantic_undefined(value: Any) -> bool:\n return value in PYDANTIC_UNDEFINED_SENTINELS\n",
"path": "litestar/contrib/pydantic/utils.py"
}
] | [
{
"content": "from __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Any\n\nfrom typing_extensions import get_type_hints\n\nfrom litestar.types import Empty\nfrom litestar.utils import is_class_and_subclass\nfrom litestar.utils.predicates import is_generic\nfrom litestar.utils.typing import (\n _substitute_typevars,\n get_origin_or_inner_type,\n get_type_hints_with_generics_resolved,\n instantiable_type_mapping,\n)\n\n# isort: off\ntry:\n from pydantic import v1 as pydantic_v1\n import pydantic as pydantic_v2\n from pydantic.fields import PydanticUndefined as Pydantic2Undefined # type: ignore[attr-defined]\n from pydantic.v1.fields import Undefined as Pydantic1Undefined\n\n PYDANTIC_UNDEFINED_SENTINELS = {Pydantic1Undefined, Pydantic2Undefined}\nexcept ImportError:\n try:\n import pydantic as pydantic_v1 # type: ignore[no-redef]\n from pydantic.fields import Undefined as Pydantic1Undefined # type: ignore[attr-defined, no-redef]\n\n pydantic_v2 = Empty # type: ignore[assignment]\n PYDANTIC_UNDEFINED_SENTINELS = {Pydantic1Undefined}\n\n except ImportError: # pyright: ignore\n pydantic_v1 = Empty # type: ignore[assignment]\n pydantic_v2 = Empty # type: ignore[assignment]\n PYDANTIC_UNDEFINED_SENTINELS = set()\n# isort: on\n\n\nif TYPE_CHECKING:\n from typing_extensions import TypeGuard\n\n\ndef is_pydantic_model_class(\n annotation: Any,\n) -> TypeGuard[type[pydantic_v1.BaseModel | pydantic_v2.BaseModel]]: # pyright: ignore\n \"\"\"Given a type annotation determine if the annotation is a subclass of pydantic's BaseModel.\n\n Args:\n annotation: A type.\n\n Returns:\n A typeguard determining whether the type is :data:`BaseModel pydantic.BaseModel>`.\n \"\"\"\n if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return False\n\n if pydantic_v2 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return is_class_and_subclass(annotation, pydantic_v1.BaseModel)\n\n return is_class_and_subclass(annotation, (pydantic_v1.BaseModel, pydantic_v2.BaseModel))\n\n\ndef is_pydantic_model_instance(\n annotation: Any,\n) -> TypeGuard[pydantic_v1.BaseModel | pydantic_v2.BaseModel]: # pyright: ignore\n \"\"\"Given a type annotation determine if the annotation is an instance of pydantic's BaseModel.\n\n Args:\n annotation: A type.\n\n Returns:\n A typeguard determining whether the type is :data:`BaseModel pydantic.BaseModel>`.\n \"\"\"\n if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return False\n\n if pydantic_v2 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return isinstance(annotation, pydantic_v1.BaseModel)\n\n return isinstance(annotation, (pydantic_v1.BaseModel, pydantic_v2.BaseModel))\n\n\ndef is_pydantic_constrained_field(annotation: Any) -> bool:\n \"\"\"Check if the given annotation is a constrained pydantic type.\n\n Args:\n annotation: A type annotation\n\n Returns:\n True if pydantic is installed and the type is a constrained type, otherwise False.\n \"\"\"\n if pydantic_v1 is Empty: # type: ignore[comparison-overlap] # pragma: no cover\n return False\n\n return any(\n is_class_and_subclass(annotation, constrained_type) # pyright: ignore\n for constrained_type in (\n pydantic_v1.ConstrainedBytes,\n pydantic_v1.ConstrainedDate,\n pydantic_v1.ConstrainedDecimal,\n pydantic_v1.ConstrainedFloat,\n pydantic_v1.ConstrainedFrozenSet,\n pydantic_v1.ConstrainedInt,\n pydantic_v1.ConstrainedList,\n pydantic_v1.ConstrainedSet,\n pydantic_v1.ConstrainedStr,\n )\n )\n\n\ndef 
pydantic_unwrap_and_get_origin(annotation: Any) -> Any | None:\n if pydantic_v2 is Empty or is_class_and_subclass(annotation, pydantic_v1.BaseModel): # type: ignore[comparison-overlap]\n return get_origin_or_inner_type(annotation)\n\n origin = annotation.__pydantic_generic_metadata__[\"origin\"]\n return instantiable_type_mapping.get(origin, origin)\n\n\ndef pydantic_get_type_hints_with_generics_resolved(\n annotation: Any,\n globalns: dict[str, Any] | None = None,\n localns: dict[str, Any] | None = None,\n include_extras: bool = False,\n) -> dict[str, Any]:\n if pydantic_v2 is Empty or is_class_and_subclass(annotation, pydantic_v1.BaseModel): # type: ignore[comparison-overlap]\n return get_type_hints_with_generics_resolved(annotation)\n\n origin = pydantic_unwrap_and_get_origin(annotation)\n if origin is None:\n type_hints = get_type_hints(annotation, globalns=globalns, localns=localns, include_extras=include_extras)\n typevar_map = {p: p for p in annotation.__pydantic_generic_metadata__[\"parameters\"]}\n else:\n type_hints = get_type_hints(origin, globalns=globalns, localns=localns, include_extras=include_extras)\n args = annotation.__pydantic_generic_metadata__[\"args\"]\n parameters = origin.__pydantic_generic_metadata__[\"parameters\"]\n typevar_map = dict(zip(parameters, args))\n\n return {n: _substitute_typevars(type_, typevar_map) for n, type_ in type_hints.items()}\n\n\ndef pydantic_get_unwrapped_annotation_and_type_hints(annotation: Any) -> tuple[Any, dict[str, Any]]:\n \"\"\"Get the unwrapped annotation and the type hints after resolving generics.\n\n Args:\n annotation: A type annotation.\n\n Returns:\n A tuple containing the unwrapped annotation and the type hints.\n \"\"\"\n\n if is_generic(annotation):\n origin = pydantic_unwrap_and_get_origin(annotation)\n return origin or annotation, pydantic_get_type_hints_with_generics_resolved(annotation, include_extras=True)\n return annotation, get_type_hints(annotation, include_extras=True)\n\n\ndef is_pydantic_2_model(\n obj: type[pydantic_v1.BaseModel | pydantic_v2.BaseModel], # pyright: ignore\n) -> TypeGuard[pydantic_v2.BaseModel]: # pyright: ignore\n return issubclass(obj, pydantic_v2.BaseModel) # pyright: ignore\n\n\ndef is_pydantic_undefined(value: Any) -> bool:\n return any(v is value for v in PYDANTIC_UNDEFINED_SENTINELS)\n",
"path": "litestar/contrib/pydantic/utils.py"
}
] | diff --git a/litestar/contrib/pydantic/utils.py b/litestar/contrib/pydantic/utils.py
index cae63d58e8..8a03d6c413 100644
--- a/litestar/contrib/pydantic/utils.py
+++ b/litestar/contrib/pydantic/utils.py
@@ -162,4 +162,4 @@ def is_pydantic_2_model(
def is_pydantic_undefined(value: Any) -> bool:
- return value in PYDANTIC_UNDEFINED_SENTINELS
+ return any(v is value for v in PYDANTIC_UNDEFINED_SENTINELS)
diff --git a/tests/unit/test_contrib/test_pydantic/test_openapi.py b/tests/unit/test_contrib/test_pydantic/test_openapi.py
index 8ef86544c4..ea34aee472 100644
--- a/tests/unit/test_contrib/test_pydantic/test_openapi.py
+++ b/tests/unit/test_contrib/test_pydantic/test_openapi.py
@@ -547,3 +547,35 @@ class Foo(BaseModel):
)
schema = schemas["Foo"]
assert schema.properties and "foo" in schema.properties
+
+
+def test_create_schema_for_pydantic_model_with_unhashable_literal_default(
+ create_module: "Callable[[str], ModuleType]",
+) -> None:
+ """Test that a model with unhashable literal defaults is correctly handled."""
+ module = create_module(
+ """
+from pydantic import BaseModel, Field
+
+class Model(BaseModel):
+ id: int
+ dict_default: dict = {}
+ dict_default_in_field: dict = Field(default={})
+ dict_default_in_factory: dict = Field(default_factory=dict)
+ list_default: list = []
+ list_default_in_field: list = Field(default=[])
+ list_default_in_factory: list = Field(default_factory=list)
+"""
+ )
+ schemas: Dict[str, Schema] = {}
+ SchemaCreator(schemas=schemas, plugins=[PydanticSchemaPlugin()]).for_field_definition(
+ FieldDefinition.from_annotation(module.Model)
+ )
+ schema = schemas["Model"]
+ assert schema.properties
+ assert "dict_default" in schema.properties
+ assert "dict_default_in_field" in schema.properties
+ assert "dict_default_in_factory" in schema.properties
+ assert "list_default" in schema.properties
+ assert "list_default_in_field" in schema.properties
+ assert "list_default_in_factory" in schema.properties
|
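The one-line change to `is_pydantic_undefined` above is the whole fix. A minimal sketch, using stand-in sentinel objects rather than the real pydantic `Undefined` values, of why set membership breaks as soon as a model has a mutable default such as `dict_default: dict = {}`, and why the identity-based check does not:

```python
SENTINELS = {None, Ellipsis}  # stand-ins for PYDANTIC_UNDEFINED_SENTINELS

def is_undefined_membership(value):
    return value in SENTINELS  # hashes `value`; raises TypeError for dict/list defaults

def is_undefined_identity(value):
    return any(v is value for v in SENTINELS)  # identity checks only, never hashes

try:
    is_undefined_membership({})  # e.g. a field default of {}
except TypeError as exc:
    print("membership check fails:", exc)  # unhashable type: 'dict'

print(is_undefined_identity({}))  # False, with no exception
```

Comparing by identity also sidesteps calling `__eq__` on arbitrary default values, which is generally the safer comparison for sentinel objects.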
xonsh__xonsh-3049 | Exception on startup (pygments_cache)
<!--- Provide a general summary of the issue in the Title above -->
<!--- If you have a question along the lines of "How do I do this Bash command in xonsh"
please first look over the Bash to Xonsh translation guide: http://xon.sh/bash_to_xsh.html
If you don't find an answer there, please do open an issue! -->
## xonfig
<!--- Please post the output of the `xonfig` command (run from inside xonsh) so we know more about your current setup -->
## Expected Behavior
<!--- Tell us what should happen -->
## Current Behavior
<!--- Tell us what happens instead of the expected behavior -->
<!--- If part of your bug report is a traceback, please first enter debug mode before triggering the error
To enter debug mode, set the environment variable `XONSH_DEBUG=1` _before_ starting `xonsh`.
On Linux and OSX, an easy way to do this is to run `env XONSH_DEBUG=1 xonsh` -->
## Steps to Reproduce
<!--- Please try to write out a minimal reproducible snippet to trigger the bug, it will help us fix it! -->
| [
{
"content": "# must come before ptk / pygments imports\nfrom xonsh.lazyasd import load_module_in_background\n\nload_module_in_background(\n \"pkg_resources\",\n debug=\"XONSH_DEBUG\",\n replacements={\"pygments.plugin\": \"pkg_resources\"},\n)\n",
"path": "xonsh/ptk2/__init__.py"
}
] | [
{
"content": "",
"path": "xonsh/ptk2/__init__.py"
}
] | diff --git a/news/nllpr-ptk2.rst b/news/nllpr-ptk2.rst
new file mode 100644
index 0000000000..e7bbfdf5b5
--- /dev/null
+++ b/news/nllpr-ptk2.rst
@@ -0,0 +1,27 @@
+**Added:**
+
+* <news item>
+
+**Changed:**
+
+* <news item>
+
+**Deprecated:**
+
+* <news item>
+
+**Removed:**
+
+* <news item>
+
+**Fixed:**
+
+* Fixed issue with pygments-cache not properly generating a cache the first
+ time when using prompt-toolkit when using ``ptk2``.
+ This was due to a lingering lazy import of ``pkg_resources``
+ that has been removed.
+
+**Security:**
+
+* <news item>
+
diff --git a/xonsh/ptk2/__init__.py b/xonsh/ptk2/__init__.py
index 7817046f78..e69de29bb2 100644
--- a/xonsh/ptk2/__init__.py
+++ b/xonsh/ptk2/__init__.py
@@ -1,8 +0,0 @@
-# must come before ptk / pygments imports
-from xonsh.lazyasd import load_module_in_background
-
-load_module_in_background(
- "pkg_resources",
- debug="XONSH_DEBUG",
- replacements={"pygments.plugin": "pkg_resources"},
-)
|
django-wiki__django-wiki-1084 | Release to support django 3.1
Hi,
The current release on PyPI blocks upgrading to Django 3.1, and as far as I can tell there are no incompatibilities between version 0.6 and Django 3.1.
Would it be possible to publish a 0.6.1 release, or something along those lines, that loosens that requirement?
| [
{
"content": "# This package and all its sub-packages are part of django-wiki,\n# except where otherwise stated.\n#\n# django-wiki is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# django-wiki is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with django-wiki. If not, see <http://www.gnu.org/licenses/>.\nfrom wiki.core.version import get_version\n\ndefault_app_config = \"wiki.apps.WikiConfig\"\n\nVERSION = (0, 7, 0, \"alpha\", 0)\n__version__ = get_version(VERSION)\n",
"path": "src/wiki/__init__.py"
}
] | [
{
"content": "# This package and all its sub-packages are part of django-wiki,\n# except where otherwise stated.\n#\n# django-wiki is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# django-wiki is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with django-wiki. If not, see <http://www.gnu.org/licenses/>.\nfrom wiki.core.version import get_version\n\ndefault_app_config = \"wiki.apps.WikiConfig\"\n\nVERSION = (0, 7, 0, \"final\", 0)\n__version__ = get_version(VERSION)\n",
"path": "src/wiki/__init__.py"
}
] | diff --git a/docs/release_notes.rst b/docs/release_notes.rst
index 619c56331..e4465b297 100644
--- a/docs/release_notes.rst
+++ b/docs/release_notes.rst
@@ -10,16 +10,16 @@ Release plan
* **0.4.x** supports Django 1.11 and Django 2.1 and Python 3.4+.
* **0.5.x** Remove Django 1.11 support, adds Django 2.2 and 3.x support. Python 3.5+.
* **0.6.x** Targets Bootstrap v4, if you are interested in this work, please get in touch on Github!
-* **0.7.x** Milestone TBA
+* **0.7.x** Removes Django 2.1 support, adds Django 3.1
-0.7.dev
--------
+0.7
+---
Added
~~~~~
-* Django 3.1 support :url-issue:`1061` (Mads Jensen)
+* Django 3.1 support :url-issue:`1061` and :url-issue:`1082` (Mads Jensen, Benjamin Bach)
Fixed
~~~~~
diff --git a/src/wiki/__init__.py b/src/wiki/__init__.py
index 7b108b0ef..0db1b9774 100644
--- a/src/wiki/__init__.py
+++ b/src/wiki/__init__.py
@@ -17,5 +17,5 @@
default_app_config = "wiki.apps.WikiConfig"
-VERSION = (0, 7, 0, "alpha", 0)
+VERSION = (0, 7, 0, "final", 0)
__version__ = get_version(VERSION)
|
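The code change here only flips the release level in the version tuple from `"alpha"` to `"final"`. As a rough sketch of how such a `(major, minor, micro, level, serial)` tuple is conventionally rendered into a version string (this mirrors Django's versioning convention; the actual behaviour lives in `wiki.core.version.get_version` and may differ, e.g. for dev builds):

```python
# Hypothetical renderer, for illustration only.
LEVEL_SUFFIX = {"alpha": "a", "beta": "b", "rc": "rc"}

def render_version(version):
    major, minor, micro, level, serial = version
    main = f"{major}.{minor}" if micro == 0 else f"{major}.{minor}.{micro}"
    return main if level == "final" else f"{main}{LEVEL_SUFFIX[level]}{serial}"

print(render_version((0, 7, 0, "alpha", 0)))  # 0.7a0
print(render_version((0, 7, 0, "final", 0)))  # 0.7
```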
django-wiki__django-wiki-919 | jquery 1.12.4 is bundled, some security holes exist
See [CVE-2015-9251](https://cve.mitre.org/cgi-bin/cvekey.cgi?keyword=CVE-2015-9251) and others.
| [
{
"content": "# This package and all its sub-packages are part of django-wiki,\n# except where otherwise stated.\n#\n# django-wiki is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# django-wiki is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with django-wiki. If not, see <http://www.gnu.org/licenses/>.\n\n\nfrom wiki.core.version import get_version\n\ndefault_app_config = 'wiki.apps.WikiConfig'\n\nVERSION = (0, 4, 0, 'final', 0)\n__version__ = get_version(VERSION)\n",
"path": "src/wiki/__init__.py"
}
] | [
{
"content": "# This package and all its sub-packages are part of django-wiki,\n# except where otherwise stated.\n#\n# django-wiki is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# django-wiki is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with django-wiki. If not, see <http://www.gnu.org/licenses/>.\n\n\nfrom wiki.core.version import get_version\n\ndefault_app_config = 'wiki.apps.WikiConfig'\n\nVERSION = (0, 4, 1, 'final', 0)\n__version__ = get_version(VERSION)\n",
"path": "src/wiki/__init__.py"
}
] | diff --git a/docs/release_notes.rst b/docs/release_notes.rst
index bf4f6ba15..8b97e081f 100644
--- a/docs/release_notes.rst
+++ b/docs/release_notes.rst
@@ -8,9 +8,17 @@ Release plan
* **0.3** series supported Django 1.11. As with the upstream Django release, 0.3 was be the last series with Python 2.7 support.
* **0.4+** supports Django 1.11 and Django 2.x and Python 3.4+.
-* **0.5** will target Bootstrap v4, if you are interested in this work, please get in touch on Github!
+* **0.5** should remove Django 1.11 support and target Bootstrap v4, if you are interested in this work, please get in touch on Github!
+0.4.1
+-----
+
+Security
+~~~~~~~~
+
+* jQuery upgrade from 1.12.4 to 3.3.1. jQuery UI also upgraded (for dynamic resizing of modals) :url-issue:`882` (Benjamin Bach)
+
0.4
---
diff --git a/src/wiki/__init__.py b/src/wiki/__init__.py
index fc16a7ff4..6bcd76ae9 100644
--- a/src/wiki/__init__.py
+++ b/src/wiki/__init__.py
@@ -19,5 +19,5 @@
default_app_config = 'wiki.apps.WikiConfig'
-VERSION = (0, 4, 0, 'final', 0)
+VERSION = (0, 4, 1, 'final', 0)
__version__ = get_version(VERSION)
diff --git a/src/wiki/static/wiki/js/jquery-3.3.1.min.js b/src/wiki/static/wiki/js/jquery-3.3.1.min.js
new file mode 100644
index 000000000..4d9b3a258
--- /dev/null
+++ b/src/wiki/static/wiki/js/jquery-3.3.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license */
+!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(e,t){"use strict";var n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function e(t){return"function"==typeof t&&"number"!=typeof t.nodeType},y=function e(t){return null!=t&&t===t.window},v={type:!0,src:!0,noModule:!0};function m(e,t,n){var i,o=(t=t||r).createElement("script");if(o.text=e,n)for(i in v)n[i]&&(o[i]=n[i]);t.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[c.call(e)]||"object":typeof e}var b="3.3.1",w=function(e,t){return new w.fn.init(e,t)},T=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;w.fn=w.prototype={jquery:"3.3.1",constructor:w,length:0,toArray:function(){return o.call(this)},get:function(e){return null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=w.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return w.each(this,e)},map:function(e){return this.pushStack(w.map(this,function(t,n){return e.call(t,n,t)}))},slice:function(){return this.pushStack(o.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(n>=0&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:s,sort:n.sort,splice:n.splice},w.extend=w.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||g(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)n=a[t],a!==(r=e[t])&&(l&&r&&(w.isPlainObject(r)||(i=Array.isArray(r)))?(i?(i=!1,o=n&&Array.isArray(n)?n:[]):o=n&&w.isPlainObject(n)?n:{},a[t]=w.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},w.extend({expando:"jQuery"+("3.3.1"+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==c.call(e))&&(!(t=i(e))||"function"==typeof(n=f.call(t,"constructor")&&t.constructor)&&p.call(n)===d)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e){m(e)},each:function(e,t){var n,r=0;if(C(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},trim:function(e){return null==e?"":(e+"").replace(T,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(C(Object(e))?w.merge(n,"string"==typeof e?[e]:e):s.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:u.call(t,e,n)},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r,i=[],o=0,a=e.length,s=!n;o<a;o++)(r=!t(e[o],o))!==s&&i.push(e[o]);return i},map:function(e,t,n){var r,i,o=0,s=[];if(C(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&s.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&s.push(i);return a.apply([],s)},guid:1,support:h}),"function"==typeof Symbol&&(w.fn[Symbol.iterator]=n[Symbol.iterator]),w.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){l["[object "+t+"]"]=t.toLowerCase()});function C(e){var t=!!e&&"length"in 
e&&e.length,n=x(e);return!g(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&t>0&&t-1 in e)}var E=function(e){var t,n,r,i,o,a,s,u,l,c,f,p,d,h,g,y,v,m,x,b="sizzle"+1*new Date,w=e.document,T=0,C=0,E=ae(),k=ae(),S=ae(),D=function(e,t){return e===t&&(f=!0),0},N={}.hasOwnProperty,A=[],j=A.pop,q=A.push,L=A.push,H=A.slice,O=function(e,t){for(var n=0,r=e.length;n<r;n++)if(e[n]===t)return n;return-1},P="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",R="(?:\\\\.|[\\w-]|[^\0-\\xa0])+",I="\\["+M+"*("+R+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+R+"))|)"+M+"*\\]",W=":("+R+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+I+")*)|.*)\\)|)",$=new RegExp(M+"+","g"),B=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),F=new RegExp("^"+M+"*,"+M+"*"),_=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),z=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),X=new RegExp(W),U=new RegExp("^"+R+"$"),V={ID:new RegExp("^#("+R+")"),CLASS:new RegExp("^\\.("+R+")"),TAG:new RegExp("^("+R+"|[*])"),ATTR:new RegExp("^"+I),PSEUDO:new RegExp("^"+W),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+P+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},G=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Q=/^[^{]+\{\s*\[native \w/,J=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,K=/[+~]/,Z=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ee=function(e,t,n){var r="0x"+t-65536;return r!==r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},te=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ne=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},re=function(){p()},ie=me(function(e){return!0===e.disabled&&("form"in e||"label"in e)},{dir:"parentNode",next:"legend"});try{L.apply(A=H.call(w.childNodes),w.childNodes),A[w.childNodes.length].nodeType}catch(e){L={apply:A.length?function(e,t){q.apply(e,H.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function oe(e,t,r,i){var o,s,l,c,f,h,v,m=t&&t.ownerDocument,T=t?t.nodeType:9;if(r=r||[],"string"!=typeof e||!e||1!==T&&9!==T&&11!==T)return r;if(!i&&((t?t.ownerDocument||t:w)!==d&&p(t),t=t||d,g)){if(11!==T&&(f=J.exec(e)))if(o=f[1]){if(9===T){if(!(l=t.getElementById(o)))return r;if(l.id===o)return r.push(l),r}else if(m&&(l=m.getElementById(o))&&x(t,l)&&l.id===o)return r.push(l),r}else{if(f[2])return L.apply(r,t.getElementsByTagName(e)),r;if((o=f[3])&&n.getElementsByClassName&&t.getElementsByClassName)return L.apply(r,t.getElementsByClassName(o)),r}if(n.qsa&&!S[e+" "]&&(!y||!y.test(e))){if(1!==T)m=t,v=e;else if("object"!==t.nodeName.toLowerCase()){(c=t.getAttribute("id"))?c=c.replace(te,ne):t.setAttribute("id",c=b),s=(h=a(e)).length;while(s--)h[s]="#"+c+" "+ve(h[s]);v=h.join(","),m=K.test(e)&&ge(t.parentNode)||t}if(v)try{return L.apply(r,m.querySelectorAll(v)),r}catch(e){}finally{c===b&&t.removeAttribute("id")}}}return u(e.replace(B,"$1"),t,r,i)}function ae(){var e=[];function t(n,i){return e.push(n+" ")>r.cacheLength&&delete t[e.shift()],t[n+" "]=i}return t}function se(e){return e[b]=!0,e}function ue(e){var 
t=d.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function le(e,t){var n=e.split("|"),i=n.length;while(i--)r.attrHandle[n[i]]=t}function ce(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function fe(e){return function(t){return"input"===t.nodeName.toLowerCase()&&t.type===e}}function pe(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function de(e){return function(t){return"form"in t?t.parentNode&&!1===t.disabled?"label"in t?"label"in t.parentNode?t.parentNode.disabled===e:t.disabled===e:t.isDisabled===e||t.isDisabled!==!e&&ie(t)===e:t.disabled===e:"label"in t&&t.disabled===e}}function he(e){return se(function(t){return t=+t,se(function(n,r){var i,o=e([],n.length,t),a=o.length;while(a--)n[i=o[a]]&&(n[i]=!(r[i]=n[i]))})})}function ge(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}n=oe.support={},o=oe.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return!!t&&"HTML"!==t.nodeName},p=oe.setDocument=function(e){var t,i,a=e?e.ownerDocument||e:w;return a!==d&&9===a.nodeType&&a.documentElement?(d=a,h=d.documentElement,g=!o(d),w!==d&&(i=d.defaultView)&&i.top!==i&&(i.addEventListener?i.addEventListener("unload",re,!1):i.attachEvent&&i.attachEvent("onunload",re)),n.attributes=ue(function(e){return e.className="i",!e.getAttribute("className")}),n.getElementsByTagName=ue(function(e){return e.appendChild(d.createComment("")),!e.getElementsByTagName("*").length}),n.getElementsByClassName=Q.test(d.getElementsByClassName),n.getById=ue(function(e){return h.appendChild(e).id=b,!d.getElementsByName||!d.getElementsByName(b).length}),n.getById?(r.filter.ID=function(e){var t=e.replace(Z,ee);return function(e){return e.getAttribute("id")===t}},r.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&g){var n=t.getElementById(e);return n?[n]:[]}}):(r.filter.ID=function(e){var t=e.replace(Z,ee);return function(e){var n="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return n&&n.value===t}},r.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&g){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),r.find.TAG=n.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):n.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},r.find.CLASS=n.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&g)return t.getElementsByClassName(e)},v=[],y=[],(n.qsa=Q.test(d.querySelectorAll))&&(ue(function(e){h.appendChild(e).innerHTML="<a id='"+b+"'></a><select id='"+b+"-\r\\' msallowcapture=''><option selected=''></option></select>",e.querySelectorAll("[msallowcapture^='']").length&&y.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||y.push("\\["+M+"*(?:value|"+P+")"),e.querySelectorAll("[id~="+b+"-]").length||y.push("~="),e.querySelectorAll(":checked").length||y.push(":checked"),e.querySelectorAll("a#"+b+"+*").length||y.push(".#.+[+~]")}),ue(function(e){e.innerHTML="<a href='' disabled='disabled'></a><select disabled='disabled'><option/></select>";var 
t=d.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&y.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&y.push(":enabled",":disabled"),h.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&y.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),y.push(",.*:")})),(n.matchesSelector=Q.test(m=h.matches||h.webkitMatchesSelector||h.mozMatchesSelector||h.oMatchesSelector||h.msMatchesSelector))&&ue(function(e){n.disconnectedMatch=m.call(e,"*"),m.call(e,"[s!='']:x"),v.push("!=",W)}),y=y.length&&new RegExp(y.join("|")),v=v.length&&new RegExp(v.join("|")),t=Q.test(h.compareDocumentPosition),x=t||Q.test(h.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return f=!0,0;var r=!e.compareDocumentPosition-!t.compareDocumentPosition;return r||(1&(r=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!n.sortDetached&&t.compareDocumentPosition(e)===r?e===d||e.ownerDocument===w&&x(w,e)?-1:t===d||t.ownerDocument===w&&x(w,t)?1:c?O(c,e)-O(c,t):0:4&r?-1:1)}:function(e,t){if(e===t)return f=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===d?-1:t===d?1:i?-1:o?1:c?O(c,e)-O(c,t):0;if(i===o)return ce(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?ce(a[r],s[r]):a[r]===w?-1:s[r]===w?1:0},d):d},oe.matches=function(e,t){return oe(e,null,null,t)},oe.matchesSelector=function(e,t){if((e.ownerDocument||e)!==d&&p(e),t=t.replace(z,"='$1']"),n.matchesSelector&&g&&!S[t+" "]&&(!v||!v.test(t))&&(!y||!y.test(t)))try{var r=m.call(e,t);if(r||n.disconnectedMatch||e.document&&11!==e.document.nodeType)return r}catch(e){}return oe(t,d,null,[e]).length>0},oe.contains=function(e,t){return(e.ownerDocument||e)!==d&&p(e),x(e,t)},oe.attr=function(e,t){(e.ownerDocument||e)!==d&&p(e);var i=r.attrHandle[t.toLowerCase()],o=i&&N.call(r.attrHandle,t.toLowerCase())?i(e,t,!g):void 0;return void 0!==o?o:n.attributes||!g?e.getAttribute(t):(o=e.getAttributeNode(t))&&o.specified?o.value:null},oe.escape=function(e){return(e+"").replace(te,ne)},oe.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},oe.uniqueSort=function(e){var t,r=[],i=0,o=0;if(f=!n.detectDuplicates,c=!n.sortStable&&e.slice(0),e.sort(D),f){while(t=e[o++])t===e[o]&&(i=r.push(o));while(i--)e.splice(r[i],1)}return c=null,e},i=oe.getText=function(e){var t,n="",r=0,o=e.nodeType;if(o){if(1===o||9===o||11===o){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=i(e)}else if(3===o||4===o)return e.nodeValue}else while(t=e[r++])n+=i(t);return n},(r=oe.selectors={cacheLength:50,createPseudo:se,match:V,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(Z,ee),e[3]=(e[3]||e[4]||e[5]||"").replace(Z,ee),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||oe.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&oe.error(e[0]),e},PSEUDO:function(e){var 
t,n=!e[6]&&e[2];return V.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=a(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(Z,ee).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=E[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&E(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=oe.attr(r,e);return null==i?"!="===t:!t||(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i.replace($," ")+" ").indexOf(n)>-1:"|="===t&&(i===n||i.slice(0,n.length+1)===n+"-"))}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),a="last"!==e.slice(-4),s="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,u){var l,c,f,p,d,h,g=o!==a?"nextSibling":"previousSibling",y=t.parentNode,v=s&&t.nodeName.toLowerCase(),m=!u&&!s,x=!1;if(y){if(o){while(g){p=t;while(p=p[g])if(s?p.nodeName.toLowerCase()===v:1===p.nodeType)return!1;h=g="only"===e&&!h&&"nextSibling"}return!0}if(h=[a?y.firstChild:y.lastChild],a&&m){x=(d=(l=(c=(f=(p=y)[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]||[])[0]===T&&l[1])&&l[2],p=d&&y.childNodes[d];while(p=++d&&p&&p[g]||(x=d=0)||h.pop())if(1===p.nodeType&&++x&&p===t){c[e]=[T,d,x];break}}else if(m&&(x=d=(l=(c=(f=(p=t)[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]||[])[0]===T&&l[1]),!1===x)while(p=++d&&p&&p[g]||(x=d=0)||h.pop())if((s?p.nodeName.toLowerCase()===v:1===p.nodeType)&&++x&&(m&&((c=(f=p[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]=[T,x]),p===t))break;return(x-=i)===r||x%r==0&&x/r>=0}}},PSEUDO:function(e,t){var n,i=r.pseudos[e]||r.setFilters[e.toLowerCase()]||oe.error("unsupported pseudo: "+e);return i[b]?i(t):i.length>1?(n=[e,e,"",t],r.setFilters.hasOwnProperty(e.toLowerCase())?se(function(e,n){var r,o=i(e,t),a=o.length;while(a--)e[r=O(e,o[a])]=!(n[r]=o[a])}):function(e){return i(e,0,n)}):i}},pseudos:{not:se(function(e){var t=[],n=[],r=s(e.replace(B,"$1"));return r[b]?se(function(e,t,n,i){var o,a=r(e,null,i,[]),s=e.length;while(s--)(o=a[s])&&(e[s]=!(t[s]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),t[0]=null,!n.pop()}}),has:se(function(e){return function(t){return oe(e,t).length>0}}),contains:se(function(e){return e=e.replace(Z,ee),function(t){return(t.textContent||t.innerText||i(t)).indexOf(e)>-1}}),lang:se(function(e){return U.test(e||"")||oe.error("unsupported lang: "+e),e=e.replace(Z,ee).toLowerCase(),function(t){var n;do{if(n=g?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return(n=n.toLowerCase())===e||0===n.indexOf(e+"-")}while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===h},focus:function(e){return e===d.activeElement&&(!d.hasFocus||d.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:de(!1),disabled:de(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!r.pseudos.empty(e)},header:function(e){return 
Y.test(e.nodeName)},input:function(e){return G.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:he(function(){return[0]}),last:he(function(e,t){return[t-1]}),eq:he(function(e,t,n){return[n<0?n+t:n]}),even:he(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:he(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:he(function(e,t,n){for(var r=n<0?n+t:n;--r>=0;)e.push(r);return e}),gt:he(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=r.pseudos.eq;for(t in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})r.pseudos[t]=fe(t);for(t in{submit:!0,reset:!0})r.pseudos[t]=pe(t);function ye(){}ye.prototype=r.filters=r.pseudos,r.setFilters=new ye,a=oe.tokenize=function(e,t){var n,i,o,a,s,u,l,c=k[e+" "];if(c)return t?0:c.slice(0);s=e,u=[],l=r.preFilter;while(s){n&&!(i=F.exec(s))||(i&&(s=s.slice(i[0].length)||s),u.push(o=[])),n=!1,(i=_.exec(s))&&(n=i.shift(),o.push({value:n,type:i[0].replace(B," ")}),s=s.slice(n.length));for(a in r.filter)!(i=V[a].exec(s))||l[a]&&!(i=l[a](i))||(n=i.shift(),o.push({value:n,type:a,matches:i}),s=s.slice(n.length));if(!n)break}return t?s.length:s?oe.error(e):k(e,u).slice(0)};function ve(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function me(e,t,n){var r=t.dir,i=t.next,o=i||r,a=n&&"parentNode"===o,s=C++;return t.first?function(t,n,i){while(t=t[r])if(1===t.nodeType||a)return e(t,n,i);return!1}:function(t,n,u){var l,c,f,p=[T,s];if(u){while(t=t[r])if((1===t.nodeType||a)&&e(t,n,u))return!0}else while(t=t[r])if(1===t.nodeType||a)if(f=t[b]||(t[b]={}),c=f[t.uniqueID]||(f[t.uniqueID]={}),i&&i===t.nodeName.toLowerCase())t=t[r]||t;else{if((l=c[o])&&l[0]===T&&l[1]===s)return p[2]=l[2];if(c[o]=p,p[2]=e(t,n,u))return!0}return!1}}function xe(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function be(e,t,n){for(var r=0,i=t.length;r<i;r++)oe(e,t[r],n);return n}function we(e,t,n,r,i){for(var o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function Te(e,t,n,r,i,o){return r&&!r[b]&&(r=Te(r)),i&&!i[b]&&(i=Te(i,o)),se(function(o,a,s,u){var l,c,f,p=[],d=[],h=a.length,g=o||be(t||"*",s.nodeType?[s]:s,[]),y=!e||!o&&t?g:we(g,p,e,s,u),v=n?i||(o?e:h||r)?[]:a:y;if(n&&n(y,v,s,u),r){l=we(v,d),r(l,[],s,u),c=l.length;while(c--)(f=l[c])&&(v[d[c]]=!(y[d[c]]=f))}if(o){if(i||e){if(i){l=[],c=v.length;while(c--)(f=v[c])&&l.push(y[c]=f);i(null,v=[],l,u)}c=v.length;while(c--)(f=v[c])&&(l=i?O(o,f):p[c])>-1&&(o[l]=!(a[l]=f))}}else v=we(v===a?v.splice(h,v.length):v),i?i(null,a,v,u):L.apply(a,v)})}function Ce(e){for(var t,n,i,o=e.length,a=r.relative[e[0].type],s=a||r.relative[" "],u=a?1:0,c=me(function(e){return e===t},s,!0),f=me(function(e){return O(t,e)>-1},s,!0),p=[function(e,n,r){var i=!a&&(r||n!==l)||((t=n).nodeType?c(e,n,r):f(e,n,r));return t=null,i}];u<o;u++)if(n=r.relative[e[u].type])p=[me(xe(p),n)];else{if((n=r.filter[e[u].type].apply(null,e[u].matches))[b]){for(i=++u;i<o;i++)if(r.relative[e[i].type])break;return Te(u>1&&xe(p),u>1&&ve(e.slice(0,u-1).concat({value:" "===e[u-2].type?"*":""})).replace(B,"$1"),n,u<i&&Ce(e.slice(u,i)),i<o&&Ce(e=e.slice(i)),i<o&&ve(e))}p.push(n)}return xe(p)}function Ee(e,t){var n=t.length>0,i=e.length>0,o=function(o,a,s,u,c){var 
f,h,y,v=0,m="0",x=o&&[],b=[],w=l,C=o||i&&r.find.TAG("*",c),E=T+=null==w?1:Math.random()||.1,k=C.length;for(c&&(l=a===d||a||c);m!==k&&null!=(f=C[m]);m++){if(i&&f){h=0,a||f.ownerDocument===d||(p(f),s=!g);while(y=e[h++])if(y(f,a||d,s)){u.push(f);break}c&&(T=E)}n&&((f=!y&&f)&&v--,o&&x.push(f))}if(v+=m,n&&m!==v){h=0;while(y=t[h++])y(x,b,a,s);if(o){if(v>0)while(m--)x[m]||b[m]||(b[m]=j.call(u));b=we(b)}L.apply(u,b),c&&!o&&b.length>0&&v+t.length>1&&oe.uniqueSort(u)}return c&&(T=E,l=w),x};return n?se(o):o}return s=oe.compile=function(e,t){var n,r=[],i=[],o=S[e+" "];if(!o){t||(t=a(e)),n=t.length;while(n--)(o=Ce(t[n]))[b]?r.push(o):i.push(o);(o=S(e,Ee(i,r))).selector=e}return o},u=oe.select=function(e,t,n,i){var o,u,l,c,f,p="function"==typeof e&&e,d=!i&&a(e=p.selector||e);if(n=n||[],1===d.length){if((u=d[0]=d[0].slice(0)).length>2&&"ID"===(l=u[0]).type&&9===t.nodeType&&g&&r.relative[u[1].type]){if(!(t=(r.find.ID(l.matches[0].replace(Z,ee),t)||[])[0]))return n;p&&(t=t.parentNode),e=e.slice(u.shift().value.length)}o=V.needsContext.test(e)?0:u.length;while(o--){if(l=u[o],r.relative[c=l.type])break;if((f=r.find[c])&&(i=f(l.matches[0].replace(Z,ee),K.test(u[0].type)&&ge(t.parentNode)||t))){if(u.splice(o,1),!(e=i.length&&ve(u)))return L.apply(n,i),n;break}}}return(p||s(e,d))(i,t,!g,n,!t||K.test(e)&&ge(t.parentNode)||t),n},n.sortStable=b.split("").sort(D).join("")===b,n.detectDuplicates=!!f,p(),n.sortDetached=ue(function(e){return 1&e.compareDocumentPosition(d.createElement("fieldset"))}),ue(function(e){return e.innerHTML="<a href='#'></a>","#"===e.firstChild.getAttribute("href")})||le("type|href|height|width",function(e,t,n){if(!n)return e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),n.attributes&&ue(function(e){return e.innerHTML="<input/>",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||le("value",function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue}),ue(function(e){return null==e.getAttribute("disabled")})||le(P,function(e,t,n){var r;if(!n)return!0===e[t]?t.toLowerCase():(r=e.getAttributeNode(t))&&r.specified?r.value:null}),oe}(e);w.find=E,w.expr=E.selectors,w.expr[":"]=w.expr.pseudos,w.uniqueSort=w.unique=E.uniqueSort,w.text=E.getText,w.isXMLDoc=E.isXML,w.contains=E.contains,w.escapeSelector=E.escape;var k=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&w(e).is(n))break;r.push(e)}return r},S=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},D=w.expr.match.needsContext;function N(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var A=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,t,n){return g(t)?w.grep(e,function(e,r){return!!t.call(e,r,e)!==n}):t.nodeType?w.grep(e,function(e){return e===t!==n}):"string"!=typeof t?w.grep(e,function(e){return u.call(t,e)>-1!==n}):w.filter(t,e,n)}w.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?w.find.matchesSelector(r,e)?[r]:[]:w.find.matches(e,w.grep(t,function(e){return 1===e.nodeType}))},w.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(w(e).filter(function(){for(t=0;t<r;t++)if(w.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)w.find(e,i[t],n);return r>1?w.uniqueSort(n):n},filter:function(e){return this.pushStack(j(this,e||[],!1))},not:function(e){return this.pushStack(j(this,e||[],!0))},is:function(e){return!!j(this,"string"==typeof 
e&&D.test(e)?w(e):e||[],!1).length}});var q,L=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(w.fn.init=function(e,t,n){var i,o;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(i="<"===e[0]&&">"===e[e.length-1]&&e.length>=3?[null,e,null]:L.exec(e))||!i[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(i[1]){if(t=t instanceof w?t[0]:t,w.merge(this,w.parseHTML(i[1],t&&t.nodeType?t.ownerDocument||t:r,!0)),A.test(i[1])&&w.isPlainObject(t))for(i in t)g(this[i])?this[i](t[i]):this.attr(i,t[i]);return this}return(o=r.getElementById(i[2]))&&(this[0]=o,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):g(e)?void 0!==n.ready?n.ready(e):e(w):w.makeArray(e,this)}).prototype=w.fn,q=w(r);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};w.fn.extend({has:function(e){var t=w(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(w.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof e&&w(e);if(!D.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?a.index(n)>-1:1===n.nodeType&&w.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(o.length>1?w.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?u.call(w(e),this[0]):u.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(w.uniqueSort(w.merge(this.get(),w(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}});function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}w.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return k(e,"parentNode")},parentsUntil:function(e,t,n){return k(e,"parentNode",n)},next:function(e){return P(e,"nextSibling")},prev:function(e){return P(e,"previousSibling")},nextAll:function(e){return k(e,"nextSibling")},prevAll:function(e){return k(e,"previousSibling")},nextUntil:function(e,t,n){return k(e,"nextSibling",n)},prevUntil:function(e,t,n){return k(e,"previousSibling",n)},siblings:function(e){return S((e.parentNode||{}).firstChild,e)},children:function(e){return S(e.firstChild)},contents:function(e){return N(e,"iframe")?e.contentDocument:(N(e,"template")&&(e=e.content||e),w.merge([],e.childNodes))}},function(e,t){w.fn[e]=function(n,r){var i=w.map(this,t,n);return"Until"!==e.slice(-5)&&(r=n),r&&"string"==typeof r&&(i=w.filter(r,i)),this.length>1&&(O[e]||w.uniqueSort(i),H.test(e)&&i.reverse()),this.pushStack(i)}});var M=/[^\x20\t\r\n\f]+/g;function R(e){var t={};return w.each(e.match(M)||[],function(e,n){t[n]=!0}),t}w.Callbacks=function(e){e="string"==typeof e?R(e):w.extend({},e);var t,n,r,i,o=[],a=[],s=-1,u=function(){for(i=i||e.once,r=t=!0;a.length;s=-1){n=a.shift();while(++s<o.length)!1===o[s].apply(n[0],n[1])&&e.stopOnFalse&&(s=o.length,n=!1)}e.memory||(n=!1),t=!1,i&&(o=n?[]:"")},l={add:function(){return o&&(n&&!t&&(s=o.length-1,a.push(n)),function t(n){w.each(n,function(n,r){g(r)?e.unique&&l.has(r)||o.push(r):r&&r.length&&"string"!==x(r)&&t(r)})}(arguments),n&&!t&&u()),this},remove:function(){return w.each(arguments,function(e,t){var n;while((n=w.inArray(t,o,n))>-1)o.splice(n,1),n<=s&&s--}),this},has:function(e){return e?w.inArray(e,o)>-1:o.length>0},empty:function(){return o&&(o=[]),this},disable:function(){return i=a=[],o=n="",this},disabled:function(){return!o},lock:function(){return i=a=[],n||t||(o=n=""),this},locked:function(){return!!i},fireWith:function(e,n){return 
i||(n=[e,(n=n||[]).slice?n.slice():n],a.push(n),t||u()),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!r}};return l};function I(e){return e}function W(e){throw e}function $(e,t,n,r){var i;try{e&&g(i=e.promise)?i.call(e).done(t).fail(n):e&&g(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}w.extend({Deferred:function(t){var n=[["notify","progress",w.Callbacks("memory"),w.Callbacks("memory"),2],["resolve","done",w.Callbacks("once memory"),w.Callbacks("once memory"),0,"resolved"],["reject","fail",w.Callbacks("once memory"),w.Callbacks("once memory"),1,"rejected"]],r="pending",i={state:function(){return r},always:function(){return o.done(arguments).fail(arguments),this},"catch":function(e){return i.then(null,e)},pipe:function(){var e=arguments;return w.Deferred(function(t){w.each(n,function(n,r){var i=g(e[r[4]])&&e[r[4]];o[r[1]](function(){var e=i&&i.apply(this,arguments);e&&g(e.promise)?e.promise().progress(t.notify).done(t.resolve).fail(t.reject):t[r[0]+"With"](this,i?[e]:arguments)})}),e=null}).promise()},then:function(t,r,i){var o=0;function a(t,n,r,i){return function(){var s=this,u=arguments,l=function(){var e,l;if(!(t<o)){if((e=r.apply(s,u))===n.promise())throw new TypeError("Thenable self-resolution");l=e&&("object"==typeof e||"function"==typeof e)&&e.then,g(l)?i?l.call(e,a(o,n,I,i),a(o,n,W,i)):(o++,l.call(e,a(o,n,I,i),a(o,n,W,i),a(o,n,I,n.notifyWith))):(r!==I&&(s=void 0,u=[e]),(i||n.resolveWith)(s,u))}},c=i?l:function(){try{l()}catch(e){w.Deferred.exceptionHook&&w.Deferred.exceptionHook(e,c.stackTrace),t+1>=o&&(r!==W&&(s=void 0,u=[e]),n.rejectWith(s,u))}};t?c():(w.Deferred.getStackHook&&(c.stackTrace=w.Deferred.getStackHook()),e.setTimeout(c))}}return w.Deferred(function(e){n[0][3].add(a(0,e,g(i)?i:I,e.notifyWith)),n[1][3].add(a(0,e,g(t)?t:I)),n[2][3].add(a(0,e,g(r)?r:W))}).promise()},promise:function(e){return null!=e?w.extend(e,i):i}},o={};return w.each(n,function(e,t){var a=t[2],s=t[5];i[t[1]]=a.add,s&&a.add(function(){r=s},n[3-e][2].disable,n[3-e][3].disable,n[0][2].lock,n[0][3].lock),a.add(t[3].fire),o[t[0]]=function(){return o[t[0]+"With"](this===o?void 0:this,arguments),this},o[t[0]+"With"]=a.fireWith}),i.promise(o),t&&t.call(o,o),o},when:function(e){var t=arguments.length,n=t,r=Array(n),i=o.call(arguments),a=w.Deferred(),s=function(e){return function(n){r[e]=this,i[e]=arguments.length>1?o.call(arguments):n,--t||a.resolveWith(r,i)}};if(t<=1&&($(e,a.done(s(n)).resolve,a.reject,!t),"pending"===a.state()||g(i[n]&&i[n].then)))return a.then();while(n--)$(i[n],s(n),a.reject);return a.promise()}});var B=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;w.Deferred.exceptionHook=function(t,n){e.console&&e.console.warn&&t&&B.test(t.name)&&e.console.warn("jQuery.Deferred exception: "+t.message,t.stack,n)},w.readyException=function(t){e.setTimeout(function(){throw t})};var F=w.Deferred();w.fn.ready=function(e){return F.then(e)["catch"](function(e){w.readyException(e)}),this},w.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--w.readyWait:w.isReady)||(w.isReady=!0,!0!==e&&--w.readyWait>0||F.resolveWith(r,[w]))}}),w.ready.then=F.then;function _(){r.removeEventListener("DOMContentLoaded",_),e.removeEventListener("load",_),w.ready()}"complete"===r.readyState||"loading"!==r.readyState&&!r.documentElement.doScroll?e.setTimeout(w.ready):(r.addEventListener("DOMContentLoaded",_),e.addEventListener("load",_));var z=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===x(n)){i=!0;for(s in 
n)z(e,t,s,n[s],!0,o,a)}else if(void 0!==r&&(i=!0,g(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(w(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},X=/^-ms-/,U=/-([a-z])/g;function V(e,t){return t.toUpperCase()}function G(e){return e.replace(X,"ms-").replace(U,V)}var Y=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function Q(){this.expando=w.expando+Q.uid++}Q.uid=1,Q.prototype={cache:function(e){var t=e[this.expando];return t||(t={},Y(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[G(t)]=n;else for(r in t)i[G(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][G(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 0!==t){n=(t=Array.isArray(t)?t.map(G):(t=G(t))in r?[t]:t.match(M)||[]).length;while(n--)delete r[t[n]]}(void 0===t||w.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!w.isEmptyObject(t)}};var J=new Q,K=new Q,Z=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,ee=/[A-Z]/g;function te(e){return"true"===e||"false"!==e&&("null"===e?null:e===+e+""?+e:Z.test(e)?JSON.parse(e):e)}function ne(e,t,n){var r;if(void 0===n&&1===e.nodeType)if(r="data-"+t.replace(ee,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n=te(n)}catch(e){}K.set(e,t,n)}else n=void 0;return n}w.extend({hasData:function(e){return K.hasData(e)||J.hasData(e)},data:function(e,t,n){return K.access(e,t,n)},removeData:function(e,t){K.remove(e,t)},_data:function(e,t,n){return J.access(e,t,n)},_removeData:function(e,t){J.remove(e,t)}}),w.fn.extend({data:function(e,t){var n,r,i,o=this[0],a=o&&o.attributes;if(void 0===e){if(this.length&&(i=K.get(o),1===o.nodeType&&!J.get(o,"hasDataAttrs"))){n=a.length;while(n--)a[n]&&0===(r=a[n].name).indexOf("data-")&&(r=G(r.slice(5)),ne(o,r,i[r]));J.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof e?this.each(function(){K.set(this,e)}):z(this,function(t){var n;if(o&&void 0===t){if(void 0!==(n=K.get(o,e)))return n;if(void 0!==(n=ne(o,e)))return n}else this.each(function(){K.set(this,e,t)})},null,t,arguments.length>1,null,!0)},removeData:function(e){return this.each(function(){K.remove(this,e)})}}),w.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=J.get(e,t),n&&(!r||Array.isArray(n)?r=J.access(e,t,w.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=w.queue(e,t),r=n.length,i=n.shift(),o=w._queueHooks(e,t),a=function(){w.dequeue(e,t)};"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,a,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return J.get(e,n)||J.access(e,n,{empty:w.Callbacks("once memory").add(function(){J.remove(e,[t+"queue",n])})})}}),w.fn.extend({queue:function(e,t){var n=2;return"string"!=typeof e&&(t=e,e="fx",n--),arguments.length<n?w.queue(this[0],e):void 0===t?this:this.each(function(){var n=w.queue(this,e,t);w._queueHooks(this,e),"fx"===e&&"inprogress"!==n[0]&&w.dequeue(this,e)})},dequeue:function(e){return this.each(function(){w.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var 
n,r=1,i=w.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=J.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var re=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,ie=new RegExp("^(?:([+-])=|)("+re+")([a-z%]*)$","i"),oe=["Top","Right","Bottom","Left"],ae=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&w.contains(e.ownerDocument,e)&&"none"===w.css(e,"display")},se=function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];i=n.apply(e,r||[]);for(o in t)e.style[o]=a[o];return i};function ue(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return w.css(e,t,"")},u=s(),l=n&&n[3]||(w.cssNumber[t]?"":"px"),c=(w.cssNumber[t]||"px"!==l&&+u)&&ie.exec(w.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)w.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,w.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var le={};function ce(e){var t,n=e.ownerDocument,r=e.nodeName,i=le[r];return i||(t=n.body.appendChild(n.createElement(r)),i=w.css(t,"display"),t.parentNode.removeChild(t),"none"===i&&(i="block"),le[r]=i,i)}function fe(e,t){for(var n,r,i=[],o=0,a=e.length;o<a;o++)(r=e[o]).style&&(n=r.style.display,t?("none"===n&&(i[o]=J.get(r,"display")||null,i[o]||(r.style.display="")),""===r.style.display&&ae(r)&&(i[o]=ce(r))):"none"!==n&&(i[o]="none",J.set(r,"display",n)));for(o=0;o<a;o++)null!=i[o]&&(e[o].style.display=i[o]);return e}w.fn.extend({show:function(){return fe(this,!0)},hide:function(){return fe(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){ae(this)?w(this).show():w(this).hide()})}});var pe=/^(?:checkbox|radio)$/i,de=/<([a-z][^\/\0>\x20\t\r\n\f]+)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ge.optgroup=ge.option,ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td;function ye(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&N(e,t)?w.merge([e],n):n}function ve(e,t){for(var n=0,r=e.length;n<r;n++)J.set(e[n],"globalEval",!t||J.get(t[n],"globalEval"))}var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===x(o))w.merge(p,o.nodeType?[o]:o);else if(me.test(o)){a=a||f.appendChild(t.createElement("div")),s=(de.exec(o)||["",""])[1].toLowerCase(),u=ge[s]||ge._default,a.innerHTML=u[1]+w.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;w.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&w.inArray(o,r)>-1)i&&i.push(o);else if(l=w.contains(o.ownerDocument,o),a=ye(f.appendChild(o),"script"),l&&ve(a),n){c=0;while(o=a[c++])he.test(o.type||"")&&n.push(o)}return f}!function(){var 
e=r.createDocumentFragment().appendChild(r.createElement("div")),t=r.createElement("input");t.setAttribute("type","radio"),t.setAttribute("checked","checked"),t.setAttribute("name","t"),e.appendChild(t),h.checkClone=e.cloneNode(!0).cloneNode(!0).lastChild.checked,e.innerHTML="<textarea>x</textarea>",h.noCloneChecked=!!e.cloneNode(!0).lastChild.defaultValue}();var be=r.documentElement,we=/^key/,Te=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ce=/^([^.]*)(?:\.(.+)|)/;function Ee(){return!0}function ke(){return!1}function Se(){try{return r.activeElement}catch(e){}}function De(e,t,n,r,i,o){var a,s;if("object"==typeof t){"string"!=typeof n&&(r=r||n,n=void 0);for(s in t)De(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=ke;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return w().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=w.guid++)),e.each(function(){w.event.add(this,t,i,r,n)})}w.event={global:{},add:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,y=J.get(e);if(y){n.handler&&(n=(o=n).handler,i=o.selector),i&&w.find.matchesSelector(be,i),n.guid||(n.guid=w.guid++),(u=y.events)||(u=y.events={}),(a=y.handle)||(a=y.handle=function(t){return"undefined"!=typeof w&&w.event.triggered!==t.type?w.event.dispatch.apply(e,arguments):void 0}),l=(t=(t||"").match(M)||[""]).length;while(l--)d=g=(s=Ce.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=w.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=w.event.special[d]||{},c=w.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&w.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(e,r,h,a)||e.addEventListener&&e.addEventListener(d,a)),f.add&&(f.add.call(e,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),w.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,y=J.hasData(e)&&J.get(e);if(y&&(u=y.events)){l=(t=(t||"").match(M)||[""]).length;while(l--)if(s=Ce.exec(t[l])||[],d=g=s[1],h=(s[2]||"").split(".").sort(),d){f=w.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,y.handle)||w.removeEvent(e,d,y.handle),delete u[d])}else for(d in u)w.event.remove(e,d+t[l],n,r,!0);w.isEmptyObject(u)&&J.remove(e,"handle events")}},dispatch:function(e){var t=w.event.fix(e),n,r,i,o,a,s,u=new Array(arguments.length),l=(J.get(this,"events")||{})[t.type]||[],c=w.event.special[t.type]||{};for(u[0]=t,n=1;n<arguments.length;n++)u[n]=arguments[n];if(t.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,t)){s=w.event.handlers.call(this,t,l),n=0;while((o=s[n++])&&!t.isPropagationStopped()){t.currentTarget=o.elem,r=0;while((a=o.handlers[r++])&&!t.isImmediatePropagationStopped())t.rnamespace&&!t.rnamespace.test(a.namespace)||(t.handleObj=a,t.data=a.data,void 0!==(i=((w.event.special[a.origType]||{}).handle||a.handler).apply(o.elem,u))&&!1===(t.result=i)&&(t.preventDefault(),t.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,t),t.result}},handlers:function(e,t){var 
n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&e.button>=1))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?w(i,this).index(l)>-1:w.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(e,t){Object.defineProperty(w.Event.prototype,e,{enumerable:!0,configurable:!0,get:g(t)?function(){if(this.originalEvent)return t(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[e]},set:function(t){Object.defineProperty(this,e,{enumerable:!0,configurable:!0,writable:!0,value:t})}})},fix:function(e){return e[w.expando]?e:new w.Event(e)},special:{load:{noBubble:!0},focus:{trigger:function(){if(this!==Se()&&this.focus)return this.focus(),!1},delegateType:"focusin"},blur:{trigger:function(){if(this===Se()&&this.blur)return this.blur(),!1},delegateType:"focusout"},click:{trigger:function(){if("checkbox"===this.type&&this.click&&N(this,"input"))return this.click(),!1},_default:function(e){return N(e.target,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},w.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},w.Event=function(e,t){if(!(this instanceof w.Event))return new w.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?Ee:ke,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&w.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[w.expando]=!0},w.Event.prototype={constructor:w.Event,isDefaultPrevented:ke,isPropagationStopped:ke,isImmediatePropagationStopped:ke,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=Ee,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=Ee,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var e=this.originalEvent;this.isImmediatePropagationStopped=Ee,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},w.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:function(e){var t=e.button;return null==e.which&&we.test(e.type)?null!=e.charCode?e.charCode:e.keyCode:!e.which&&void 0!==t&&Te.test(e.type)?1&t?1:2&t?3:4&t?2:0:e.which}},w.event.addProp),w.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,t){w.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,o=e.handleObj;return i&&(i===r||w.contains(r,i))||(e.type=o.origType,n=o.handler.apply(this,arguments),e.type=t),n}}}),w.fn.extend({on:function(e,t,n,r){return De(this,e,t,n,r)},one:function(e,t,n,r){return De(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return 
r=e.handleObj,w(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=ke),this.each(function(){w.event.remove(this,e,n,t)})}});var Ne=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi,Ae=/<script|<style|<link/i,je=/checked\s*(?:[^=]|=\s*.checked.)/i,qe=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;function Le(e,t){return N(e,"table")&&N(11!==t.nodeType?t:t.firstChild,"tr")?w(e).children("tbody")[0]||e:e}function He(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Oe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Pe(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(J.hasData(e)&&(o=J.access(e),a=J.set(t,o),l=o.events)){delete a.handle,a.events={};for(i in l)for(n=0,r=l[i].length;n<r;n++)w.event.add(t,i,l[i][n])}K.hasData(e)&&(s=K.access(e),u=w.extend({},s),K.set(t,u))}}function Me(e,t){var n=t.nodeName.toLowerCase();"input"===n&&pe.test(e.type)?t.checked=e.checked:"input"!==n&&"textarea"!==n||(t.defaultValue=e.defaultValue)}function Re(e,t,n,r){t=a.apply([],t);var i,o,s,u,l,c,f=0,p=e.length,d=p-1,y=t[0],v=g(y);if(v||p>1&&"string"==typeof y&&!h.checkClone&&je.test(y))return e.each(function(i){var o=e.eq(i);v&&(t[0]=y.call(this,i,o.html())),Re(o,t,n,r)});if(p&&(i=xe(t,e[0].ownerDocument,!1,e,r),o=i.firstChild,1===i.childNodes.length&&(i=o),o||r)){for(u=(s=w.map(ye(i,"script"),He)).length;f<p;f++)l=i,f!==d&&(l=w.clone(l,!0,!0),u&&w.merge(s,ye(l,"script"))),n.call(e[f],l,f);if(u)for(c=s[s.length-1].ownerDocument,w.map(s,Oe),f=0;f<u;f++)l=s[f],he.test(l.type||"")&&!J.access(l,"globalEval")&&w.contains(c,l)&&(l.src&&"module"!==(l.type||"").toLowerCase()?w._evalUrl&&w._evalUrl(l.src):m(l.textContent.replace(qe,""),c,l))}return e}function Ie(e,t,n){for(var r,i=t?w.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||w.cleanData(ye(r)),r.parentNode&&(n&&w.contains(r.ownerDocument,r)&&ve(ye(r,"script")),r.parentNode.removeChild(r));return e}w.extend({htmlPrefilter:function(e){return e.replace(Ne,"<$1></$2>")},clone:function(e,t,n){var r,i,o,a,s=e.cloneNode(!0),u=w.contains(e.ownerDocument,e);if(!(h.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||w.isXMLDoc(e)))for(a=ye(s),r=0,i=(o=ye(e)).length;r<i;r++)Me(o[r],a[r]);if(t)if(n)for(o=o||ye(e),a=a||ye(s),r=0,i=o.length;r<i;r++)Pe(o[r],a[r]);else Pe(e,s);return(a=ye(s,"script")).length>0&&ve(a,!u&&ye(e,"script")),s},cleanData:function(e){for(var t,n,r,i=w.event.special,o=0;void 0!==(n=e[o]);o++)if(Y(n)){if(t=n[J.expando]){if(t.events)for(r in t.events)i[r]?w.event.remove(n,r):w.removeEvent(n,r,t.handle);n[J.expando]=void 0}n[K.expando]&&(n[K.expando]=void 0)}}}),w.fn.extend({detach:function(e){return Ie(this,e,!0)},remove:function(e){return Ie(this,e)},text:function(e){return z(this,function(e){return void 0===e?w.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return Re(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Le(this,e).appendChild(e)})},prepend:function(){return Re(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Le(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return 
Re(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return Re(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(w.cleanData(ye(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map(function(){return w.clone(this,e,t)})},html:function(e){return z(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Ae.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=w.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(w.cleanData(ye(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var e=[];return Re(this,arguments,function(t){var n=this.parentNode;w.inArray(this,e)<0&&(w.cleanData(ye(this)),n&&n.replaceChild(t,this))},e)}}),w.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){w.fn[e]=function(e){for(var n,r=[],i=w(e),o=i.length-1,a=0;a<=o;a++)n=a===o?this:this.clone(!0),w(i[a])[t](n),s.apply(r,n.get());return this.pushStack(r)}});var We=new RegExp("^("+re+")(?!px)[a-z%]+$","i"),$e=function(t){var n=t.ownerDocument.defaultView;return n&&n.opener||(n=e),n.getComputedStyle(t)},Be=new RegExp(oe.join("|"),"i");!function(){function t(){if(c){l.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",c.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",be.appendChild(l).appendChild(c);var t=e.getComputedStyle(c);i="1%"!==t.top,u=12===n(t.marginLeft),c.style.right="60%",s=36===n(t.right),o=36===n(t.width),c.style.position="absolute",a=36===c.offsetWidth||"absolute",be.removeChild(l),c=null}}function n(e){return Math.round(parseFloat(e))}var i,o,a,s,u,l=r.createElement("div"),c=r.createElement("div");c.style&&(c.style.backgroundClip="content-box",c.cloneNode(!0).style.backgroundClip="",h.clearCloneStyle="content-box"===c.style.backgroundClip,w.extend(h,{boxSizingReliable:function(){return t(),o},pixelBoxStyles:function(){return t(),s},pixelPosition:function(){return t(),i},reliableMarginLeft:function(){return t(),u},scrollboxSize:function(){return t(),a}}))}();function Fe(e,t,n){var r,i,o,a,s=e.style;return(n=n||$e(e))&&(""!==(a=n.getPropertyValue(t)||n[t])||w.contains(e.ownerDocument,e)||(a=w.style(e,t)),!h.pixelBoxStyles()&&We.test(a)&&Be.test(t)&&(r=s.width,i=s.minWidth,o=s.maxWidth,s.minWidth=s.maxWidth=s.width=a,a=n.width,s.width=r,s.minWidth=i,s.maxWidth=o)),void 0!==a?a+"":a}function _e(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}var ze=/^(none|table(?!-c[ea]).+)/,Xe=/^--/,Ue={position:"absolute",visibility:"hidden",display:"block"},Ve={letterSpacing:"0",fontWeight:"400"},Ge=["Webkit","Moz","ms"],Ye=r.createElement("div").style;function Qe(e){if(e in Ye)return e;var t=e[0].toUpperCase()+e.slice(1),n=Ge.length;while(n--)if((e=Ge[n]+t)in Ye)return e}function Je(e){var t=w.cssProps[e];return t||(t=w.cssProps[e]=Qe(e)||e),t}function Ke(e,t,n){var r=ie.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function Ze(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0;if(n===(r?"border":"content"))return 
0;for(;a<4;a+=2)"margin"===n&&(u+=w.css(e,n+oe[a],!0,i)),r?("content"===n&&(u-=w.css(e,"padding"+oe[a],!0,i)),"margin"!==n&&(u-=w.css(e,"border"+oe[a]+"Width",!0,i))):(u+=w.css(e,"padding"+oe[a],!0,i),"padding"!==n?u+=w.css(e,"border"+oe[a]+"Width",!0,i):s+=w.css(e,"border"+oe[a]+"Width",!0,i));return!r&&o>=0&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))),u}function et(e,t,n){var r=$e(e),i=Fe(e,t,r),o="border-box"===w.css(e,"boxSizing",!1,r),a=o;if(We.test(i)){if(!n)return i;i="auto"}return a=a&&(h.boxSizingReliable()||i===e.style[t]),("auto"===i||!parseFloat(i)&&"inline"===w.css(e,"display",!1,r))&&(i=e["offset"+t[0].toUpperCase()+t.slice(1)],a=!0),(i=parseFloat(i)||0)+Ze(e,t,n||(o?"border":"content"),a,r,i)+"px"}w.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Fe(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=G(t),u=Xe.test(t),l=e.style;if(u||(t=Je(s)),a=w.cssHooks[t]||w.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"==(o=typeof n)&&(i=ie.exec(n))&&i[1]&&(n=ue(e,t,i),o="number"),null!=n&&n===n&&("number"===o&&(n+=i&&i[3]||(w.cssNumber[s]?"":"px")),h.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=G(t);return Xe.test(t)||(t=Je(s)),(a=w.cssHooks[t]||w.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=Fe(e,t,r)),"normal"===i&&t in Ve&&(i=Ve[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),w.each(["height","width"],function(e,t){w.cssHooks[t]={get:function(e,n,r){if(n)return!ze.test(w.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?et(e,t,r):se(e,Ue,function(){return et(e,t,r)})},set:function(e,n,r){var i,o=$e(e),a="border-box"===w.css(e,"boxSizing",!1,o),s=r&&Ze(e,t,r,a,o);return a&&h.scrollboxSize()===o.position&&(s-=Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-parseFloat(o[t])-Ze(e,t,"border",!1,o)-.5)),s&&(i=ie.exec(n))&&"px"!==(i[3]||"px")&&(e.style[t]=n,n=w.css(e,t)),Ke(e,n,s)}}}),w.cssHooks.marginLeft=_e(h.reliableMarginLeft,function(e,t){if(t)return(parseFloat(Fe(e,"marginLeft"))||e.getBoundingClientRect().left-se(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),w.each({margin:"",padding:"",border:"Width"},function(e,t){w.cssHooks[e+t]={expand:function(n){for(var r=0,i={},o="string"==typeof n?n.split(" "):[n];r<4;r++)i[e+oe[r]+t]=o[r]||o[r-2]||o[0];return i}},"margin"!==e&&(w.cssHooks[e+t].set=Ke)}),w.fn.extend({css:function(e,t){return z(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=$e(e),i=t.length;a<i;a++)o[t[a]]=w.css(e,t[a],!1,r);return o}return void 0!==n?w.style(e,t,n):w.css(e,t)},e,t,arguments.length>1)}});function tt(e,t,n,r,i){return new tt.prototype.init(e,t,n,r,i)}w.Tween=tt,tt.prototype={constructor:tt,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||w.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(w.cssNumber[n]?"":"px")},cur:function(){var e=tt.propHooks[this.prop];return e&&e.get?e.get(this):tt.propHooks._default.get(this)},run:function(e){var t,n=tt.propHooks[this.prop];return 
this.options.duration?this.pos=t=w.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):tt.propHooks._default.set(this),this}},tt.prototype.init.prototype=tt.prototype,tt.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=w.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){w.fx.step[e.prop]?w.fx.step[e.prop](e):1!==e.elem.nodeType||null==e.elem.style[w.cssProps[e.prop]]&&!w.cssHooks[e.prop]?e.elem[e.prop]=e.now:w.style(e.elem,e.prop,e.now+e.unit)}}},tt.propHooks.scrollTop=tt.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},w.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},w.fx=tt.prototype.init,w.fx.step={};var nt,rt,it=/^(?:toggle|show|hide)$/,ot=/queueHooks$/;function at(){rt&&(!1===r.hidden&&e.requestAnimationFrame?e.requestAnimationFrame(at):e.setTimeout(at,w.fx.interval),w.fx.tick())}function st(){return e.setTimeout(function(){nt=void 0}),nt=Date.now()}function ut(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=oe[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function lt(e,t,n){for(var r,i=(pt.tweeners[t]||[]).concat(pt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function ct(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&ae(e),y=J.get(e,"fxshow");n.queue||(null==(a=w._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,w.queue(e,"fx").length||a.empty.fire()})}));for(r in t)if(i=t[r],it.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!y||void 0===y[r])continue;g=!0}d[r]=y&&y[r]||w.style(e,r)}if((u=!w.isEmptyObject(t))||!w.isEmptyObject(d)){f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=y&&y.display)&&(l=J.get(e,"display")),"none"===(c=w.css(e,"display"))&&(l?c=l:(fe([e],!0),l=e.style.display||l,c=w.css(e,"display"),fe([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===w.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1;for(r in d)u||(y?"hidden"in y&&(g=y.hidden):y=J.access(e,"fxshow",{display:l}),o&&(y.hidden=!g),g&&fe([e],!0),p.done(function(){g||fe([e]),J.remove(e,"fxshow");for(r in d)w.style(e,r,d[r])})),u=lt(g?y[r]:0,r,p),r in y||(y[r]=u.start,g&&(u.end=u.start,u.start=0))}}function ft(e,t){var n,r,i,o,a;for(n in e)if(r=G(n),i=t[r],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=w.cssHooks[r])&&"expand"in a){o=a.expand(o),delete e[r];for(n in o)n in e||(e[n]=o[n],t[n]=i)}else t[r]=i}function pt(e,t,n){var r,i,o=0,a=pt.prefilters.length,s=w.Deferred().always(function(){delete u.elem}),u=function(){if(i)return!1;for(var t=nt||st(),n=Math.max(0,l.startTime+l.duration-t),r=1-(n/l.duration||0),o=0,a=l.tweens.length;o<a;o++)l.tweens[o].run(r);return 
s.notifyWith(e,[l,r,n]),r<1&&a?n:(a||s.notifyWith(e,[l,1,0]),s.resolveWith(e,[l]),!1)},l=s.promise({elem:e,props:w.extend({},t),opts:w.extend(!0,{specialEasing:{},easing:w.easing._default},n),originalProperties:t,originalOptions:n,startTime:nt||st(),duration:n.duration,tweens:[],createTween:function(t,n){var r=w.Tween(e,l.opts,t,n,l.opts.specialEasing[t]||l.opts.easing);return l.tweens.push(r),r},stop:function(t){var n=0,r=t?l.tweens.length:0;if(i)return this;for(i=!0;n<r;n++)l.tweens[n].run(1);return t?(s.notifyWith(e,[l,1,0]),s.resolveWith(e,[l,t])):s.rejectWith(e,[l,t]),this}}),c=l.props;for(ft(c,l.opts.specialEasing);o<a;o++)if(r=pt.prefilters[o].call(l,e,c,l.opts))return g(r.stop)&&(w._queueHooks(l.elem,l.opts.queue).stop=r.stop.bind(r)),r;return w.map(c,lt,l),g(l.opts.start)&&l.opts.start.call(e,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),w.fx.timer(w.extend(u,{elem:e,anim:l,queue:l.opts.queue})),l}w.Animation=w.extend(pt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return ue(n.elem,e,ie.exec(t),n),n}]},tweener:function(e,t){g(e)?(t=e,e=["*"]):e=e.match(M);for(var n,r=0,i=e.length;r<i;r++)n=e[r],pt.tweeners[n]=pt.tweeners[n]||[],pt.tweeners[n].unshift(t)},prefilters:[ct],prefilter:function(e,t){t?pt.prefilters.unshift(e):pt.prefilters.push(e)}}),w.speed=function(e,t,n){var r=e&&"object"==typeof e?w.extend({},e):{complete:n||!n&&t||g(e)&&e,duration:e,easing:n&&t||t&&!g(t)&&t};return w.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in w.fx.speeds?r.duration=w.fx.speeds[r.duration]:r.duration=w.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){g(r.old)&&r.old.call(this),r.queue&&w.dequeue(this,r.queue)},r},w.fn.extend({fadeTo:function(e,t,n,r){return this.filter(ae).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var i=w.isEmptyObject(e),o=w.speed(t,n,r),a=function(){var t=pt(this,w.extend({},e),o);(i||J.get(this,"finish"))&&t.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(e,t,n){var r=function(e){var t=e.stop;delete e.stop,t(n)};return"string"!=typeof e&&(n=t,t=e,e=void 0),t&&!1!==e&&this.queue(e||"fx",[]),this.each(function(){var t=!0,i=null!=e&&e+"queueHooks",o=w.timers,a=J.get(this);if(i)a[i]&&a[i].stop&&r(a[i]);else for(i in a)a[i]&&a[i].stop&&ot.test(i)&&r(a[i]);for(i=o.length;i--;)o[i].elem!==this||null!=e&&o[i].queue!==e||(o[i].anim.stop(n),t=!1,o.splice(i,1));!t&&n||w.dequeue(this,e)})},finish:function(e){return!1!==e&&(e=e||"fx"),this.each(function(){var t,n=J.get(this),r=n[e+"queue"],i=n[e+"queueHooks"],o=w.timers,a=r?r.length:0;for(n.finish=!0,w.queue(this,e,[]),i&&i.stop&&i.stop.call(this,!0),t=o.length;t--;)o[t].elem===this&&o[t].queue===e&&(o[t].anim.stop(!0),o.splice(t,1));for(t=0;t<a;t++)r[t]&&r[t].finish&&r[t].finish.call(this);delete n.finish})}}),w.each(["toggle","show","hide"],function(e,t){var n=w.fn[t];w.fn[t]=function(e,r,i){return null==e||"boolean"==typeof e?n.apply(this,arguments):this.animate(ut(t,!0),e,r,i)}}),w.each({slideDown:ut("show"),slideUp:ut("hide"),slideToggle:ut("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){w.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),w.timers=[],w.fx.tick=function(){var e,t=0,n=w.timers;for(nt=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||w.fx.stop(),nt=void 
0},w.fx.timer=function(e){w.timers.push(e),w.fx.start()},w.fx.interval=13,w.fx.start=function(){rt||(rt=!0,at())},w.fx.stop=function(){rt=null},w.fx.speeds={slow:600,fast:200,_default:400},w.fn.delay=function(t,n){return t=w.fx?w.fx.speeds[t]||t:t,n=n||"fx",this.queue(n,function(n,r){var i=e.setTimeout(n,t);r.stop=function(){e.clearTimeout(i)}})},function(){var e=r.createElement("input"),t=r.createElement("select").appendChild(r.createElement("option"));e.type="checkbox",h.checkOn=""!==e.value,h.optSelected=t.selected,(e=r.createElement("input")).value="t",e.type="radio",h.radioValue="t"===e.value}();var dt,ht=w.expr.attrHandle;w.fn.extend({attr:function(e,t){return z(this,w.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){w.removeAttr(this,e)})}}),w.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?w.prop(e,t,n):(1===o&&w.isXMLDoc(e)||(i=w.attrHooks[t.toLowerCase()]||(w.expr.match.bool.test(t)?dt:void 0)),void 0!==n?null===n?void w.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=w.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!h.radioValue&&"radio"===t&&N(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(M);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),dt={set:function(e,t,n){return!1===t?w.removeAttr(e,n):e.setAttribute(n,n),n}},w.each(w.expr.match.bool.source.match(/\w+/g),function(e,t){var n=ht[t]||w.find.attr;ht[t]=function(e,t,r){var i,o,a=t.toLowerCase();return r||(o=ht[a],ht[a]=i,i=null!=n(e,t,r)?a:null,ht[a]=o),i}});var gt=/^(?:input|select|textarea|button)$/i,yt=/^(?:a|area)$/i;w.fn.extend({prop:function(e,t){return z(this,w.prop,e,t,arguments.length>1)},removeProp:function(e){return this.each(function(){delete this[w.propFix[e]||e]})}}),w.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&w.isXMLDoc(e)||(t=w.propFix[t]||t,i=w.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=w.find.attr(e,"tabindex");return t?parseInt(t,10):gt.test(e.nodeName)||yt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),h.optSelected||(w.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),w.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){w.propFix[this.toLowerCase()]=this});function vt(e){return(e.match(M)||[]).join(" ")}function mt(e){return e.getAttribute&&e.getAttribute("class")||""}function xt(e){return Array.isArray(e)?e:"string"==typeof e?e.match(M)||[]:[]}w.fn.extend({addClass:function(e){var t,n,r,i,o,a,s,u=0;if(g(e))return this.each(function(t){w(this).addClass(e.call(this,t,mt(this)))});if((t=xt(e)).length)while(n=this[u++])if(i=mt(n),r=1===n.nodeType&&" "+vt(i)+" "){a=0;while(o=t[a++])r.indexOf(" "+o+" ")<0&&(r+=o+" ");i!==(s=vt(r))&&n.setAttribute("class",s)}return this},removeClass:function(e){var t,n,r,i,o,a,s,u=0;if(g(e))return this.each(function(t){w(this).removeClass(e.call(this,t,mt(this)))});if(!arguments.length)return 
this.attr("class","");if((t=xt(e)).length)while(n=this[u++])if(i=mt(n),r=1===n.nodeType&&" "+vt(i)+" "){a=0;while(o=t[a++])while(r.indexOf(" "+o+" ")>-1)r=r.replace(" "+o+" "," ");i!==(s=vt(r))&&n.setAttribute("class",s)}return this},toggleClass:function(e,t){var n=typeof e,r="string"===n||Array.isArray(e);return"boolean"==typeof t&&r?t?this.addClass(e):this.removeClass(e):g(e)?this.each(function(n){w(this).toggleClass(e.call(this,n,mt(this),t),t)}):this.each(function(){var t,i,o,a;if(r){i=0,o=w(this),a=xt(e);while(t=a[i++])o.hasClass(t)?o.removeClass(t):o.addClass(t)}else void 0!==e&&"boolean"!==n||((t=mt(this))&&J.set(this,"__className__",t),this.setAttribute&&this.setAttribute("class",t||!1===e?"":J.get(this,"__className__")||""))})},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&(" "+vt(mt(n))+" ").indexOf(t)>-1)return!0;return!1}});var bt=/\r/g;w.fn.extend({val:function(e){var t,n,r,i=this[0];{if(arguments.length)return r=g(e),this.each(function(n){var i;1===this.nodeType&&(null==(i=r?e.call(this,n,w(this).val()):e)?i="":"number"==typeof i?i+="":Array.isArray(i)&&(i=w.map(i,function(e){return null==e?"":e+""})),(t=w.valHooks[this.type]||w.valHooks[this.nodeName.toLowerCase()])&&"set"in t&&void 0!==t.set(this,i,"value")||(this.value=i))});if(i)return(t=w.valHooks[i.type]||w.valHooks[i.nodeName.toLowerCase()])&&"get"in t&&void 0!==(n=t.get(i,"value"))?n:"string"==typeof(n=i.value)?n.replace(bt,""):null==n?"":n}}}),w.extend({valHooks:{option:{get:function(e){var t=w.find.attr(e,"value");return null!=t?t:vt(w.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!N(n.parentNode,"optgroup"))){if(t=w(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=w.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=w.inArray(w.valHooks.option.get(r),o)>-1)&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),w.each(["radio","checkbox"],function(){w.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=w.inArray(w(e).val(),t)>-1}},h.checkOn||(w.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})}),h.focusin="onfocusin"in e;var wt=/^(?:focusinfocus|focusoutblur)$/,Tt=function(e){e.stopPropagation()};w.extend(w.event,{trigger:function(t,n,i,o){var a,s,u,l,c,p,d,h,v=[i||r],m=f.call(t,"type")?t.type:t,x=f.call(t,"namespace")?t.namespace.split("."):[];if(s=h=u=i=i||r,3!==i.nodeType&&8!==i.nodeType&&!wt.test(m+w.event.triggered)&&(m.indexOf(".")>-1&&(m=(x=m.split(".")).shift(),x.sort()),c=m.indexOf(":")<0&&"on"+m,t=t[w.expando]?t:new w.Event(m,"object"==typeof t&&t),t.isTrigger=o?2:3,t.namespace=x.join("."),t.rnamespace=t.namespace?new RegExp("(^|\\.)"+x.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,t.result=void 0,t.target||(t.target=i),n=null==n?[t]:w.makeArray(n,[t]),d=w.event.special[m]||{},o||!d.trigger||!1!==d.trigger.apply(i,n))){if(!o&&!d.noBubble&&!y(i)){for(l=d.delegateType||m,wt.test(l+m)||(s=s.parentNode);s;s=s.parentNode)v.push(s),u=s;u===(i.ownerDocument||r)&&v.push(u.defaultView||u.parentWindow||e)}a=0;while((s=v[a++])&&!t.isPropagationStopped())h=s,t.type=a>1?l:d.bindType||m,(p=(J.get(s,"events")||{})[t.type]&&J.get(s,"handle"))&&p.apply(s,n),(p=c&&s[c])&&p.apply&&Y(s)&&(t.result=p.apply(s,n),!1===t.result&&t.preventDefault());return 
t.type=m,o||t.isDefaultPrevented()||d._default&&!1!==d._default.apply(v.pop(),n)||!Y(i)||c&&g(i[m])&&!y(i)&&((u=i[c])&&(i[c]=null),w.event.triggered=m,t.isPropagationStopped()&&h.addEventListener(m,Tt),i[m](),t.isPropagationStopped()&&h.removeEventListener(m,Tt),w.event.triggered=void 0,u&&(i[c]=u)),t.result}},simulate:function(e,t,n){var r=w.extend(new w.Event,n,{type:e,isSimulated:!0});w.event.trigger(r,null,t)}}),w.fn.extend({trigger:function(e,t){return this.each(function(){w.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return w.event.trigger(e,t,n,!0)}}),h.focusin||w.each({focus:"focusin",blur:"focusout"},function(e,t){var n=function(e){w.event.simulate(t,e.target,w.event.fix(e))};w.event.special[t]={setup:function(){var r=this.ownerDocument||this,i=J.access(r,t);i||r.addEventListener(e,n,!0),J.access(r,t,(i||0)+1)},teardown:function(){var r=this.ownerDocument||this,i=J.access(r,t)-1;i?J.access(r,t,i):(r.removeEventListener(e,n,!0),J.remove(r,t))}}});var Ct=e.location,Et=Date.now(),kt=/\?/;w.parseXML=function(t){var n;if(!t||"string"!=typeof t)return null;try{n=(new e.DOMParser).parseFromString(t,"text/xml")}catch(e){n=void 0}return n&&!n.getElementsByTagName("parsererror").length||w.error("Invalid XML: "+t),n};var St=/\[\]$/,Dt=/\r?\n/g,Nt=/^(?:submit|button|image|reset|file)$/i,At=/^(?:input|select|textarea|keygen)/i;function jt(e,t,n,r){var i;if(Array.isArray(t))w.each(t,function(t,i){n||St.test(e)?r(e,i):jt(e+"["+("object"==typeof i&&null!=i?t:"")+"]",i,n,r)});else if(n||"object"!==x(t))r(e,t);else for(i in t)jt(e+"["+i+"]",t[i],n,r)}w.param=function(e,t){var n,r=[],i=function(e,t){var n=g(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(Array.isArray(e)||e.jquery&&!w.isPlainObject(e))w.each(e,function(){i(this.name,this.value)});else for(n in e)jt(n,e[n],t,i);return r.join("&")},w.fn.extend({serialize:function(){return w.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=w.prop(this,"elements");return e?w.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!w(this).is(":disabled")&&At.test(this.nodeName)&&!Nt.test(e)&&(this.checked||!pe.test(e))}).map(function(e,t){var n=w(this).val();return null==n?null:Array.isArray(n)?w.map(n,function(e){return{name:t.name,value:e.replace(Dt,"\r\n")}}):{name:t.name,value:n.replace(Dt,"\r\n")}}).get()}});var qt=/%20/g,Lt=/#.*$/,Ht=/([?&])_=[^&]*/,Ot=/^(.*?):[ \t]*([^\r\n]*)$/gm,Pt=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Mt=/^(?:GET|HEAD)$/,Rt=/^\/\//,It={},Wt={},$t="*/".concat("*"),Bt=r.createElement("a");Bt.href=Ct.href;function Ft(e){return function(t,n){"string"!=typeof t&&(n=t,t="*");var r,i=0,o=t.toLowerCase().match(M)||[];if(g(n))while(r=o[i++])"+"===r[0]?(r=r.slice(1)||"*",(e[r]=e[r]||[]).unshift(n)):(e[r]=e[r]||[]).push(n)}}function _t(e,t,n,r){var i={},o=e===Wt;function a(s){var u;return i[s]=!0,w.each(e[s]||[],function(e,s){var l=s(t,n,r);return"string"!=typeof l||o||i[l]?o?!(u=l):void 0:(t.dataTypes.unshift(l),a(l),!1)}),u}return a(t.dataTypes[0])||!i["*"]&&a("*")}function zt(e,t){var n,r,i=w.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&w.extend(!0,e,r),e}function Xt(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" 
"+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}function Ut(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}w.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Ct.href,type:"GET",isLocal:Pt.test(Ct.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":$t,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":w.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?zt(zt(e,w.ajaxSettings),t):zt(w.ajaxSettings,e)},ajaxPrefilter:Ft(It),ajaxTransport:Ft(Wt),ajax:function(t,n){"object"==typeof t&&(n=t,t=void 0),n=n||{};var i,o,a,s,u,l,c,f,p,d,h=w.ajaxSetup({},n),g=h.context||h,y=h.context&&(g.nodeType||g.jquery)?w(g):w.event,v=w.Deferred(),m=w.Callbacks("once memory"),x=h.statusCode||{},b={},T={},C="canceled",E={readyState:0,getResponseHeader:function(e){var t;if(c){if(!s){s={};while(t=Ot.exec(a))s[t[1].toLowerCase()]=t[2]}t=s[e.toLowerCase()]}return null==t?null:t},getAllResponseHeaders:function(){return c?a:null},setRequestHeader:function(e,t){return null==c&&(e=T[e.toLowerCase()]=T[e.toLowerCase()]||e,b[e]=t),this},overrideMimeType:function(e){return null==c&&(h.mimeType=e),this},statusCode:function(e){var t;if(e)if(c)E.always(e[E.status]);else for(t in e)x[t]=[x[t],e[t]];return this},abort:function(e){var t=e||C;return i&&i.abort(t),k(0,t),this}};if(v.promise(E),h.url=((t||h.url||Ct.href)+"").replace(Rt,Ct.protocol+"//"),h.type=n.method||n.type||h.method||h.type,h.dataTypes=(h.dataType||"*").toLowerCase().match(M)||[""],null==h.crossDomain){l=r.createElement("a");try{l.href=h.url,l.href=l.href,h.crossDomain=Bt.protocol+"//"+Bt.host!=l.protocol+"//"+l.host}catch(e){h.crossDomain=!0}}if(h.data&&h.processData&&"string"!=typeof h.data&&(h.data=w.param(h.data,h.traditional)),_t(It,h,n,E),c)return E;(f=w.event&&h.global)&&0==w.active++&&w.event.trigger("ajaxStart"),h.type=h.type.toUpperCase(),h.hasContent=!Mt.test(h.type),o=h.url.replace(Lt,""),h.hasContent?h.data&&h.processData&&0===(h.contentType||"").indexOf("application/x-www-form-urlencoded")&&(h.data=h.data.replace(qt,"+")):(d=h.url.slice(o.length),h.data&&(h.processData||"string"==typeof h.data)&&(o+=(kt.test(o)?"&":"?")+h.data,delete h.data),!1===h.cache&&(o=o.replace(Ht,"$1"),d=(kt.test(o)?"&":"?")+"_="+Et+++d),h.url=o+d),h.ifModified&&(w.lastModified[o]&&E.setRequestHeader("If-Modified-Since",w.lastModified[o]),w.etag[o]&&E.setRequestHeader("If-None-Match",w.etag[o])),(h.data&&h.hasContent&&!1!==h.contentType||n.contentType)&&E.setRequestHeader("Content-Type",h.contentType),E.setRequestHeader("Accept",h.dataTypes[0]&&h.accepts[h.dataTypes[0]]?h.accepts[h.dataTypes[0]]+("*"!==h.dataTypes[0]?", "+$t+"; 
q=0.01":""):h.accepts["*"]);for(p in h.headers)E.setRequestHeader(p,h.headers[p]);if(h.beforeSend&&(!1===h.beforeSend.call(g,E,h)||c))return E.abort();if(C="abort",m.add(h.complete),E.done(h.success),E.fail(h.error),i=_t(Wt,h,n,E)){if(E.readyState=1,f&&y.trigger("ajaxSend",[E,h]),c)return E;h.async&&h.timeout>0&&(u=e.setTimeout(function(){E.abort("timeout")},h.timeout));try{c=!1,i.send(b,k)}catch(e){if(c)throw e;k(-1,e)}}else k(-1,"No Transport");function k(t,n,r,s){var l,p,d,b,T,C=n;c||(c=!0,u&&e.clearTimeout(u),i=void 0,a=s||"",E.readyState=t>0?4:0,l=t>=200&&t<300||304===t,r&&(b=Xt(h,E,r)),b=Ut(h,b,E,l),l?(h.ifModified&&((T=E.getResponseHeader("Last-Modified"))&&(w.lastModified[o]=T),(T=E.getResponseHeader("etag"))&&(w.etag[o]=T)),204===t||"HEAD"===h.type?C="nocontent":304===t?C="notmodified":(C=b.state,p=b.data,l=!(d=b.error))):(d=C,!t&&C||(C="error",t<0&&(t=0))),E.status=t,E.statusText=(n||C)+"",l?v.resolveWith(g,[p,C,E]):v.rejectWith(g,[E,C,d]),E.statusCode(x),x=void 0,f&&y.trigger(l?"ajaxSuccess":"ajaxError",[E,h,l?p:d]),m.fireWith(g,[E,C]),f&&(y.trigger("ajaxComplete",[E,h]),--w.active||w.event.trigger("ajaxStop")))}return E},getJSON:function(e,t,n){return w.get(e,t,n,"json")},getScript:function(e,t){return w.get(e,void 0,t,"script")}}),w.each(["get","post"],function(e,t){w[t]=function(e,n,r,i){return g(n)&&(i=i||r,r=n,n=void 0),w.ajax(w.extend({url:e,type:t,dataType:i,data:n,success:r},w.isPlainObject(e)&&e))}}),w._evalUrl=function(e){return w.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,"throws":!0})},w.fn.extend({wrapAll:function(e){var t;return this[0]&&(g(e)&&(e=e.call(this[0])),t=w(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(e){return g(e)?this.each(function(t){w(this).wrapInner(e.call(this,t))}):this.each(function(){var t=w(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=g(e);return this.each(function(n){w(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(e){return this.parent(e).not("body").each(function(){w(this).replaceWith(this.childNodes)}),this}}),w.expr.pseudos.hidden=function(e){return!w.expr.pseudos.visible(e)},w.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},w.ajaxSettings.xhr=function(){try{return new e.XMLHttpRequest}catch(e){}};var Vt={0:200,1223:204},Gt=w.ajaxSettings.xhr();h.cors=!!Gt&&"withCredentials"in Gt,h.ajax=Gt=!!Gt,w.ajaxTransport(function(t){var n,r;if(h.cors||Gt&&!t.crossDomain)return{send:function(i,o){var a,s=t.xhr();if(s.open(t.type,t.url,t.async,t.username,t.password),t.xhrFields)for(a in t.xhrFields)s[a]=t.xhrFields[a];t.mimeType&&s.overrideMimeType&&s.overrideMimeType(t.mimeType),t.crossDomain||i["X-Requested-With"]||(i["X-Requested-With"]="XMLHttpRequest");for(a in i)s.setRequestHeader(a,i[a]);n=function(e){return function(){n&&(n=r=s.onload=s.onerror=s.onabort=s.ontimeout=s.onreadystatechange=null,"abort"===e?s.abort():"error"===e?"number"!=typeof s.status?o(0,"error"):o(s.status,s.statusText):o(Vt[s.status]||s.status,s.statusText,"text"!==(s.responseType||"text")||"string"!=typeof s.responseText?{binary:s.response}:{text:s.responseText},s.getAllResponseHeaders()))}},s.onload=n(),r=s.onerror=s.ontimeout=n("error"),void 
0!==s.onabort?s.onabort=r:s.onreadystatechange=function(){4===s.readyState&&e.setTimeout(function(){n&&r()})},n=n("abort");try{s.send(t.hasContent&&t.data||null)}catch(e){if(n)throw e}},abort:function(){n&&n()}}}),w.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),w.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return w.globalEval(e),e}}}),w.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),w.ajaxTransport("script",function(e){if(e.crossDomain){var t,n;return{send:function(i,o){t=w("<script>").prop({charset:e.scriptCharset,src:e.url}).on("load error",n=function(e){t.remove(),n=null,e&&o("error"===e.type?404:200,e.type)}),r.head.appendChild(t[0])},abort:function(){n&&n()}}}});var Yt=[],Qt=/(=)\?(?=&|$)|\?\?/;w.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Yt.pop()||w.expando+"_"+Et++;return this[e]=!0,e}}),w.ajaxPrefilter("json jsonp",function(t,n,r){var i,o,a,s=!1!==t.jsonp&&(Qt.test(t.url)?"url":"string"==typeof t.data&&0===(t.contentType||"").indexOf("application/x-www-form-urlencoded")&&Qt.test(t.data)&&"data");if(s||"jsonp"===t.dataTypes[0])return i=t.jsonpCallback=g(t.jsonpCallback)?t.jsonpCallback():t.jsonpCallback,s?t[s]=t[s].replace(Qt,"$1"+i):!1!==t.jsonp&&(t.url+=(kt.test(t.url)?"&":"?")+t.jsonp+"="+i),t.converters["script json"]=function(){return a||w.error(i+" was not called"),a[0]},t.dataTypes[0]="json",o=e[i],e[i]=function(){a=arguments},r.always(function(){void 0===o?w(e).removeProp(i):e[i]=o,t[i]&&(t.jsonpCallback=n.jsonpCallback,Yt.push(i)),a&&g(o)&&o(a[0]),a=o=void 0}),"script"}),h.createHTMLDocument=function(){var e=r.implementation.createHTMLDocument("").body;return e.innerHTML="<form></form><form></form>",2===e.childNodes.length}(),w.parseHTML=function(e,t,n){if("string"!=typeof e)return[];"boolean"==typeof t&&(n=t,t=!1);var i,o,a;return t||(h.createHTMLDocument?((i=(t=r.implementation.createHTMLDocument("")).createElement("base")).href=r.location.href,t.head.appendChild(i)):t=r),o=A.exec(e),a=!n&&[],o?[t.createElement(o[1])]:(o=xe([e],t,a),a&&a.length&&w(a).remove(),w.merge([],o.childNodes))},w.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return s>-1&&(r=vt(e.slice(s)),e=e.slice(0,s)),g(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),a.length>0&&w.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?w("<div>").append(w.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},w.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){w.fn[t]=function(e){return this.on(t,e)}}),w.expr.pseudos.animated=function(e){return w.grep(w.timers,function(t){return e===t.elem}).length},w.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l,c=w.css(e,"position"),f=w(e),p={};"static"===c&&(e.style.position="relative"),s=f.offset(),o=w.css(e,"top"),u=w.css(e,"left"),(l=("absolute"===c||"fixed"===c)&&(o+u).indexOf("auto")>-1)?(a=(r=f.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),g(t)&&(t=t.call(e,n,w.extend({},s))),null!=t.top&&(p.top=t.top-s.top+a),null!=t.left&&(p.left=t.left-s.left+i),"using"in t?t.using.call(e,p):f.css(p)}},w.fn.extend({offset:function(e){if(arguments.length)return void 0===e?this:this.each(function(t){w.offset.setOffset(this,e,t)});var t,n,r=this[0];if(r)return 
r.getClientRects().length?(t=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:t.top+n.pageYOffset,left:t.left+n.pageXOffset}):{top:0,left:0}},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===w.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===w.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=w(e).offset()).top+=w.css(e,"borderTopWidth",!0),i.left+=w.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-w.css(r,"marginTop",!0),left:t.left-i.left-w.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===w.css(e,"position"))e=e.offsetParent;return e||be})}}),w.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,t){var n="pageYOffset"===t;w.fn[e]=function(r){return z(this,function(e,r,i){var o;if(y(e)?o=e:9===e.nodeType&&(o=e.defaultView),void 0===i)return o?o[t]:e[r];o?o.scrollTo(n?o.pageXOffset:i,n?i:o.pageYOffset):e[r]=i},e,r,arguments.length)}}),w.each(["top","left"],function(e,t){w.cssHooks[t]=_e(h.pixelPosition,function(e,n){if(n)return n=Fe(e,t),We.test(n)?w(e).position()[t]+"px":n})}),w.each({Height:"height",Width:"width"},function(e,t){w.each({padding:"inner"+e,content:t,"":"outer"+e},function(n,r){w.fn[r]=function(i,o){var a=arguments.length&&(n||"boolean"!=typeof i),s=n||(!0===i||!0===o?"margin":"border");return z(this,function(t,n,i){var o;return y(t)?0===r.indexOf("outer")?t["inner"+e]:t.document.documentElement["client"+e]:9===t.nodeType?(o=t.documentElement,Math.max(t.body["scroll"+e],o["scroll"+e],t.body["offset"+e],o["offset"+e],o["client"+e])):void 0===i?w.css(t,n,s):w.style(t,n,i,s)},t,a?i:void 0,a)}})}),w.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,t){w.fn[t]=function(e,n){return arguments.length>0?this.on(t,null,e,n):this.trigger(t)}}),w.fn.extend({hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),w.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)}}),w.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),g(e))return r=o.call(arguments,2),i=function(){return e.apply(t||this,r.concat(o.call(arguments)))},i.guid=e.guid=e.guid||w.guid++,i},w.holdReady=function(e){e?w.readyWait++:w.ready(!0)},w.isArray=Array.isArray,w.parseJSON=JSON.parse,w.nodeName=N,w.isFunction=g,w.isWindow=y,w.camelCase=G,w.type=x,w.now=Date.now,w.isNumeric=function(e){var t=w.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},"function"==typeof define&&define.amd&&define("jquery",[],function(){return w});var Jt=e.jQuery,Kt=e.$;return w.noConflict=function(t){return e.$===w&&(e.$=Kt),t&&e.jQuery===w&&(e.jQuery=Jt),w},t||(e.jQuery=e.$=w),w});
diff --git a/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/LICENSE.txt b/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/LICENSE.txt
new file mode 100644
index 000000000..4819e5421
--- /dev/null
+++ b/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/LICENSE.txt
@@ -0,0 +1,43 @@
+Copyright jQuery Foundation and other contributors, https://jquery.org/
+
+This software consists of voluntary contributions made by many
+individuals. For exact contribution history, see the revision history
+available at https://github.com/jquery/jquery-ui
+
+The following license applies to all parts of this software except as
+documented below:
+
+====
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+====
+
+Copyright and related rights for sample code are waived via CC0. Sample
+code is defined as all source code contained within the demos directory.
+
+CC0: http://creativecommons.org/publicdomain/zero/1.0/
+
+====
+
+All files located in the node_modules and external directories are
+externally maintained libraries used by this software which have their
+own licenses; we recommend you read them, as their terms may differ from
+the terms above.
diff --git a/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.css b/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.css
new file mode 100644
index 000000000..73ee505a6
--- /dev/null
+++ b/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.css
@@ -0,0 +1,6 @@
+/*! jQuery UI - v1.12.1 - 2018-10-20
+* http://jqueryui.com
+* Includes: core.css, resizable.css
+* Copyright jQuery Foundation and other contributors; Licensed MIT */
+
+.ui-helper-hidden{display:none}.ui-helper-hidden-accessible{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.ui-helper-reset{margin:0;padding:0;border:0;outline:0;line-height:1.3;text-decoration:none;font-size:100%;list-style:none}.ui-helper-clearfix:before,.ui-helper-clearfix:after{content:"";display:table;border-collapse:collapse}.ui-helper-clearfix:after{clear:both}.ui-helper-zfix{width:100%;height:100%;top:0;left:0;position:absolute;opacity:0;filter:Alpha(Opacity=0)}.ui-front{z-index:100}.ui-state-disabled{cursor:default!important;pointer-events:none}.ui-icon{display:inline-block;vertical-align:middle;margin-top:-.25em;position:relative;text-indent:-99999px;overflow:hidden;background-repeat:no-repeat}.ui-widget-icon-block{left:50%;margin-left:-8px;display:block}.ui-widget-overlay{position:fixed;top:0;left:0;width:100%;height:100%}.ui-resizable{position:relative}.ui-resizable-handle{position:absolute;font-size:0.1px;display:block;-ms-touch-action:none;touch-action:none}.ui-resizable-disabled .ui-resizable-handle,.ui-resizable-autohide .ui-resizable-handle{display:none}.ui-resizable-n{cursor:n-resize;height:7px;width:100%;top:-5px;left:0}.ui-resizable-s{cursor:s-resize;height:7px;width:100%;bottom:-5px;left:0}.ui-resizable-e{cursor:e-resize;width:7px;right:-5px;top:0;height:100%}.ui-resizable-w{cursor:w-resize;width:7px;left:-5px;top:0;height:100%}.ui-resizable-se{cursor:se-resize;width:12px;height:12px;right:1px;bottom:1px}.ui-resizable-sw{cursor:sw-resize;width:9px;height:9px;left:-5px;bottom:-5px}.ui-resizable-nw{cursor:nw-resize;width:9px;height:9px;left:-5px;top:-5px}.ui-resizable-ne{cursor:ne-resize;width:9px;height:9px;right:-5px;top:-5px}
diff --git a/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.js b/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.js
new file mode 100644
index 000000000..20bc7d6fc
--- /dev/null
+++ b/src/wiki/static/wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.js
@@ -0,0 +1,7 @@
+/*! jQuery UI - v1.12.1 - 2018-10-20
+* http://jqueryui.com
+* Includes: widget.js, disable-selection.js, widgets/resizable.js, widgets/mouse.js, effect.js, effects/effect-blind.js, effects/effect-bounce.js, effects/effect-clip.js, effects/effect-drop.js, effects/effect-explode.js, effects/effect-fade.js, effects/effect-fold.js, effects/effect-highlight.js, effects/effect-puff.js, effects/effect-pulsate.js, effects/effect-scale.js, effects/effect-shake.js, effects/effect-size.js, effects/effect-slide.js, effects/effect-transfer.js
+* Copyright jQuery Foundation and other contributors; Licensed MIT */
+
+(function(t){"function"==typeof define&&define.amd?define(["jquery"],t):t(jQuery)})(function(t){t.ui=t.ui||{},t.ui.version="1.12.1";var e=0,i=Array.prototype.slice;t.cleanData=function(e){return function(i){var s,n,o;for(o=0;null!=(n=i[o]);o++)try{s=t._data(n,"events"),s&&s.remove&&t(n).triggerHandler("remove")}catch(a){}e(i)}}(t.cleanData),t.widget=function(e,i,s){var n,o,a,r={},l=e.split(".")[0];e=e.split(".")[1];var h=l+"-"+e;return s||(s=i,i=t.Widget),t.isArray(s)&&(s=t.extend.apply(null,[{}].concat(s))),t.expr[":"][h.toLowerCase()]=function(e){return!!t.data(e,h)},t[l]=t[l]||{},n=t[l][e],o=t[l][e]=function(t,e){return this._createWidget?(arguments.length&&this._createWidget(t,e),void 0):new o(t,e)},t.extend(o,n,{version:s.version,_proto:t.extend({},s),_childConstructors:[]}),a=new i,a.options=t.widget.extend({},a.options),t.each(s,function(e,s){return t.isFunction(s)?(r[e]=function(){function t(){return i.prototype[e].apply(this,arguments)}function n(t){return i.prototype[e].apply(this,t)}return function(){var e,i=this._super,o=this._superApply;return this._super=t,this._superApply=n,e=s.apply(this,arguments),this._super=i,this._superApply=o,e}}(),void 0):(r[e]=s,void 0)}),o.prototype=t.widget.extend(a,{widgetEventPrefix:n?a.widgetEventPrefix||e:e},r,{constructor:o,namespace:l,widgetName:e,widgetFullName:h}),n?(t.each(n._childConstructors,function(e,i){var s=i.prototype;t.widget(s.namespace+"."+s.widgetName,o,i._proto)}),delete n._childConstructors):i._childConstructors.push(o),t.widget.bridge(e,o),o},t.widget.extend=function(e){for(var s,n,o=i.call(arguments,1),a=0,r=o.length;r>a;a++)for(s in o[a])n=o[a][s],o[a].hasOwnProperty(s)&&void 0!==n&&(e[s]=t.isPlainObject(n)?t.isPlainObject(e[s])?t.widget.extend({},e[s],n):t.widget.extend({},n):n);return e},t.widget.bridge=function(e,s){var n=s.prototype.widgetFullName||e;t.fn[e]=function(o){var a="string"==typeof o,r=i.call(arguments,1),l=this;return a?this.length||"instance"!==o?this.each(function(){var i,s=t.data(this,n);return"instance"===o?(l=s,!1):s?t.isFunction(s[o])&&"_"!==o.charAt(0)?(i=s[o].apply(s,r),i!==s&&void 0!==i?(l=i&&i.jquery?l.pushStack(i.get()):i,!1):void 0):t.error("no such method '"+o+"' for "+e+" widget instance"):t.error("cannot call methods on "+e+" prior to initialization; "+"attempted to call method '"+o+"'")}):l=void 0:(r.length&&(o=t.widget.extend.apply(null,[o].concat(r))),this.each(function(){var e=t.data(this,n);e?(e.option(o||{}),e._init&&e._init()):t.data(this,n,new s(o,this))})),l}},t.Widget=function(){},t.Widget._childConstructors=[],t.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",defaultElement:"<div>",options:{classes:{},disabled:!1,create:null},_createWidget:function(i,s){s=t(s||this.defaultElement||this)[0],this.element=t(s),this.uuid=e++,this.eventNamespace="."+this.widgetName+this.uuid,this.bindings=t(),this.hoverable=t(),this.focusable=t(),this.classesElementLookup={},s!==this&&(t.data(s,this.widgetFullName,this),this._on(!0,this.element,{remove:function(t){t.target===s&&this.destroy()}}),this.document=t(s.style?s.ownerDocument:s.document||s),this.window=t(this.document[0].defaultView||this.document[0].parentWindow)),this.options=t.widget.extend({},this.options,this._getCreateOptions(),i),this._create(),this.options.disabled&&this._setOptionDisabled(this.options.disabled),this._trigger("create",null,this._getCreateEventData()),this._init()},_getCreateOptions:function(){return{}},_getCreateEventData:t.noop,_create:t.noop,_init:t.noop,destroy:function(){var 
e=this;this._destroy(),t.each(this.classesElementLookup,function(t,i){e._removeClass(i,t)}),this.element.off(this.eventNamespace).removeData(this.widgetFullName),this.widget().off(this.eventNamespace).removeAttr("aria-disabled"),this.bindings.off(this.eventNamespace)},_destroy:t.noop,widget:function(){return this.element},option:function(e,i){var s,n,o,a=e;if(0===arguments.length)return t.widget.extend({},this.options);if("string"==typeof e)if(a={},s=e.split("."),e=s.shift(),s.length){for(n=a[e]=t.widget.extend({},this.options[e]),o=0;s.length-1>o;o++)n[s[o]]=n[s[o]]||{},n=n[s[o]];if(e=s.pop(),1===arguments.length)return void 0===n[e]?null:n[e];n[e]=i}else{if(1===arguments.length)return void 0===this.options[e]?null:this.options[e];a[e]=i}return this._setOptions(a),this},_setOptions:function(t){var e;for(e in t)this._setOption(e,t[e]);return this},_setOption:function(t,e){return"classes"===t&&this._setOptionClasses(e),this.options[t]=e,"disabled"===t&&this._setOptionDisabled(e),this},_setOptionClasses:function(e){var i,s,n;for(i in e)n=this.classesElementLookup[i],e[i]!==this.options.classes[i]&&n&&n.length&&(s=t(n.get()),this._removeClass(n,i),s.addClass(this._classes({element:s,keys:i,classes:e,add:!0})))},_setOptionDisabled:function(t){this._toggleClass(this.widget(),this.widgetFullName+"-disabled",null,!!t),t&&(this._removeClass(this.hoverable,null,"ui-state-hover"),this._removeClass(this.focusable,null,"ui-state-focus"))},enable:function(){return this._setOptions({disabled:!1})},disable:function(){return this._setOptions({disabled:!0})},_classes:function(e){function i(i,o){var a,r;for(r=0;i.length>r;r++)a=n.classesElementLookup[i[r]]||t(),a=e.add?t(t.unique(a.get().concat(e.element.get()))):t(a.not(e.element).get()),n.classesElementLookup[i[r]]=a,s.push(i[r]),o&&e.classes[i[r]]&&s.push(e.classes[i[r]])}var s=[],n=this;return e=t.extend({element:this.element,classes:this.options.classes||{}},e),this._on(e.element,{remove:"_untrackClassesElement"}),e.keys&&i(e.keys.match(/\S+/g)||[],!0),e.extra&&i(e.extra.match(/\S+/g)||[]),s.join(" ")},_untrackClassesElement:function(e){var i=this;t.each(i.classesElementLookup,function(s,n){-1!==t.inArray(e.target,n)&&(i.classesElementLookup[s]=t(n.not(e.target).get()))})},_removeClass:function(t,e,i){return this._toggleClass(t,e,i,!1)},_addClass:function(t,e,i){return this._toggleClass(t,e,i,!0)},_toggleClass:function(t,e,i,s){s="boolean"==typeof s?s:i;var n="string"==typeof t||null===t,o={extra:n?e:i,keys:n?t:e,element:n?this.element:t,add:s};return o.element.toggleClass(this._classes(o),s),this},_on:function(e,i,s){var n,o=this;"boolean"!=typeof e&&(s=i,i=e,e=!1),s?(i=n=t(i),this.bindings=this.bindings.add(i)):(s=i,i=this.element,n=this.widget()),t.each(s,function(s,a){function r(){return e||o.options.disabled!==!0&&!t(this).hasClass("ui-state-disabled")?("string"==typeof a?o[a]:a).apply(o,arguments):void 0}"string"!=typeof a&&(r.guid=a.guid=a.guid||r.guid||t.guid++);var l=s.match(/^([\w:-]*)\s*(.*)$/),h=l[1]+o.eventNamespace,c=l[2];c?n.on(h,c,r):i.on(h,r)})},_off:function(e,i){i=(i||"").split(" ").join(this.eventNamespace+" ")+this.eventNamespace,e.off(i).off(i),this.bindings=t(this.bindings.not(e).get()),this.focusable=t(this.focusable.not(e).get()),this.hoverable=t(this.hoverable.not(e).get())},_delay:function(t,e){function i(){return("string"==typeof t?s[t]:t).apply(s,arguments)}var s=this;return 
setTimeout(i,e||0)},_hoverable:function(e){this.hoverable=this.hoverable.add(e),this._on(e,{mouseenter:function(e){this._addClass(t(e.currentTarget),null,"ui-state-hover")},mouseleave:function(e){this._removeClass(t(e.currentTarget),null,"ui-state-hover")}})},_focusable:function(e){this.focusable=this.focusable.add(e),this._on(e,{focusin:function(e){this._addClass(t(e.currentTarget),null,"ui-state-focus")},focusout:function(e){this._removeClass(t(e.currentTarget),null,"ui-state-focus")}})},_trigger:function(e,i,s){var n,o,a=this.options[e];if(s=s||{},i=t.Event(i),i.type=(e===this.widgetEventPrefix?e:this.widgetEventPrefix+e).toLowerCase(),i.target=this.element[0],o=i.originalEvent)for(n in o)n in i||(i[n]=o[n]);return this.element.trigger(i,s),!(t.isFunction(a)&&a.apply(this.element[0],[i].concat(s))===!1||i.isDefaultPrevented())}},t.each({show:"fadeIn",hide:"fadeOut"},function(e,i){t.Widget.prototype["_"+e]=function(s,n,o){"string"==typeof n&&(n={effect:n});var a,r=n?n===!0||"number"==typeof n?i:n.effect||i:e;n=n||{},"number"==typeof n&&(n={duration:n}),a=!t.isEmptyObject(n),n.complete=o,n.delay&&s.delay(n.delay),a&&t.effects&&t.effects.effect[r]?s[e](n):r!==e&&s[r]?s[r](n.duration,n.easing,o):s.queue(function(i){t(this)[e](),o&&o.call(s[0]),i()})}}),t.widget,t.fn.extend({disableSelection:function(){var t="onselectstart"in document.createElement("div")?"selectstart":"mousedown";return function(){return this.on(t+".ui-disableSelection",function(t){t.preventDefault()})}}(),enableSelection:function(){return this.off(".ui-disableSelection")}}),t.ui.ie=!!/msie [\w.]+/.exec(navigator.userAgent.toLowerCase());var s=!1;t(document).on("mouseup",function(){s=!1}),t.widget("ui.mouse",{version:"1.12.1",options:{cancel:"input, textarea, button, select, option",distance:1,delay:0},_mouseInit:function(){var e=this;this.element.on("mousedown."+this.widgetName,function(t){return e._mouseDown(t)}).on("click."+this.widgetName,function(i){return!0===t.data(i.target,e.widgetName+".preventClickEvent")?(t.removeData(i.target,e.widgetName+".preventClickEvent"),i.stopImmediatePropagation(),!1):void 0}),this.started=!1},_mouseDestroy:function(){this.element.off("."+this.widgetName),this._mouseMoveDelegate&&this.document.off("mousemove."+this.widgetName,this._mouseMoveDelegate).off("mouseup."+this.widgetName,this._mouseUpDelegate)},_mouseDown:function(e){if(!s){this._mouseMoved=!1,this._mouseStarted&&this._mouseUp(e),this._mouseDownEvent=e;var i=this,n=1===e.which,o="string"==typeof this.options.cancel&&e.target.nodeName?t(e.target).closest(this.options.cancel).length:!1;return n&&!o&&this._mouseCapture(e)?(this.mouseDelayMet=!this.options.delay,this.mouseDelayMet||(this._mouseDelayTimer=setTimeout(function(){i.mouseDelayMet=!0},this.options.delay)),this._mouseDistanceMet(e)&&this._mouseDelayMet(e)&&(this._mouseStarted=this._mouseStart(e)!==!1,!this._mouseStarted)?(e.preventDefault(),!0):(!0===t.data(e.target,this.widgetName+".preventClickEvent")&&t.removeData(e.target,this.widgetName+".preventClickEvent"),this._mouseMoveDelegate=function(t){return i._mouseMove(t)},this._mouseUpDelegate=function(t){return i._mouseUp(t)},this.document.on("mousemove."+this.widgetName,this._mouseMoveDelegate).on("mouseup."+this.widgetName,this._mouseUpDelegate),e.preventDefault(),s=!0,!0)):!0}},_mouseMove:function(e){if(this._mouseMoved){if(t.ui.ie&&(!document.documentMode||9>document.documentMode)&&!e.button)return 
this._mouseUp(e);if(!e.which)if(e.originalEvent.altKey||e.originalEvent.ctrlKey||e.originalEvent.metaKey||e.originalEvent.shiftKey)this.ignoreMissingWhich=!0;else if(!this.ignoreMissingWhich)return this._mouseUp(e)}return(e.which||e.button)&&(this._mouseMoved=!0),this._mouseStarted?(this._mouseDrag(e),e.preventDefault()):(this._mouseDistanceMet(e)&&this._mouseDelayMet(e)&&(this._mouseStarted=this._mouseStart(this._mouseDownEvent,e)!==!1,this._mouseStarted?this._mouseDrag(e):this._mouseUp(e)),!this._mouseStarted)},_mouseUp:function(e){this.document.off("mousemove."+this.widgetName,this._mouseMoveDelegate).off("mouseup."+this.widgetName,this._mouseUpDelegate),this._mouseStarted&&(this._mouseStarted=!1,e.target===this._mouseDownEvent.target&&t.data(e.target,this.widgetName+".preventClickEvent",!0),this._mouseStop(e)),this._mouseDelayTimer&&(clearTimeout(this._mouseDelayTimer),delete this._mouseDelayTimer),this.ignoreMissingWhich=!1,s=!1,e.preventDefault()},_mouseDistanceMet:function(t){return Math.max(Math.abs(this._mouseDownEvent.pageX-t.pageX),Math.abs(this._mouseDownEvent.pageY-t.pageY))>=this.options.distance},_mouseDelayMet:function(){return this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return!0}}),t.ui.plugin={add:function(e,i,s){var n,o=t.ui[e].prototype;for(n in s)o.plugins[n]=o.plugins[n]||[],o.plugins[n].push([i,s[n]])},call:function(t,e,i,s){var n,o=t.plugins[e];if(o&&(s||t.element[0].parentNode&&11!==t.element[0].parentNode.nodeType))for(n=0;o.length>n;n++)t.options[o[n][0]]&&o[n][1].apply(t.element,i)}},t.widget("ui.resizable",t.ui.mouse,{version:"1.12.1",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,classes:{"ui-resizable-se":"ui-icon ui-icon-gripsmall-diagonal-se"},containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:90,resize:null,start:null,stop:null},_num:function(t){return parseFloat(t)||0},_isNumber:function(t){return!isNaN(parseFloat(t))},_hasScroll:function(e,i){if("hidden"===t(e).css("overflow"))return!1;var s=i&&"left"===i?"scrollLeft":"scrollTop",n=!1;return e[s]>0?!0:(e[s]=1,n=e[s]>0,e[s]=0,n)},_create:function(){var e,i=this.options,s=this;this._addClass("ui-resizable"),t.extend(this,{_aspectRatio:!!i.aspectRatio,aspectRatio:i.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:i.helper||i.ghost||i.animate?i.helper||"ui-resizable-helper":null}),this.element[0].nodeName.match(/^(canvas|textarea|input|select|button|img)$/i)&&(this.element.wrap(t("<div class='ui-wrapper' style='overflow: 
hidden;'></div>").css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")})),this.element=this.element.parent().data("ui-resizable",this.element.resizable("instance")),this.elementIsWrapper=!0,e={marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom"),marginLeft:this.originalElement.css("marginLeft")},this.element.css(e),this.originalElement.css("margin",0),this.originalResizeStyle=this.originalElement.css("resize"),this.originalElement.css("resize","none"),this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"})),this.originalElement.css(e),this._proportionallyResize()),this._setupHandles(),i.autoHide&&t(this.element).on("mouseenter",function(){i.disabled||(s._removeClass("ui-resizable-autohide"),s._handles.show())}).on("mouseleave",function(){i.disabled||s.resizing||(s._addClass("ui-resizable-autohide"),s._handles.hide())}),this._mouseInit()},_destroy:function(){this._mouseDestroy();var e,i=function(e){t(e).removeData("resizable").removeData("ui-resizable").off(".resizable").find(".ui-resizable-handle").remove()};return this.elementIsWrapper&&(i(this.element),e=this.element,this.originalElement.css({position:e.css("position"),width:e.outerWidth(),height:e.outerHeight(),top:e.css("top"),left:e.css("left")}).insertAfter(e),e.remove()),this.originalElement.css("resize",this.originalResizeStyle),i(this.originalElement),this},_setOption:function(t,e){switch(this._super(t,e),t){case"handles":this._removeHandles(),this._setupHandles();break;default:}},_setupHandles:function(){var e,i,s,n,o,a=this.options,r=this;if(this.handles=a.handles||(t(".ui-resizable-handle",this.element).length?{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"}:"e,s,se"),this._handles=t(),this.handles.constructor===String)for("all"===this.handles&&(this.handles="n,e,s,w,se,sw,ne,nw"),s=this.handles.split(","),this.handles={},i=0;s.length>i;i++)e=t.trim(s[i]),n="ui-resizable-"+e,o=t("<div>"),this._addClass(o,"ui-resizable-handle "+n),o.css({zIndex:a.zIndex}),this.handles[e]=".ui-resizable-"+e,this.element.append(o);this._renderAxis=function(e){var i,s,n,o;e=e||this.element;for(i in 
this.handles)this.handles[i].constructor===String?this.handles[i]=this.element.children(this.handles[i]).first().show():(this.handles[i].jquery||this.handles[i].nodeType)&&(this.handles[i]=t(this.handles[i]),this._on(this.handles[i],{mousedown:r._mouseDown})),this.elementIsWrapper&&this.originalElement[0].nodeName.match(/^(textarea|input|select|button)$/i)&&(s=t(this.handles[i],this.element),o=/sw|ne|nw|se|n|s/.test(i)?s.outerHeight():s.outerWidth(),n=["padding",/ne|nw|n/.test(i)?"Top":/se|sw|s/.test(i)?"Bottom":/^e$/.test(i)?"Right":"Left"].join(""),e.css(n,o),this._proportionallyResize()),this._handles=this._handles.add(this.handles[i])},this._renderAxis(this.element),this._handles=this._handles.add(this.element.find(".ui-resizable-handle")),this._handles.disableSelection(),this._handles.on("mouseover",function(){r.resizing||(this.className&&(o=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i)),r.axis=o&&o[1]?o[1]:"se")}),a.autoHide&&(this._handles.hide(),this._addClass("ui-resizable-autohide"))},_removeHandles:function(){this._handles.remove()},_mouseCapture:function(e){var i,s,n=!1;for(i in this.handles)s=t(this.handles[i])[0],(s===e.target||t.contains(s,e.target))&&(n=!0);return!this.options.disabled&&n},_mouseStart:function(e){var i,s,n,o=this.options,a=this.element;return this.resizing=!0,this._renderProxy(),i=this._num(this.helper.css("left")),s=this._num(this.helper.css("top")),o.containment&&(i+=t(o.containment).scrollLeft()||0,s+=t(o.containment).scrollTop()||0),this.offset=this.helper.offset(),this.position={left:i,top:s},this.size=this._helper?{width:this.helper.width(),height:this.helper.height()}:{width:a.width(),height:a.height()},this.originalSize=this._helper?{width:a.outerWidth(),height:a.outerHeight()}:{width:a.width(),height:a.height()},this.sizeDiff={width:a.outerWidth()-a.width(),height:a.outerHeight()-a.height()},this.originalPosition={left:i,top:s},this.originalMousePosition={left:e.pageX,top:e.pageY},this.aspectRatio="number"==typeof o.aspectRatio?o.aspectRatio:this.originalSize.width/this.originalSize.height||1,n=t(".ui-resizable-"+this.axis).css("cursor"),t("body").css("cursor","auto"===n?this.axis+"-resize":n),this._addClass("ui-resizable-resizing"),this._propagate("start",e),!0},_mouseDrag:function(e){var i,s,n=this.originalMousePosition,o=this.axis,a=e.pageX-n.left||0,r=e.pageY-n.top||0,l=this._change[o];return this._updatePrevProperties(),l?(i=l.apply(this,[e,a,r]),this._updateVirtualBoundaries(e.shiftKey),(this._aspectRatio||e.shiftKey)&&(i=this._updateRatio(i,e)),i=this._respectSize(i,e),this._updateCache(i),this._propagate("resize",e),s=this._applyChanges(),!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize(),t.isEmptyObject(s)||(this._updatePrevProperties(),this._trigger("resize",e,this.ui()),this._applyChanges()),!1):!1},_mouseStop:function(e){this.resizing=!1;var i,s,n,o,a,r,l,h=this.options,c=this;return 
this._helper&&(i=this._proportionallyResizeElements,s=i.length&&/textarea/i.test(i[0].nodeName),n=s&&this._hasScroll(i[0],"left")?0:c.sizeDiff.height,o=s?0:c.sizeDiff.width,a={width:c.helper.width()-o,height:c.helper.height()-n},r=parseFloat(c.element.css("left"))+(c.position.left-c.originalPosition.left)||null,l=parseFloat(c.element.css("top"))+(c.position.top-c.originalPosition.top)||null,h.animate||this.element.css(t.extend(a,{top:l,left:r})),c.helper.height(c.size.height),c.helper.width(c.size.width),this._helper&&!h.animate&&this._proportionallyResize()),t("body").css("cursor","auto"),this._removeClass("ui-resizable-resizing"),this._propagate("stop",e),this._helper&&this.helper.remove(),!1},_updatePrevProperties:function(){this.prevPosition={top:this.position.top,left:this.position.left},this.prevSize={width:this.size.width,height:this.size.height}},_applyChanges:function(){var t={};return this.position.top!==this.prevPosition.top&&(t.top=this.position.top+"px"),this.position.left!==this.prevPosition.left&&(t.left=this.position.left+"px"),this.size.width!==this.prevSize.width&&(t.width=this.size.width+"px"),this.size.height!==this.prevSize.height&&(t.height=this.size.height+"px"),this.helper.css(t),t},_updateVirtualBoundaries:function(t){var e,i,s,n,o,a=this.options;o={minWidth:this._isNumber(a.minWidth)?a.minWidth:0,maxWidth:this._isNumber(a.maxWidth)?a.maxWidth:1/0,minHeight:this._isNumber(a.minHeight)?a.minHeight:0,maxHeight:this._isNumber(a.maxHeight)?a.maxHeight:1/0},(this._aspectRatio||t)&&(e=o.minHeight*this.aspectRatio,s=o.minWidth/this.aspectRatio,i=o.maxHeight*this.aspectRatio,n=o.maxWidth/this.aspectRatio,e>o.minWidth&&(o.minWidth=e),s>o.minHeight&&(o.minHeight=s),o.maxWidth>i&&(o.maxWidth=i),o.maxHeight>n&&(o.maxHeight=n)),this._vBoundaries=o},_updateCache:function(t){this.offset=this.helper.offset(),this._isNumber(t.left)&&(this.position.left=t.left),this._isNumber(t.top)&&(this.position.top=t.top),this._isNumber(t.height)&&(this.size.height=t.height),this._isNumber(t.width)&&(this.size.width=t.width)},_updateRatio:function(t){var e=this.position,i=this.size,s=this.axis;return this._isNumber(t.height)?t.width=t.height*this.aspectRatio:this._isNumber(t.width)&&(t.height=t.width/this.aspectRatio),"sw"===s&&(t.left=e.left+(i.width-t.width),t.top=null),"nw"===s&&(t.top=e.top+(i.height-t.height),t.left=e.left+(i.width-t.width)),t},_respectSize:function(t){var e=this._vBoundaries,i=this.axis,s=this._isNumber(t.width)&&e.maxWidth&&e.maxWidth<t.width,n=this._isNumber(t.height)&&e.maxHeight&&e.maxHeight<t.height,o=this._isNumber(t.width)&&e.minWidth&&e.minWidth>t.width,a=this._isNumber(t.height)&&e.minHeight&&e.minHeight>t.height,r=this.originalPosition.left+this.originalSize.width,l=this.originalPosition.top+this.originalSize.height,h=/sw|nw|w/.test(i),c=/nw|ne|n/.test(i);return o&&(t.width=e.minWidth),a&&(t.height=e.minHeight),s&&(t.width=e.maxWidth),n&&(t.height=e.maxHeight),o&&h&&(t.left=r-e.minWidth),s&&h&&(t.left=r-e.maxWidth),a&&c&&(t.top=l-e.minHeight),n&&c&&(t.top=l-e.maxHeight),t.width||t.height||t.left||!t.top?t.width||t.height||t.top||!t.left||(t.left=null):t.top=null,t},_getPaddingPlusBorderDimensions:function(t){for(var 
e=0,i=[],s=[t.css("borderTopWidth"),t.css("borderRightWidth"),t.css("borderBottomWidth"),t.css("borderLeftWidth")],n=[t.css("paddingTop"),t.css("paddingRight"),t.css("paddingBottom"),t.css("paddingLeft")];4>e;e++)i[e]=parseFloat(s[e])||0,i[e]+=parseFloat(n[e])||0;return{height:i[0]+i[2],width:i[1]+i[3]}},_proportionallyResize:function(){if(this._proportionallyResizeElements.length)for(var t,e=0,i=this.helper||this.element;this._proportionallyResizeElements.length>e;e++)t=this._proportionallyResizeElements[e],this.outerDimensions||(this.outerDimensions=this._getPaddingPlusBorderDimensions(t)),t.css({height:i.height()-this.outerDimensions.height||0,width:i.width()-this.outerDimensions.width||0})},_renderProxy:function(){var e=this.element,i=this.options;this.elementOffset=e.offset(),this._helper?(this.helper=this.helper||t("<div style='overflow:hidden;'></div>"),this._addClass(this.helper,this._helper),this.helper.css({width:this.element.outerWidth(),height:this.element.outerHeight(),position:"absolute",left:this.elementOffset.left+"px",top:this.elementOffset.top+"px",zIndex:++i.zIndex}),this.helper.appendTo("body").disableSelection()):this.helper=this.element},_change:{e:function(t,e){return{width:this.originalSize.width+e}},w:function(t,e){var i=this.originalSize,s=this.originalPosition;return{left:s.left+e,width:i.width-e}},n:function(t,e,i){var s=this.originalSize,n=this.originalPosition;return{top:n.top+i,height:s.height-i}},s:function(t,e,i){return{height:this.originalSize.height+i}},se:function(e,i,s){return t.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[e,i,s]))},sw:function(e,i,s){return t.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[e,i,s]))},ne:function(e,i,s){return t.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[e,i,s]))},nw:function(e,i,s){return t.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[e,i,s]))}},_propagate:function(e,i){t.ui.plugin.call(this,e,[i,this.ui()]),"resize"!==e&&this._trigger(e,i,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}),t.ui.plugin.add("resizable","animate",{stop:function(e){var i=t(this).resizable("instance"),s=i.options,n=i._proportionallyResizeElements,o=n.length&&/textarea/i.test(n[0].nodeName),a=o&&i._hasScroll(n[0],"left")?0:i.sizeDiff.height,r=o?0:i.sizeDiff.width,l={width:i.size.width-r,height:i.size.height-a},h=parseFloat(i.element.css("left"))+(i.position.left-i.originalPosition.left)||null,c=parseFloat(i.element.css("top"))+(i.position.top-i.originalPosition.top)||null;i.element.animate(t.extend(l,c&&h?{top:c,left:h}:{}),{duration:s.animateDuration,easing:s.animateEasing,step:function(){var s={width:parseFloat(i.element.css("width")),height:parseFloat(i.element.css("height")),top:parseFloat(i.element.css("top")),left:parseFloat(i.element.css("left"))};n&&n.length&&t(n[0]).css({width:s.width,height:s.height}),i._updateCache(s),i._propagate("resize",e)}})}}),t.ui.plugin.add("resizable","containment",{start:function(){var e,i,s,n,o,a,r,l=t(this).resizable("instance"),h=l.options,c=l.element,u=h.containment,d=u instanceof 
t?u.get(0):/parent/.test(u)?c.parent().get(0):u;d&&(l.containerElement=t(d),/document/.test(u)||u===document?(l.containerOffset={left:0,top:0},l.containerPosition={left:0,top:0},l.parentData={element:t(document),left:0,top:0,width:t(document).width(),height:t(document).height()||document.body.parentNode.scrollHeight}):(e=t(d),i=[],t(["Top","Right","Left","Bottom"]).each(function(t,s){i[t]=l._num(e.css("padding"+s))}),l.containerOffset=e.offset(),l.containerPosition=e.position(),l.containerSize={height:e.innerHeight()-i[3],width:e.innerWidth()-i[1]},s=l.containerOffset,n=l.containerSize.height,o=l.containerSize.width,a=l._hasScroll(d,"left")?d.scrollWidth:o,r=l._hasScroll(d)?d.scrollHeight:n,l.parentData={element:d,left:s.left,top:s.top,width:a,height:r}))},resize:function(e){var i,s,n,o,a=t(this).resizable("instance"),r=a.options,l=a.containerOffset,h=a.position,c=a._aspectRatio||e.shiftKey,u={top:0,left:0},d=a.containerElement,p=!0;d[0]!==document&&/static/.test(d.css("position"))&&(u=l),h.left<(a._helper?l.left:0)&&(a.size.width=a.size.width+(a._helper?a.position.left-l.left:a.position.left-u.left),c&&(a.size.height=a.size.width/a.aspectRatio,p=!1),a.position.left=r.helper?l.left:0),h.top<(a._helper?l.top:0)&&(a.size.height=a.size.height+(a._helper?a.position.top-l.top:a.position.top),c&&(a.size.width=a.size.height*a.aspectRatio,p=!1),a.position.top=a._helper?l.top:0),n=a.containerElement.get(0)===a.element.parent().get(0),o=/relative|absolute/.test(a.containerElement.css("position")),n&&o?(a.offset.left=a.parentData.left+a.position.left,a.offset.top=a.parentData.top+a.position.top):(a.offset.left=a.element.offset().left,a.offset.top=a.element.offset().top),i=Math.abs(a.sizeDiff.width+(a._helper?a.offset.left-u.left:a.offset.left-l.left)),s=Math.abs(a.sizeDiff.height+(a._helper?a.offset.top-u.top:a.offset.top-l.top)),i+a.size.width>=a.parentData.width&&(a.size.width=a.parentData.width-i,c&&(a.size.height=a.size.width/a.aspectRatio,p=!1)),s+a.size.height>=a.parentData.height&&(a.size.height=a.parentData.height-s,c&&(a.size.width=a.size.height*a.aspectRatio,p=!1)),p||(a.position.left=a.prevPosition.left,a.position.top=a.prevPosition.top,a.size.width=a.prevSize.width,a.size.height=a.prevSize.height)},stop:function(){var e=t(this).resizable("instance"),i=e.options,s=e.containerOffset,n=e.containerPosition,o=e.containerElement,a=t(e.helper),r=a.offset(),l=a.outerWidth()-e.sizeDiff.width,h=a.outerHeight()-e.sizeDiff.height;e._helper&&!i.animate&&/relative/.test(o.css("position"))&&t(this).css({left:r.left-n.left-s.left,width:l,height:h}),e._helper&&!i.animate&&/static/.test(o.css("position"))&&t(this).css({left:r.left-n.left-s.left,width:l,height:h})}}),t.ui.plugin.add("resizable","alsoResize",{start:function(){var e=t(this).resizable("instance"),i=e.options;t(i.alsoResize).each(function(){var e=t(this);e.data("ui-resizable-alsoresize",{width:parseFloat(e.width()),height:parseFloat(e.height()),left:parseFloat(e.css("left")),top:parseFloat(e.css("top"))})})},resize:function(e,i){var s=t(this).resizable("instance"),n=s.options,o=s.originalSize,a=s.originalPosition,r={height:s.size.height-o.height||0,width:s.size.width-o.width||0,top:s.position.top-a.top||0,left:s.position.left-a.left||0};t(n.alsoResize).each(function(){var e=t(this),s=t(this).data("ui-resizable-alsoresize"),n={},o=e.parents(i.originalElement[0]).length?["width","height"]:["width","height","top","left"];t.each(o,function(t,e){var 
i=(s[e]||0)+(r[e]||0);i&&i>=0&&(n[e]=i||null)}),e.css(n)})},stop:function(){t(this).removeData("ui-resizable-alsoresize")}}),t.ui.plugin.add("resizable","ghost",{start:function(){var e=t(this).resizable("instance"),i=e.size;e.ghost=e.originalElement.clone(),e.ghost.css({opacity:.25,display:"block",position:"relative",height:i.height,width:i.width,margin:0,left:0,top:0}),e._addClass(e.ghost,"ui-resizable-ghost"),t.uiBackCompat!==!1&&"string"==typeof e.options.ghost&&e.ghost.addClass(this.options.ghost),e.ghost.appendTo(e.helper)},resize:function(){var e=t(this).resizable("instance");e.ghost&&e.ghost.css({position:"relative",height:e.size.height,width:e.size.width})},stop:function(){var e=t(this).resizable("instance");e.ghost&&e.helper&&e.helper.get(0).removeChild(e.ghost.get(0))}}),t.ui.plugin.add("resizable","grid",{resize:function(){var e,i=t(this).resizable("instance"),s=i.options,n=i.size,o=i.originalSize,a=i.originalPosition,r=i.axis,l="number"==typeof s.grid?[s.grid,s.grid]:s.grid,h=l[0]||1,c=l[1]||1,u=Math.round((n.width-o.width)/h)*h,d=Math.round((n.height-o.height)/c)*c,p=o.width+u,f=o.height+d,g=s.maxWidth&&p>s.maxWidth,m=s.maxHeight&&f>s.maxHeight,_=s.minWidth&&s.minWidth>p,v=s.minHeight&&s.minHeight>f;s.grid=l,_&&(p+=h),v&&(f+=c),g&&(p-=h),m&&(f-=c),/^(se|s|e)$/.test(r)?(i.size.width=p,i.size.height=f):/^(ne)$/.test(r)?(i.size.width=p,i.size.height=f,i.position.top=a.top-d):/^(sw)$/.test(r)?(i.size.width=p,i.size.height=f,i.position.left=a.left-u):((0>=f-c||0>=p-h)&&(e=i._getPaddingPlusBorderDimensions(this)),f-c>0?(i.size.height=f,i.position.top=a.top-d):(f=c-e.height,i.size.height=f,i.position.top=a.top+o.height-f),p-h>0?(i.size.width=p,i.position.left=a.left-u):(p=h-e.width,i.size.width=p,i.position.left=a.left+o.width-p))}}),t.ui.resizable;var n="ui-effects-",o="ui-effects-style",a="ui-effects-animated",r=t;t.effects={effect:{}},function(t,e){function i(t,e,i){var s=u[e.type]||{};return null==t?i||!e.def?null:e.def:(t=s.floor?~~t:parseFloat(t),isNaN(t)?e.def:s.mod?(t+s.mod)%s.mod:0>t?0:t>s.max?s.max:t)}function s(i){var s=h(),n=s._rgba=[];return i=i.toLowerCase(),f(l,function(t,o){var a,r=o.re.exec(i),l=r&&o.parse(r),h=o.space||"rgba";return l?(a=s[h](l),s[c[h].cache]=a[c[h].cache],n=s._rgba=a._rgba,!1):e}),n.length?("0,0,0,0"===n.join()&&t.extend(n,o.transparent),s):o[i]}function n(t,e,i){return i=(i+1)%1,1>6*i?t+6*(e-t)*i:1>2*i?e:2>3*i?t+6*(e-t)*(2/3-i):t}var o,a="backgroundColor borderBottomColor borderLeftColor borderRightColor borderTopColor color columnRuleColor outlineColor textDecorationColor textEmphasisColor",r=/^([\-+])=\s*(\d+\.?\d*)/,l=[{re:/rgba?\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,parse:function(t){return[t[1],t[2],t[3],t[4]]}},{re:/rgba?\(\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,parse:function(t){return[2.55*t[1],2.55*t[2],2.55*t[3],t[4]]}},{re:/#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})/,parse:function(t){return[parseInt(t[1],16),parseInt(t[2],16),parseInt(t[3],16)]}},{re:/#([a-f0-9])([a-f0-9])([a-f0-9])/,parse:function(t){return[parseInt(t[1]+t[1],16),parseInt(t[2]+t[2],16),parseInt(t[3]+t[3],16)]}},{re:/hsla?\(\s*(\d+(?:\.\d+)?)\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,space:"hsla",parse:function(t){return[t[1],t[2]/100,t[3]/100,t[4]]}}],h=t.Color=function(e,i,s,n){return new 
t.Color.fn.parse(e,i,s,n)},c={rgba:{props:{red:{idx:0,type:"byte"},green:{idx:1,type:"byte"},blue:{idx:2,type:"byte"}}},hsla:{props:{hue:{idx:0,type:"degrees"},saturation:{idx:1,type:"percent"},lightness:{idx:2,type:"percent"}}}},u={"byte":{floor:!0,max:255},percent:{max:1},degrees:{mod:360,floor:!0}},d=h.support={},p=t("<p>")[0],f=t.each;
+p.style.cssText="background-color:rgba(1,1,1,.5)",d.rgba=p.style.backgroundColor.indexOf("rgba")>-1,f(c,function(t,e){e.cache="_"+t,e.props.alpha={idx:3,type:"percent",def:1}}),h.fn=t.extend(h.prototype,{parse:function(n,a,r,l){if(n===e)return this._rgba=[null,null,null,null],this;(n.jquery||n.nodeType)&&(n=t(n).css(a),a=e);var u=this,d=t.type(n),p=this._rgba=[];return a!==e&&(n=[n,a,r,l],d="array"),"string"===d?this.parse(s(n)||o._default):"array"===d?(f(c.rgba.props,function(t,e){p[e.idx]=i(n[e.idx],e)}),this):"object"===d?(n instanceof h?f(c,function(t,e){n[e.cache]&&(u[e.cache]=n[e.cache].slice())}):f(c,function(e,s){var o=s.cache;f(s.props,function(t,e){if(!u[o]&&s.to){if("alpha"===t||null==n[t])return;u[o]=s.to(u._rgba)}u[o][e.idx]=i(n[t],e,!0)}),u[o]&&0>t.inArray(null,u[o].slice(0,3))&&(u[o][3]=1,s.from&&(u._rgba=s.from(u[o])))}),this):e},is:function(t){var i=h(t),s=!0,n=this;return f(c,function(t,o){var a,r=i[o.cache];return r&&(a=n[o.cache]||o.to&&o.to(n._rgba)||[],f(o.props,function(t,i){return null!=r[i.idx]?s=r[i.idx]===a[i.idx]:e})),s}),s},_space:function(){var t=[],e=this;return f(c,function(i,s){e[s.cache]&&t.push(i)}),t.pop()},transition:function(t,e){var s=h(t),n=s._space(),o=c[n],a=0===this.alpha()?h("transparent"):this,r=a[o.cache]||o.to(a._rgba),l=r.slice();return s=s[o.cache],f(o.props,function(t,n){var o=n.idx,a=r[o],h=s[o],c=u[n.type]||{};null!==h&&(null===a?l[o]=h:(c.mod&&(h-a>c.mod/2?a+=c.mod:a-h>c.mod/2&&(a-=c.mod)),l[o]=i((h-a)*e+a,n)))}),this[n](l)},blend:function(e){if(1===this._rgba[3])return this;var i=this._rgba.slice(),s=i.pop(),n=h(e)._rgba;return h(t.map(i,function(t,e){return(1-s)*n[e]+s*t}))},toRgbaString:function(){var e="rgba(",i=t.map(this._rgba,function(t,e){return null==t?e>2?1:0:t});return 1===i[3]&&(i.pop(),e="rgb("),e+i.join()+")"},toHslaString:function(){var e="hsla(",i=t.map(this.hsla(),function(t,e){return null==t&&(t=e>2?1:0),e&&3>e&&(t=Math.round(100*t)+"%"),t});return 1===i[3]&&(i.pop(),e="hsl("),e+i.join()+")"},toHexString:function(e){var i=this._rgba.slice(),s=i.pop();return e&&i.push(~~(255*s)),"#"+t.map(i,function(t){return t=(t||0).toString(16),1===t.length?"0"+t:t}).join("")},toString:function(){return 0===this._rgba[3]?"transparent":this.toRgbaString()}}),h.fn.parse.prototype=h.fn,c.hsla.to=function(t){if(null==t[0]||null==t[1]||null==t[2])return[null,null,null,t[3]];var e,i,s=t[0]/255,n=t[1]/255,o=t[2]/255,a=t[3],r=Math.max(s,n,o),l=Math.min(s,n,o),h=r-l,c=r+l,u=.5*c;return e=l===r?0:s===r?60*(n-o)/h+360:n===r?60*(o-s)/h+120:60*(s-n)/h+240,i=0===h?0:.5>=u?h/c:h/(2-c),[Math.round(e)%360,i,u,null==a?1:a]},c.hsla.from=function(t){if(null==t[0]||null==t[1]||null==t[2])return[null,null,null,t[3]];var e=t[0]/360,i=t[1],s=t[2],o=t[3],a=.5>=s?s*(1+i):s+i-s*i,r=2*s-a;return[Math.round(255*n(r,a,e+1/3)),Math.round(255*n(r,a,e)),Math.round(255*n(r,a,e-1/3)),o]},f(c,function(s,n){var o=n.props,a=n.cache,l=n.to,c=n.from;h.fn[s]=function(s){if(l&&!this[a]&&(this[a]=l(this._rgba)),s===e)return this[a].slice();var n,r=t.type(s),u="array"===r||"object"===r?s:arguments,d=this[a].slice();return f(o,function(t,e){var s=u["object"===r?t:e.idx];null==s&&(s=d[e.idx]),d[e.idx]=i(s,e)}),c?(n=h(c(d)),n[a]=d,n):h(d)},f(o,function(e,i){h.fn[e]||(h.fn[e]=function(n){var 
o,a=t.type(n),l="alpha"===e?this._hsla?"hsla":"rgba":s,h=this[l](),c=h[i.idx];return"undefined"===a?c:("function"===a&&(n=n.call(this,c),a=t.type(n)),null==n&&i.empty?this:("string"===a&&(o=r.exec(n),o&&(n=c+parseFloat(o[2])*("+"===o[1]?1:-1))),h[i.idx]=n,this[l](h)))})})}),h.hook=function(e){var i=e.split(" ");f(i,function(e,i){t.cssHooks[i]={set:function(e,n){var o,a,r="";if("transparent"!==n&&("string"!==t.type(n)||(o=s(n)))){if(n=h(o||n),!d.rgba&&1!==n._rgba[3]){for(a="backgroundColor"===i?e.parentNode:e;(""===r||"transparent"===r)&&a&&a.style;)try{r=t.css(a,"backgroundColor"),a=a.parentNode}catch(l){}n=n.blend(r&&"transparent"!==r?r:"_default")}n=n.toRgbaString()}try{e.style[i]=n}catch(l){}}},t.fx.step[i]=function(e){e.colorInit||(e.start=h(e.elem,i),e.end=h(e.end),e.colorInit=!0),t.cssHooks[i].set(e.elem,e.start.transition(e.end,e.pos))}})},h.hook(a),t.cssHooks.borderColor={expand:function(t){var e={};return f(["Top","Right","Bottom","Left"],function(i,s){e["border"+s+"Color"]=t}),e}},o=t.Color.names={aqua:"#00ffff",black:"#000000",blue:"#0000ff",fuchsia:"#ff00ff",gray:"#808080",green:"#008000",lime:"#00ff00",maroon:"#800000",navy:"#000080",olive:"#808000",purple:"#800080",red:"#ff0000",silver:"#c0c0c0",teal:"#008080",white:"#ffffff",yellow:"#ffff00",transparent:[null,null,null,0],_default:"#ffffff"}}(r),function(){function e(e){var i,s,n=e.ownerDocument.defaultView?e.ownerDocument.defaultView.getComputedStyle(e,null):e.currentStyle,o={};if(n&&n.length&&n[0]&&n[n[0]])for(s=n.length;s--;)i=n[s],"string"==typeof n[i]&&(o[t.camelCase(i)]=n[i]);else for(i in n)"string"==typeof n[i]&&(o[i]=n[i]);return o}function i(e,i){var s,o,a={};for(s in i)o=i[s],e[s]!==o&&(n[s]||(t.fx.step[s]||!isNaN(parseFloat(o)))&&(a[s]=o));return a}var s=["add","remove","toggle"],n={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};t.each(["borderLeftStyle","borderRightStyle","borderBottomStyle","borderTopStyle"],function(e,i){t.fx.step[i]=function(t){("none"!==t.end&&!t.setAttr||1===t.pos&&!t.setAttr)&&(r.style(t.elem,i,t.end),t.setAttr=!0)}}),t.fn.addBack||(t.fn.addBack=function(t){return this.add(null==t?this.prevObject:this.prevObject.filter(t))}),t.effects.animateClass=function(n,o,a,r){var l=t.speed(o,a,r);return this.queue(function(){var o,a=t(this),r=a.attr("class")||"",h=l.children?a.find("*").addBack():a;h=h.map(function(){var i=t(this);return{el:i,start:e(this)}}),o=function(){t.each(s,function(t,e){n[e]&&a[e+"Class"](n[e])})},o(),h=h.map(function(){return this.end=e(this.el[0]),this.diff=i(this.start,this.end),this}),a.attr("class",r),h=h.map(function(){var e=this,i=t.Deferred(),s=t.extend({},l,{queue:!1,complete:function(){i.resolve(e)}});return this.el.animate(this.diff,s),i.promise()}),t.when.apply(t,h.get()).done(function(){o(),t.each(arguments,function(){var e=this.el;t.each(this.diff,function(t){e.css(t,"")})}),l.complete.call(a[0])})})},t.fn.extend({addClass:function(e){return function(i,s,n,o){return s?t.effects.animateClass.call(this,{add:i},s,n,o):e.apply(this,arguments)}}(t.fn.addClass),removeClass:function(e){return function(i,s,n,o){return arguments.length>1?t.effects.animateClass.call(this,{remove:i},s,n,o):e.apply(this,arguments)}}(t.fn.removeClass),toggleClass:function(e){return function(i,s,n,o,a){return"boolean"==typeof s||void 
0===s?n?t.effects.animateClass.call(this,s?{add:i}:{remove:i},n,o,a):e.apply(this,arguments):t.effects.animateClass.call(this,{toggle:i},s,n,o)}}(t.fn.toggleClass),switchClass:function(e,i,s,n,o){return t.effects.animateClass.call(this,{add:i,remove:e},s,n,o)}})}(),function(){function e(e,i,s,n){return t.isPlainObject(e)&&(i=e,e=e.effect),e={effect:e},null==i&&(i={}),t.isFunction(i)&&(n=i,s=null,i={}),("number"==typeof i||t.fx.speeds[i])&&(n=s,s=i,i={}),t.isFunction(s)&&(n=s,s=null),i&&t.extend(e,i),s=s||i.duration,e.duration=t.fx.off?0:"number"==typeof s?s:s in t.fx.speeds?t.fx.speeds[s]:t.fx.speeds._default,e.complete=n||i.complete,e}function i(e){return!e||"number"==typeof e||t.fx.speeds[e]?!0:"string"!=typeof e||t.effects.effect[e]?t.isFunction(e)?!0:"object"!=typeof e||e.effect?!1:!0:!0}function s(t,e){var i=e.outerWidth(),s=e.outerHeight(),n=/^rect\((-?\d*\.?\d*px|-?\d+%|auto),?\s*(-?\d*\.?\d*px|-?\d+%|auto),?\s*(-?\d*\.?\d*px|-?\d+%|auto),?\s*(-?\d*\.?\d*px|-?\d+%|auto)\)$/,o=n.exec(t)||["",0,i,s,0];return{top:parseFloat(o[1])||0,right:"auto"===o[2]?i:parseFloat(o[2]),bottom:"auto"===o[3]?s:parseFloat(o[3]),left:parseFloat(o[4])||0}}t.expr&&t.expr.filters&&t.expr.filters.animated&&(t.expr.filters.animated=function(e){return function(i){return!!t(i).data(a)||e(i)}}(t.expr.filters.animated)),t.uiBackCompat!==!1&&t.extend(t.effects,{save:function(t,e){for(var i=0,s=e.length;s>i;i++)null!==e[i]&&t.data(n+e[i],t[0].style[e[i]])},restore:function(t,e){for(var i,s=0,o=e.length;o>s;s++)null!==e[s]&&(i=t.data(n+e[s]),t.css(e[s],i))},setMode:function(t,e){return"toggle"===e&&(e=t.is(":hidden")?"show":"hide"),e},createWrapper:function(e){if(e.parent().is(".ui-effects-wrapper"))return e.parent();var i={width:e.outerWidth(!0),height:e.outerHeight(!0),"float":e.css("float")},s=t("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),n={width:e.width(),height:e.height()},o=document.activeElement;try{o.id}catch(a){o=document.body}return e.wrap(s),(e[0]===o||t.contains(e[0],o))&&t(o).trigger("focus"),s=e.parent(),"static"===e.css("position")?(s.css({position:"relative"}),e.css({position:"relative"})):(t.extend(i,{position:e.css("position"),zIndex:e.css("z-index")}),t.each(["top","left","bottom","right"],function(t,s){i[s]=e.css(s),isNaN(parseInt(i[s],10))&&(i[s]="auto")}),e.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),e.css(n),s.css(i).show()},removeWrapper:function(e){var i=document.activeElement;return e.parent().is(".ui-effects-wrapper")&&(e.parent().replaceWith(e),(e[0]===i||t.contains(e[0],i))&&t(i).trigger("focus")),e}}),t.extend(t.effects,{version:"1.12.1",define:function(e,i,s){return s||(s=i,i="effect"),t.effects.effect[e]=s,t.effects.effect[e].mode=i,s},scaledDimensions:function(t,e,i){if(0===e)return{height:0,width:0,outerHeight:0,outerWidth:0};var s="horizontal"!==i?(e||100)/100:1,n="vertical"!==i?(e||100)/100:1;return{height:t.height()*n,width:t.width()*s,outerHeight:t.outerHeight()*n,outerWidth:t.outerWidth()*s}},clipToBox:function(t){return{width:t.clip.right-t.clip.left,height:t.clip.bottom-t.clip.top,left:t.clip.left,top:t.clip.top}},unshift:function(t,e,i){var s=t.queue();e>1&&s.splice.apply(s,[1,0].concat(s.splice(e,i))),t.dequeue()},saveStyle:function(t){t.data(o,t[0].style.cssText)},restoreStyle:function(t){t[0].style.cssText=t.data(o)||"",t.removeData(o)},mode:function(t,e){var 
i=t.is(":hidden");return"toggle"===e&&(e=i?"show":"hide"),(i?"hide"===e:"show"===e)&&(e="none"),e},getBaseline:function(t,e){var i,s;switch(t[0]){case"top":i=0;break;case"middle":i=.5;break;case"bottom":i=1;break;default:i=t[0]/e.height}switch(t[1]){case"left":s=0;break;case"center":s=.5;break;case"right":s=1;break;default:s=t[1]/e.width}return{x:s,y:i}},createPlaceholder:function(e){var i,s=e.css("position"),o=e.position();return e.css({marginTop:e.css("marginTop"),marginBottom:e.css("marginBottom"),marginLeft:e.css("marginLeft"),marginRight:e.css("marginRight")}).outerWidth(e.outerWidth()).outerHeight(e.outerHeight()),/^(static|relative)/.test(s)&&(s="absolute",i=t("<"+e[0].nodeName+">").insertAfter(e).css({display:/^(inline|ruby)/.test(e.css("display"))?"inline-block":"block",visibility:"hidden",marginTop:e.css("marginTop"),marginBottom:e.css("marginBottom"),marginLeft:e.css("marginLeft"),marginRight:e.css("marginRight"),"float":e.css("float")}).outerWidth(e.outerWidth()).outerHeight(e.outerHeight()).addClass("ui-effects-placeholder"),e.data(n+"placeholder",i)),e.css({position:s,left:o.left,top:o.top}),i},removePlaceholder:function(t){var e=n+"placeholder",i=t.data(e);i&&(i.remove(),t.removeData(e))},cleanUp:function(e){t.effects.restoreStyle(e),t.effects.removePlaceholder(e)},setTransition:function(e,i,s,n){return n=n||{},t.each(i,function(t,i){var o=e.cssUnit(i);o[0]>0&&(n[i]=o[0]*s+o[1])}),n}}),t.fn.extend({effect:function(){function i(e){function i(){l.removeData(a),t.effects.cleanUp(l),"hide"===s.mode&&l.hide(),r()}function r(){t.isFunction(h)&&h.call(l[0]),t.isFunction(e)&&e()}var l=t(this);s.mode=u.shift(),t.uiBackCompat===!1||o?"none"===s.mode?(l[c](),r()):n.call(l[0],s,i):(l.is(":hidden")?"hide"===c:"show"===c)?(l[c](),r()):n.call(l[0],s,r)}var s=e.apply(this,arguments),n=t.effects.effect[s.effect],o=n.mode,r=s.queue,l=r||"fx",h=s.complete,c=s.mode,u=[],d=function(e){var i=t(this),s=t.effects.mode(i,c)||o;i.data(a,!0),u.push(s),o&&("show"===s||s===o&&"hide"===s)&&i.show(),o&&"none"===s||t.effects.saveStyle(i),t.isFunction(e)&&e()};return t.fx.off||!n?c?this[c](s.duration,h):this.each(function(){h&&h.call(this)}):r===!1?this.each(d).each(i):this.queue(l,d).queue(l,i)},show:function(t){return function(s){if(i(s))return t.apply(this,arguments);var n=e.apply(this,arguments);return n.mode="show",this.effect.call(this,n)}}(t.fn.show),hide:function(t){return function(s){if(i(s))return t.apply(this,arguments);var n=e.apply(this,arguments);return n.mode="hide",this.effect.call(this,n)}}(t.fn.hide),toggle:function(t){return function(s){if(i(s)||"boolean"==typeof s)return t.apply(this,arguments);var n=e.apply(this,arguments);return n.mode="toggle",this.effect.call(this,n)}}(t.fn.toggle),cssUnit:function(e){var i=this.css(e),s=[];return t.each(["em","px","%","pt"],function(t,e){i.indexOf(e)>0&&(s=[parseFloat(i),e])}),s},cssClip:function(t){return t?this.css("clip","rect("+t.top+"px "+t.right+"px "+t.bottom+"px "+t.left+"px)"):s(this.css("clip"),this)},transfer:function(e,i){var s=t(this),n=t(e.to),o="fixed"===n.css("position"),a=t("body"),r=o?a.scrollTop():0,l=o?a.scrollLeft():0,h=n.offset(),c={top:h.top-r,left:h.left-l,height:n.innerHeight(),width:n.innerWidth()},u=s.offset(),d=t("<div 
class='ui-effects-transfer'></div>").appendTo("body").addClass(e.className).css({top:u.top-r,left:u.left-l,height:s.innerHeight(),width:s.innerWidth(),position:o?"fixed":"absolute"}).animate(c,e.duration,e.easing,function(){d.remove(),t.isFunction(i)&&i()})}}),t.fx.step.clip=function(e){e.clipInit||(e.start=t(e.elem).cssClip(),"string"==typeof e.end&&(e.end=s(e.end,e.elem)),e.clipInit=!0),t(e.elem).cssClip({top:e.pos*(e.end.top-e.start.top)+e.start.top,right:e.pos*(e.end.right-e.start.right)+e.start.right,bottom:e.pos*(e.end.bottom-e.start.bottom)+e.start.bottom,left:e.pos*(e.end.left-e.start.left)+e.start.left})}}(),function(){var e={};t.each(["Quad","Cubic","Quart","Quint","Expo"],function(t,i){e[i]=function(e){return Math.pow(e,t+2)}}),t.extend(e,{Sine:function(t){return 1-Math.cos(t*Math.PI/2)},Circ:function(t){return 1-Math.sqrt(1-t*t)},Elastic:function(t){return 0===t||1===t?t:-Math.pow(2,8*(t-1))*Math.sin((80*(t-1)-7.5)*Math.PI/15)},Back:function(t){return t*t*(3*t-2)},Bounce:function(t){for(var e,i=4;((e=Math.pow(2,--i))-1)/11>t;);return 1/Math.pow(4,3-i)-7.5625*Math.pow((3*e-2)/22-t,2)}}),t.each(e,function(e,i){t.easing["easeIn"+e]=i,t.easing["easeOut"+e]=function(t){return 1-i(1-t)},t.easing["easeInOut"+e]=function(t){return.5>t?i(2*t)/2:1-i(-2*t+2)/2}})}();var l=t.effects;t.effects.define("blind","hide",function(e,i){var s={up:["bottom","top"],vertical:["bottom","top"],down:["top","bottom"],left:["right","left"],horizontal:["right","left"],right:["left","right"]},n=t(this),o=e.direction||"up",a=n.cssClip(),r={clip:t.extend({},a)},l=t.effects.createPlaceholder(n);r.clip[s[o][0]]=r.clip[s[o][1]],"show"===e.mode&&(n.cssClip(r.clip),l&&l.css(t.effects.clipToBox(r)),r.clip=a),l&&l.animate(t.effects.clipToBox(r),e.duration,e.easing),n.animate(r,{queue:!1,duration:e.duration,easing:e.easing,complete:i})}),t.effects.define("bounce",function(e,i){var s,n,o,a=t(this),r=e.mode,l="hide"===r,h="show"===r,c=e.direction||"up",u=e.distance,d=e.times||5,p=2*d+(h||l?1:0),f=e.duration/p,g=e.easing,m="up"===c||"down"===c?"top":"left",_="up"===c||"left"===c,v=0,b=a.queue().length;for(t.effects.createPlaceholder(a),o=a.css(m),u||(u=a["top"===m?"outerHeight":"outerWidth"]()/3),h&&(n={opacity:1},n[m]=o,a.css("opacity",0).css(m,_?2*-u:2*u).animate(n,f,g)),l&&(u/=Math.pow(2,d-1)),n={},n[m]=o;d>v;v++)s={},s[m]=(_?"-=":"+=")+u,a.animate(s,f,g).animate(n,f,g),u=l?2*u:u/2;l&&(s={opacity:0},s[m]=(_?"-=":"+=")+u,a.animate(s,f,g)),a.queue(i),t.effects.unshift(a,b,p+1)}),t.effects.define("clip","hide",function(e,i){var s,n={},o=t(this),a=e.direction||"vertical",r="both"===a,l=r||"horizontal"===a,h=r||"vertical"===a;s=o.cssClip(),n.clip={top:h?(s.bottom-s.top)/2:s.top,right:l?(s.right-s.left)/2:s.right,bottom:h?(s.bottom-s.top)/2:s.bottom,left:l?(s.right-s.left)/2:s.left},t.effects.createPlaceholder(o),"show"===e.mode&&(o.cssClip(n.clip),n.clip=s),o.animate(n,{queue:!1,duration:e.duration,easing:e.easing,complete:i})}),t.effects.define("drop","hide",function(e,i){var s,n=t(this),o=e.mode,a="show"===o,r=e.direction||"left",l="up"===r||"down"===r?"top":"left",h="up"===r||"left"===r?"-=":"+=",c="+="===h?"-=":"+=",u={opacity:0};t.effects.createPlaceholder(n),s=e.distance||n["top"===l?"outerHeight":"outerWidth"](!0)/2,u[l]=h+s,a&&(n.css(u),u[l]=c+s,u.opacity=1),n.animate(u,{queue:!1,duration:e.duration,easing:e.easing,complete:i})}),t.effects.define("explode","hide",function(e,i){function s(){b.push(this),b.length===u*d&&n()}function n(){p.css({visibility:"visible"}),t(b).remove(),i()}var 
o,a,r,l,h,c,u=e.pieces?Math.round(Math.sqrt(e.pieces)):3,d=u,p=t(this),f=e.mode,g="show"===f,m=p.show().css("visibility","hidden").offset(),_=Math.ceil(p.outerWidth()/d),v=Math.ceil(p.outerHeight()/u),b=[];for(o=0;u>o;o++)for(l=m.top+o*v,c=o-(u-1)/2,a=0;d>a;a++)r=m.left+a*_,h=a-(d-1)/2,p.clone().appendTo("body").wrap("<div></div>").css({position:"absolute",visibility:"visible",left:-a*_,top:-o*v}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:_,height:v,left:r+(g?h*_:0),top:l+(g?c*v:0),opacity:g?0:1}).animate({left:r+(g?0:h*_),top:l+(g?0:c*v),opacity:g?1:0},e.duration||500,e.easing,s)}),t.effects.define("fade","toggle",function(e,i){var s="show"===e.mode;t(this).css("opacity",s?0:1).animate({opacity:s?1:0},{queue:!1,duration:e.duration,easing:e.easing,complete:i})}),t.effects.define("fold","hide",function(e,i){var s=t(this),n=e.mode,o="show"===n,a="hide"===n,r=e.size||15,l=/([0-9]+)%/.exec(r),h=!!e.horizFirst,c=h?["right","bottom"]:["bottom","right"],u=e.duration/2,d=t.effects.createPlaceholder(s),p=s.cssClip(),f={clip:t.extend({},p)},g={clip:t.extend({},p)},m=[p[c[0]],p[c[1]]],_=s.queue().length;l&&(r=parseInt(l[1],10)/100*m[a?0:1]),f.clip[c[0]]=r,g.clip[c[0]]=r,g.clip[c[1]]=0,o&&(s.cssClip(g.clip),d&&d.css(t.effects.clipToBox(g)),g.clip=p),s.queue(function(i){d&&d.animate(t.effects.clipToBox(f),u,e.easing).animate(t.effects.clipToBox(g),u,e.easing),i()}).animate(f,u,e.easing).animate(g,u,e.easing).queue(i),t.effects.unshift(s,_,4)}),t.effects.define("highlight","show",function(e,i){var s=t(this),n={backgroundColor:s.css("backgroundColor")};"hide"===e.mode&&(n.opacity=0),t.effects.saveStyle(s),s.css({backgroundImage:"none",backgroundColor:e.color||"#ffff99"}).animate(n,{queue:!1,duration:e.duration,easing:e.easing,complete:i})}),t.effects.define("size",function(e,i){var s,n,o,a=t(this),r=["fontSize"],l=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],h=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],c=e.mode,u="effect"!==c,d=e.scale||"both",p=e.origin||["middle","center"],f=a.css("position"),g=a.position(),m=t.effects.scaledDimensions(a),_=e.from||m,v=e.to||t.effects.scaledDimensions(a,0);t.effects.createPlaceholder(a),"show"===c&&(o=_,_=v,v=o),n={from:{y:_.height/m.height,x:_.width/m.width},to:{y:v.height/m.height,x:v.width/m.width}},("box"===d||"both"===d)&&(n.from.y!==n.to.y&&(_=t.effects.setTransition(a,l,n.from.y,_),v=t.effects.setTransition(a,l,n.to.y,v)),n.from.x!==n.to.x&&(_=t.effects.setTransition(a,h,n.from.x,_),v=t.effects.setTransition(a,h,n.to.x,v))),("content"===d||"both"===d)&&n.from.y!==n.to.y&&(_=t.effects.setTransition(a,r,n.from.y,_),v=t.effects.setTransition(a,r,n.to.y,v)),p&&(s=t.effects.getBaseline(p,m),_.top=(m.outerHeight-_.outerHeight)*s.y+g.top,_.left=(m.outerWidth-_.outerWidth)*s.x+g.left,v.top=(m.outerHeight-v.outerHeight)*s.y+g.top,v.left=(m.outerWidth-v.outerWidth)*s.x+g.left),a.css(_),("content"===d||"both"===d)&&(l=l.concat(["marginTop","marginBottom"]).concat(r),h=h.concat(["marginLeft","marginRight"]),a.find("*[width]").each(function(){var 
i=t(this),s=t.effects.scaledDimensions(i),o={height:s.height*n.from.y,width:s.width*n.from.x,outerHeight:s.outerHeight*n.from.y,outerWidth:s.outerWidth*n.from.x},a={height:s.height*n.to.y,width:s.width*n.to.x,outerHeight:s.height*n.to.y,outerWidth:s.width*n.to.x};n.from.y!==n.to.y&&(o=t.effects.setTransition(i,l,n.from.y,o),a=t.effects.setTransition(i,l,n.to.y,a)),n.from.x!==n.to.x&&(o=t.effects.setTransition(i,h,n.from.x,o),a=t.effects.setTransition(i,h,n.to.x,a)),u&&t.effects.saveStyle(i),i.css(o),i.animate(a,e.duration,e.easing,function(){u&&t.effects.restoreStyle(i)})})),a.animate(v,{queue:!1,duration:e.duration,easing:e.easing,complete:function(){var e=a.offset();0===v.opacity&&a.css("opacity",_.opacity),u||(a.css("position","static"===f?"relative":f).offset(e),t.effects.saveStyle(a)),i()}})}),t.effects.define("scale",function(e,i){var s=t(this),n=e.mode,o=parseInt(e.percent,10)||(0===parseInt(e.percent,10)?0:"effect"!==n?0:100),a=t.extend(!0,{from:t.effects.scaledDimensions(s),to:t.effects.scaledDimensions(s,o,e.direction||"both"),origin:e.origin||["middle","center"]},e);e.fade&&(a.from.opacity=1,a.to.opacity=0),t.effects.effect.size.call(this,a,i)}),t.effects.define("puff","hide",function(e,i){var s=t.extend(!0,{},e,{fade:!0,percent:parseInt(e.percent,10)||150});t.effects.effect.scale.call(this,s,i)}),t.effects.define("pulsate","show",function(e,i){var s=t(this),n=e.mode,o="show"===n,a="hide"===n,r=o||a,l=2*(e.times||5)+(r?1:0),h=e.duration/l,c=0,u=1,d=s.queue().length;for((o||!s.is(":visible"))&&(s.css("opacity",0).show(),c=1);l>u;u++)s.animate({opacity:c},h,e.easing),c=1-c;s.animate({opacity:c},h,e.easing),s.queue(i),t.effects.unshift(s,d,l+1)}),t.effects.define("shake",function(e,i){var s=1,n=t(this),o=e.direction||"left",a=e.distance||20,r=e.times||3,l=2*r+1,h=Math.round(e.duration/l),c="up"===o||"down"===o?"top":"left",u="up"===o||"left"===o,d={},p={},f={},g=n.queue().length;for(t.effects.createPlaceholder(n),d[c]=(u?"-=":"+=")+a,p[c]=(u?"+=":"-=")+2*a,f[c]=(u?"-=":"+=")+2*a,n.animate(d,h,e.easing);r>s;s++)n.animate(p,h,e.easing).animate(f,h,e.easing);n.animate(p,h,e.easing).animate(d,h/2,e.easing).queue(i),t.effects.unshift(n,g,l+1)}),t.effects.define("slide","show",function(e,i){var s,n,o=t(this),a={up:["bottom","top"],down:["top","bottom"],left:["right","left"],right:["left","right"]},r=e.mode,l=e.direction||"left",h="up"===l||"down"===l?"top":"left",c="up"===l||"left"===l,u=e.distance||o["top"===h?"outerHeight":"outerWidth"](!0),d={};t.effects.createPlaceholder(o),s=o.cssClip(),n=o.position()[h],d[h]=(c?-1:1)*u+n,d.clip=o.cssClip(),d.clip[a[l][1]]=d.clip[a[l][0]],"show"===r&&(o.cssClip(d.clip),o.css(h,d[h]),d.clip=s,d[h]=n),o.animate(d,{queue:!1,duration:e.duration,easing:e.easing,complete:i})});var l;t.uiBackCompat!==!1&&(l=t.effects.define("transfer",function(e,i){t(this).transfer(e,i)}))});
diff --git a/src/wiki/static/wiki/js/jquery.min.js b/src/wiki/static/wiki/js/jquery.min.js
deleted file mode 100644
index e83647587..000000000
--- a/src/wiki/static/wiki/js/jquery.min.js
+++ /dev/null
@@ -1,5 +0,0 @@
-/*! jQuery v1.12.4 | (c) jQuery Foundation | jquery.org/license */
-!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=a.document,e=c.slice,f=c.concat,g=c.push,h=c.indexOf,i={},j=i.toString,k=i.hasOwnProperty,l={},m="1.12.4",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};n.fn=n.prototype={jquery:m,constructor:n,selector:"",length:0,toArray:function(){return e.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:e.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a){return n.each(this,a)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(e.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor()},push:g,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(n.isPlainObject(c)||(b=n.isArray(c)))?(b?(b=!1,f=a&&n.isArray(a)?a:[]):f=a&&n.isPlainObject(a)?a:{},g[d]=n.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray||function(a){return"array"===n.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){var b=a&&a.toString();return!n.isArray(a)&&b-parseFloat(b)+1>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==n.type(a)||a.nodeType||n.isWindow(a))return!1;try{if(a.constructor&&!k.call(a,"constructor")&&!k.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(!l.ownFirst)for(b in a)return k.call(a,b);for(b in a);return void 0===b||k.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?i[j.call(a)]||"object":typeof a},globalEval:function(b){b&&n.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b){var c,d=0;if(s(a)){for(c=a.length;c>d;d++)if(b.call(a[d],d,a[d])===!1)break}else for(d in a)if(b.call(a[d],d,a[d])===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):g.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(h)return h.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return 
e},map:function(a,b,c){var d,e,g=0,h=[];if(s(a))for(d=a.length;d>g;g++)e=b(a[g],g,c),null!=e&&h.push(e);else for(g in a)e=b(a[g],g,c),null!=e&&h.push(e);return f.apply([],h)},guid:1,proxy:function(a,b){var c,d,f;return"string"==typeof b&&(f=a[b],b=a,a=f),n.isFunction(a)?(c=e.call(arguments,2),d=function(){return a.apply(b||this,c.concat(e.call(arguments)))},d.guid=a.guid=a.guid||n.guid++,d):void 0},now:function(){return+new Date},support:l}),"function"==typeof Symbol&&(n.fn[Symbol.iterator]=c[Symbol.iterator]),n.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(a,b){i["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=!!a&&"length"in a&&a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ga(),z=ga(),A=ga(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+M+"))|)"+L+"*\\]",O=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+N+")*)|.*)\\)|)",P=new RegExp(L+"+","g"),Q=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),R=new RegExp("^"+L+"*,"+L+"*"),S=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),T=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),U=new RegExp(O),V=new RegExp("^"+M+"$"),W={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M+"|[*])"),ATTR:new RegExp("^"+N),PSEUDO:new RegExp("^"+O),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},X=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Z=/^[^{]+\{\s*\[native \w/,$=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,_=/[+~]/,aa=/'|\\/g,ba=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),ca=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},da=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(ea){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fa(a,b,d,e){var f,h,j,k,l,o,r,s,w=b&&b.ownerDocument,x=b?b.nodeType:9;if(d=d||[],"string"!=typeof a||!a||1!==x&&9!==x&&11!==x)return d;if(!e&&((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,p)){if(11!==x&&(o=$.exec(a)))if(f=o[1]){if(9===x){if(!(j=b.getElementById(f)))return d;if(j.id===f)return d.push(j),d}else if(w&&(j=w.getElementById(f))&&t(b,j)&&j.id===f)return d.push(j),d}else{if(o[2])return H.apply(d,b.getElementsByTagName(a)),d;if((f=o[3])&&c.getElementsByClassName&&b.getElementsByClassName)return H.apply(d,b.getElementsByClassName(f)),d}if(c.qsa&&!A[a+" "]&&(!q||!q.test(a))){if(1!==x)w=b,s=a;else 
if("object"!==b.nodeName.toLowerCase()){(k=b.getAttribute("id"))?k=k.replace(aa,"\\$&"):b.setAttribute("id",k=u),r=g(a),h=r.length,l=V.test(k)?"#"+k:"[id='"+k+"']";while(h--)r[h]=l+" "+qa(r[h]);s=r.join(","),w=_.test(a)&&oa(b.parentNode)||b}if(s)try{return H.apply(d,w.querySelectorAll(s)),d}catch(y){}finally{k===u&&b.removeAttribute("id")}}}return i(a.replace(Q,"$1"),b,d,e)}function ga(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ha(a){return a[u]=!0,a}function ia(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ja(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[e]]=b}function ka(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function la(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function na(a){return ha(function(b){return b=+b,ha(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function oa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=fa.support={},f=fa.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fa.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=n.documentElement,p=!f(n),(e=n.defaultView)&&e.top!==e&&(e.addEventListener?e.addEventListener("unload",da,!1):e.attachEvent&&e.attachEvent("onunload",da)),c.attributes=ia(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ia(function(a){return a.appendChild(n.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=Z.test(n.getElementsByClassName),c.getById=ia(function(a){return o.appendChild(a).id=u,!n.getElementsByName||!n.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ba,ca);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return"undefined"!=typeof b.getElementsByClassName&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=Z.test(n.querySelectorAll))&&(ia(function(a){o.appendChild(a).innerHTML="<a id='"+u+"'></a><select id='"+u+"-\r\\' msallowcapture=''><option selected=''></option></select>",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ia(function(a){var 
b=n.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=Z.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ia(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",O)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=Z.test(o.compareDocumentPosition),t=b||Z.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===n||a.ownerDocument===v&&t(v,a)?-1:b===n||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,g=[a],h=[b];if(!e||!f)return a===n?-1:b===n?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return ka(a,b);c=a;while(c=c.parentNode)g.unshift(c);c=b;while(c=c.parentNode)h.unshift(c);while(g[d]===h[d])d++;return d?ka(g[d],h[d]):g[d]===v?-1:h[d]===v?1:0},n):n},fa.matches=function(a,b){return fa(a,null,null,b)},fa.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(T,"='$1']"),c.matchesSelector&&p&&!A[b+" "]&&(!r||!r.test(b))&&(!q||!q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fa(b,n,null,[a]).length>0},fa.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fa.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fa.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fa.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fa.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fa.selectors={cacheLength:50,createPseudo:ha,match:W,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ba,ca),a[3]=(a[3]||a[4]||a[5]||"").replace(ba,ca),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fa.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fa.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
W.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&U.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ba,ca).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fa.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(P," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h,t=!1;if(q){if(f){while(p){m=b;while(m=m[p])if(h?m.nodeName.toLowerCase()===r:1===m.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){m=q,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n&&j[2],m=n&&q.childNodes[n];while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if(1===m.nodeType&&++t&&m===b){k[a]=[w,n,t];break}}else if(s&&(m=b,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n),t===!1)while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if((h?m.nodeName.toLowerCase()===r:1===m.nodeType)&&++t&&(s&&(l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[w,t]),m===b))break;return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fa.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ha(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ha(function(a){var b=[],c=[],d=h(a.replace(Q,"$1"));return d[u]?ha(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ha(function(a){return function(b){return fa(a,b).length>0}}),contains:ha(function(a){return a=a.replace(ba,ca),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ha(function(a){return V.test(a||"")||fa.error("unsupported lang: "+a),a=a.replace(ba,ca).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return 
a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Y.test(a.nodeName)},input:function(a){return X.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:na(function(){return[0]}),last:na(function(a,b){return[b-1]}),eq:na(function(a,b,c){return[0>c?c+b:c]}),even:na(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:na(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:na(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:na(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=la(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=ma(b);function pa(){}pa.prototype=d.filters=d.pseudos,d.setFilters=new pa,g=fa.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){c&&!(e=R.exec(h))||(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=S.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(Q," ")}),h=h.slice(c.length));for(g in d.filter)!(e=W[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?fa.error(a):z(a,i).slice(0)};function qa(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function ra(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j,k=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(j=b[u]||(b[u]={}),i=j[b.uniqueID]||(j[b.uniqueID]={}),(h=i[d])&&h[0]===w&&h[1]===f)return k[2]=h[2];if(i[d]=k,k[2]=a(b,c,g))return!0}}}function sa(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ta(a,b,c){for(var d=0,e=b.length;e>d;d++)fa(a,b[d],c);return c}function ua(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(c&&!c(f,d,e)||(g.push(f),j&&b.push(h)));return g}function va(a,b,c,d,e,f){return d&&!d[u]&&(d=va(d)),e&&!e[u]&&(e=va(e,f)),ha(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ta(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ua(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ua(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ua(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function wa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=ra(function(a){return a===b},h,!0),l=ra(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[ra(sa(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return va(i>1&&sa(m),i>1&&qa(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(Q,"$1"),c,e>i&&wa(a.slice(i,e)),f>e&&wa(a=a.slice(e)),f>e&&qa(a))}m.push(c)}return 
sa(m)}function xa(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,o,q,r=0,s="0",t=f&&[],u=[],v=j,x=f||e&&d.find.TAG("*",k),y=w+=null==v?1:Math.random()||.1,z=x.length;for(k&&(j=g===n||g||k);s!==z&&null!=(l=x[s]);s++){if(e&&l){o=0,g||l.ownerDocument===n||(m(l),h=!p);while(q=a[o++])if(q(l,g||n,h)){i.push(l);break}k&&(w=y)}c&&((l=!q&&l)&&r--,f&&t.push(l))}if(r+=s,c&&s!==r){o=0;while(q=b[o++])q(t,u,g,h);if(f){if(r>0)while(s--)t[s]||u[s]||(u[s]=F.call(i));u=ua(u)}H.apply(i,u),k&&!f&&u.length>0&&r+b.length>1&&fa.uniqueSort(i)}return k&&(w=y,j=v),t};return c?ha(f):f}return h=fa.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xa(e,d)),f.selector=a}return f},i=fa.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ba,ca),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=W.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ba,ca),_.test(j[0].type)&&oa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qa(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,!b||_.test(a)&&oa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ia(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ia(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||ja("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ia(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ja("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ia(function(a){return null==a.getAttribute("disabled")})||ja(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fa}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.uniqueSort=n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},v=function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c},w=n.expr.match.needsContext,x=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,y=/^.[^:#\[\.,]*$/;function z(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(y.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return n.inArray(a,b)>-1!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;e>b;b++)if(n.contains(d[b],this))return!0}));for(b=0;e>b;b++)n.find(a,d[b],c);return c=this.pushStack(e>1?n.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(z(this,a||[],!1))},not:function(a){return 
this.pushStack(z(this,a||[],!0))},is:function(a){return!!z(this,"string"==typeof a&&w.test(a)?n(a):a||[],!1).length}});var A,B=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=n.fn.init=function(a,b,c){var e,f;if(!a)return this;if(c=c||A,"string"==typeof a){if(e="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:B.exec(a),!e||!e[1]&&b)return!b||b.jquery?(b||c).find(a):this.constructor(b).find(a);if(e[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(e[1],b&&b.nodeType?b.ownerDocument||b:d,!0)),x.test(e[1])&&n.isPlainObject(b))for(e in b)n.isFunction(this[e])?this[e](b[e]):this.attr(e,b[e]);return this}if(f=d.getElementById(e[2]),f&&f.parentNode){if(f.id!==e[2])return A.find(a);this.length=1,this[0]=f}return this.context=d,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?"undefined"!=typeof c.ready?c.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};C.prototype=n.fn,A=n(d);var D=/^(?:parents|prev(?:Until|All))/,E={children:!0,contents:!0,next:!0,prev:!0};n.fn.extend({has:function(a){var b,c=n(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(n.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=w.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.uniqueSort(f):f)},index:function(a){return a?"string"==typeof a?n.inArray(this[0],n(a)):n.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.uniqueSort(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function F(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return u(a,"parentNode")},parentsUntil:function(a,b,c){return u(a,"parentNode",c)},next:function(a){return F(a,"nextSibling")},prev:function(a){return F(a,"previousSibling")},nextAll:function(a){return u(a,"nextSibling")},prevAll:function(a){return u(a,"previousSibling")},nextUntil:function(a,b,c){return u(a,"nextSibling",c)},prevUntil:function(a,b,c){return u(a,"previousSibling",c)},siblings:function(a){return v((a.parentNode||{}).firstChild,a)},children:function(a){return v(a.firstChild)},contents:function(a){return n.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(E[a]||(e=n.uniqueSort(e)),D.test(a)&&(e=e.reverse())),this.pushStack(e)}});var G=/\S+/g;function H(a){var b={};return n.each(a.match(G)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?H(a):n.extend({},a);var b,c,d,e,f=[],g=[],h=-1,i=function(){for(e=a.once,d=b=!0;g.length;h=-1){c=g.shift();while(++h<f.length)f[h].apply(c[0],c[1])===!1&&a.stopOnFalse&&(h=f.length,c=!1)}a.memory||(c=!1),b=!1,e&&(f=c?[]:"")},j={add:function(){return f&&(c&&!b&&(h=f.length-1,g.push(c)),function d(b){n.each(b,function(b,c){n.isFunction(c)?a.unique&&j.has(c)||f.push(c):c&&c.length&&"string"!==n.type(c)&&d(c)})}(arguments),c&&!b&&i()),this},remove:function(){return n.each(arguments,function(a,b){var 
c;while((c=n.inArray(b,f,c))>-1)f.splice(c,1),h>=c&&h--}),this},has:function(a){return a?n.inArray(a,f)>-1:f.length>0},empty:function(){return f&&(f=[]),this},disable:function(){return e=g=[],f=c="",this},disabled:function(){return!f},lock:function(){return e=!0,c||j.disable(),this},locked:function(){return!!e},fireWith:function(a,c){return e||(c=c||[],c=[a,c.slice?c.slice():c],g.push(c),b||i()),this},fire:function(){return j.fireWith(this,arguments),this},fired:function(){return!!d}};return j},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().progress(c.notify).done(c.resolve).fail(c.reject):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=e.call(arguments),d=c.length,f=1!==d||a&&n.isFunction(a.promise)?d:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?e.call(arguments):d,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(d>1)for(i=new Array(d),j=new Array(d),k=new Array(d);d>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().progress(h(b,j,i)).done(h(b,k,c)).fail(g.reject):--f;return f||g.resolveWith(k,c),g.promise()}});var I;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(I.resolveWith(d,[n]),n.fn.triggerHandler&&(n(d).triggerHandler("ready"),n(d).off("ready"))))}});function J(){d.addEventListener?(d.removeEventListener("DOMContentLoaded",K),a.removeEventListener("load",K)):(d.detachEvent("onreadystatechange",K),a.detachEvent("onload",K))}function K(){(d.addEventListener||"load"===a.event.type||"complete"===d.readyState)&&(J(),n.ready())}n.ready.promise=function(b){if(!I)if(I=n.Deferred(),"complete"===d.readyState||"loading"!==d.readyState&&!d.documentElement.doScroll)a.setTimeout(n.ready);else if(d.addEventListener)d.addEventListener("DOMContentLoaded",K),a.addEventListener("load",K);else{d.attachEvent("onreadystatechange",K),a.attachEvent("onload",K);var c=!1;try{c=null==a.frameElement&&d.documentElement}catch(e){}c&&c.doScroll&&!function f(){if(!n.isReady){try{c.doScroll("left")}catch(b){return a.setTimeout(f,50)}J(),n.ready()}}()}return I.promise(b)},n.ready.promise();var L;for(L in n(l))break;l.ownFirst="0"===L,l.inlineBlockNeedsLayout=!1,n(function(){var a,b,c,e;c=d.getElementsByTagName("body")[0],c&&c.style&&(b=d.createElement("div"),e=d.createElement("div"),e.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(e).appendChild(b),"undefined"!=typeof 
b.style.zoom&&(b.style.cssText="display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1",l.inlineBlockNeedsLayout=a=3===b.offsetWidth,a&&(c.style.zoom=1)),c.removeChild(e))}),function(){var a=d.createElement("div");l.deleteExpando=!0;try{delete a.test}catch(b){l.deleteExpando=!1}a=null}();var M=function(a){var b=n.noData[(a.nodeName+" ").toLowerCase()],c=+a.nodeType||1;return 1!==c&&9!==c?!1:!b||b!==!0&&a.getAttribute("classid")===b},N=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,O=/([A-Z])/g;function P(a,b,c){if(void 0===c&&1===a.nodeType){var d="data-"+b.replace(O,"-$1").toLowerCase();if(c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:N.test(c)?n.parseJSON(c):c}catch(e){}n.data(a,b,c)}else c=void 0;
-}return c}function Q(a){var b;for(b in a)if(("data"!==b||!n.isEmptyObject(a[b]))&&"toJSON"!==b)return!1;return!0}function R(a,b,d,e){if(M(a)){var f,g,h=n.expando,i=a.nodeType,j=i?n.cache:a,k=i?a[h]:a[h]&&h;if(k&&j[k]&&(e||j[k].data)||void 0!==d||"string"!=typeof b)return k||(k=i?a[h]=c.pop()||n.guid++:h),j[k]||(j[k]=i?{}:{toJSON:n.noop}),"object"!=typeof b&&"function"!=typeof b||(e?j[k]=n.extend(j[k],b):j[k].data=n.extend(j[k].data,b)),g=j[k],e||(g.data||(g.data={}),g=g.data),void 0!==d&&(g[n.camelCase(b)]=d),"string"==typeof b?(f=g[b],null==f&&(f=g[n.camelCase(b)])):f=g,f}}function S(a,b,c){if(M(a)){var d,e,f=a.nodeType,g=f?n.cache:a,h=f?a[n.expando]:n.expando;if(g[h]){if(b&&(d=c?g[h]:g[h].data)){n.isArray(b)?b=b.concat(n.map(b,n.camelCase)):b in d?b=[b]:(b=n.camelCase(b),b=b in d?[b]:b.split(" ")),e=b.length;while(e--)delete d[b[e]];if(c?!Q(d):!n.isEmptyObject(d))return}(c||(delete g[h].data,Q(g[h])))&&(f?n.cleanData([a],!0):l.deleteExpando||g!=g.window?delete g[h]:g[h]=void 0)}}}n.extend({cache:{},noData:{"applet ":!0,"embed ":!0,"object ":"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(a){return a=a.nodeType?n.cache[a[n.expando]]:a[n.expando],!!a&&!Q(a)},data:function(a,b,c){return R(a,b,c)},removeData:function(a,b){return S(a,b)},_data:function(a,b,c){return R(a,b,c,!0)},_removeData:function(a,b){return S(a,b,!0)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=n.data(f),1===f.nodeType&&!n._data(f,"parsedAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),P(f,d,e[d])));n._data(f,"parsedAttrs",!0)}return e}return"object"==typeof a?this.each(function(){n.data(this,a)}):arguments.length>1?this.each(function(){n.data(this,a,b)}):f?P(f,a,n.data(f,a)):void 0},removeData:function(a){return this.each(function(){n.removeData(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=n._data(a,b),c&&(!d||n.isArray(c)?d=n._data(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return n._data(a,c)||n._data(a,c,{empty:n.Callbacks("once memory").add(function(){n._removeData(a,b+"queue"),n._removeData(a,c)})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?n.queue(this[0],a):void 0===b?this:this.each(function(){var c=n.queue(this,a,b);n._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&n.dequeue(this,a)})},dequeue:function(a){return this.each(function(){n.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=n.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=n._data(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}}),function(){var a;l.shrinkWrapBlocks=function(){if(null!=a)return a;a=!1;var b,c,e;return c=d.getElementsByTagName("body")[0],c&&c.style?(b=d.createElement("div"),e=d.createElement("div"),e.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(e).appendChild(b),"undefined"!=typeof 
b.style.zoom&&(b.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:1px;width:1px;zoom:1",b.appendChild(d.createElement("div")).style.width="5px",a=3!==b.offsetWidth),c.removeChild(e),a):void 0}}();var T=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,U=new RegExp("^(?:([+-])=|)("+T+")([a-z%]*)$","i"),V=["Top","Right","Bottom","Left"],W=function(a,b){return a=b||a,"none"===n.css(a,"display")||!n.contains(a.ownerDocument,a)};function X(a,b,c,d){var e,f=1,g=20,h=d?function(){return d.cur()}:function(){return n.css(a,b,"")},i=h(),j=c&&c[3]||(n.cssNumber[b]?"":"px"),k=(n.cssNumber[b]||"px"!==j&&+i)&&U.exec(n.css(a,b));if(k&&k[3]!==j){j=j||k[3],c=c||[],k=+i||1;do f=f||".5",k/=f,n.style(a,b,k+j);while(f!==(f=h()/i)&&1!==f&&--g)}return c&&(k=+k||+i||0,e=c[1]?k+(c[1]+1)*c[2]:+c[2],d&&(d.unit=j,d.start=k,d.end=e)),e}var Y=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)Y(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},Z=/^(?:checkbox|radio)$/i,$=/<([\w:-]+)/,_=/^$|\/(?:java|ecma)script/i,aa=/^\s+/,ba="abbr|article|aside|audio|bdi|canvas|data|datalist|details|dialog|figcaption|figure|footer|header|hgroup|main|mark|meter|nav|output|picture|progress|section|summary|template|time|video";function ca(a){var b=ba.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}!function(){var a=d.createElement("div"),b=d.createDocumentFragment(),c=d.createElement("input");a.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",l.leadingWhitespace=3===a.firstChild.nodeType,l.tbody=!a.getElementsByTagName("tbody").length,l.htmlSerialize=!!a.getElementsByTagName("link").length,l.html5Clone="<:nav></:nav>"!==d.createElement("nav").cloneNode(!0).outerHTML,c.type="checkbox",c.checked=!0,b.appendChild(c),l.appendChecked=c.checked,a.innerHTML="<textarea>x</textarea>",l.noCloneChecked=!!a.cloneNode(!0).lastChild.defaultValue,b.appendChild(a),c=d.createElement("input"),c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),a.appendChild(c),l.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,l.noCloneEvent=!!a.addEventListener,a[n.expando]=1,l.attributes=!a.getAttribute(n.expando)}();var da={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],area:[1,"<map>","</map>"],param:[1,"<object>","</object>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:l.htmlSerialize?[0,"",""]:[1,"X<div>","</div>"]};da.optgroup=da.option,da.tbody=da.tfoot=da.colgroup=da.caption=da.thead,da.th=da.td;function ea(a,b){var c,d,e=0,f="undefined"!=typeof a.getElementsByTagName?a.getElementsByTagName(b||"*"):"undefined"!=typeof a.querySelectorAll?a.querySelectorAll(b||"*"):void 0;if(!f)for(f=[],c=a.childNodes||a;null!=(d=c[e]);e++)!b||n.nodeName(d,b)?f.push(d):n.merge(f,ea(d,b));return void 0===b||b&&n.nodeName(a,b)?n.merge([a],f):f}function fa(a,b){for(var c,d=0;null!=(c=a[d]);d++)n._data(c,"globalEval",!b||n._data(b[d],"globalEval"))}var ga=/<|&#?\w+;/,ha=/<tbody/i;function 
ia(a){Z.test(a.type)&&(a.defaultChecked=a.checked)}function ja(a,b,c,d,e){for(var f,g,h,i,j,k,m,o=a.length,p=ca(b),q=[],r=0;o>r;r++)if(g=a[r],g||0===g)if("object"===n.type(g))n.merge(q,g.nodeType?[g]:g);else if(ga.test(g)){i=i||p.appendChild(b.createElement("div")),j=($.exec(g)||["",""])[1].toLowerCase(),m=da[j]||da._default,i.innerHTML=m[1]+n.htmlPrefilter(g)+m[2],f=m[0];while(f--)i=i.lastChild;if(!l.leadingWhitespace&&aa.test(g)&&q.push(b.createTextNode(aa.exec(g)[0])),!l.tbody){g="table"!==j||ha.test(g)?"<table>"!==m[1]||ha.test(g)?0:i:i.firstChild,f=g&&g.childNodes.length;while(f--)n.nodeName(k=g.childNodes[f],"tbody")&&!k.childNodes.length&&g.removeChild(k)}n.merge(q,i.childNodes),i.textContent="";while(i.firstChild)i.removeChild(i.firstChild);i=p.lastChild}else q.push(b.createTextNode(g));i&&p.removeChild(i),l.appendChecked||n.grep(ea(q,"input"),ia),r=0;while(g=q[r++])if(d&&n.inArray(g,d)>-1)e&&e.push(g);else if(h=n.contains(g.ownerDocument,g),i=ea(p.appendChild(g),"script"),h&&fa(i),c){f=0;while(g=i[f++])_.test(g.type||"")&&c.push(g)}return i=null,p}!function(){var b,c,e=d.createElement("div");for(b in{submit:!0,change:!0,focusin:!0})c="on"+b,(l[b]=c in a)||(e.setAttribute(c,"t"),l[b]=e.attributes[c].expando===!1);e=null}();var ka=/^(?:input|select|textarea)$/i,la=/^key/,ma=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,na=/^(?:focusinfocus|focusoutblur)$/,oa=/^([^.]*)(?:\.(.+)|)/;function pa(){return!0}function qa(){return!1}function ra(){try{return d.activeElement}catch(a){}}function sa(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof c&&(d=d||c,c=void 0);for(h in b)sa(a,h,c,d,b[h],f);return a}if(null==d&&null==e?(e=c,d=c=void 0):null==e&&("string"==typeof c?(e=d,d=void 0):(e=d,d=c,c=void 0)),e===!1)e=qa;else if(!e)return a;return 1===f&&(g=e,e=function(a){return n().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=n.guid++)),a.each(function(){n.event.add(this,b,e,d,c)})}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=n._data(a);if(r){c.handler&&(i=c,c=i.handler,e=i.selector),c.guid||(c.guid=n.guid++),(g=r.events)||(g=r.events={}),(k=r.handle)||(k=r.handle=function(a){return"undefined"==typeof n||a&&n.event.triggered===a.type?void 0:n.event.dispatch.apply(k.elem,arguments)},k.elem=a),b=(b||"").match(G)||[""],h=b.length;while(h--)f=oa.exec(b[h])||[],o=q=f[1],p=(f[2]||"").split(".").sort(),o&&(j=n.event.special[o]||{},o=(e?j.delegateType:j.bindType)||o,j=n.event.special[o]||{},l=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},i),(m=g[o])||(m=g[o]=[],m.delegateCount=0,j.setup&&j.setup.call(a,d,p,k)!==!1||(a.addEventListener?a.addEventListener(o,k,!1):a.attachEvent&&a.attachEvent("on"+o,k))),j.add&&(j.add.call(a,l),l.handler.guid||(l.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,l):m.push(l),n.event.global[o]=!0);a=null}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=n.hasData(a)&&n._data(a);if(r&&(k=r.events)){b=(b||"").match(G)||[""],j=b.length;while(j--)if(h=oa.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=k[o]||[],h=h[2]&&new 
RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),i=f=m.length;while(f--)g=m[f],!e&&q!==g.origType||c&&c.guid!==g.guid||h&&!h.test(g.namespace)||d&&d!==g.selector&&("**"!==d||!g.selector)||(m.splice(f,1),g.selector&&m.delegateCount--,l.remove&&l.remove.call(a,g));i&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete k[o])}else for(o in k)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(k)&&(delete r.handle,n._removeData(a,"events"))}},trigger:function(b,c,e,f){var g,h,i,j,l,m,o,p=[e||d],q=k.call(b,"type")?b.type:b,r=k.call(b,"namespace")?b.namespace.split("."):[];if(i=m=e=e||d,3!==e.nodeType&&8!==e.nodeType&&!na.test(q+n.event.triggered)&&(q.indexOf(".")>-1&&(r=q.split("."),q=r.shift(),r.sort()),h=q.indexOf(":")<0&&"on"+q,b=b[n.expando]?b:new n.Event(q,"object"==typeof b&&b),b.isTrigger=f?2:3,b.namespace=r.join("."),b.rnamespace=b.namespace?new RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=e),c=null==c?[b]:n.makeArray(c,[b]),l=n.event.special[q]||{},f||!l.trigger||l.trigger.apply(e,c)!==!1)){if(!f&&!l.noBubble&&!n.isWindow(e)){for(j=l.delegateType||q,na.test(j+q)||(i=i.parentNode);i;i=i.parentNode)p.push(i),m=i;m===(e.ownerDocument||d)&&p.push(m.defaultView||m.parentWindow||a)}o=0;while((i=p[o++])&&!b.isPropagationStopped())b.type=o>1?j:l.bindType||q,g=(n._data(i,"events")||{})[b.type]&&n._data(i,"handle"),g&&g.apply(i,c),g=h&&i[h],g&&g.apply&&M(i)&&(b.result=g.apply(i,c),b.result===!1&&b.preventDefault());if(b.type=q,!f&&!b.isDefaultPrevented()&&(!l._default||l._default.apply(p.pop(),c)===!1)&&M(e)&&h&&e[q]&&!n.isWindow(e)){m=e[h],m&&(e[h]=null),n.event.triggered=q;try{e[q]()}catch(s){}n.event.triggered=void 0,m&&(e[h]=m)}return b.result}},dispatch:function(a){a=n.event.fix(a);var b,c,d,f,g,h=[],i=e.call(arguments),j=(n._data(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())a.rnamespace&&!a.rnamespace.test(g.namespace)||(a.handleObj=g,a.data=g.data,d=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==d&&(a.result=d)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&("click"!==a.type||isNaN(a.button)||a.button<1))for(;i!=this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>-1:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},fix:function(a){if(a[n.expando])return a;var b,c,e,f=a.type,g=a,h=this.fixHooks[f];h||(this.fixHooks[f]=h=ma.test(f)?this.mouseHooks:la.test(f)?this.keyHooks:{}),e=h.props?this.props.concat(h.props):this.props,a=new n.Event(g),b=e.length;while(b--)c=e[b],a[c]=g[c];return a.target||(a.target=g.srcElement||d),3===a.target.nodeType&&(a.target=a.target.parentNode),a.metaKey=!!a.metaKey,h.filter?h.filter(a,g):a},props:"altKey bubbles cancelable ctrlKey currentTarget detail eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" 
"),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,e,f,g=b.button,h=b.fromElement;return null==a.pageX&&null!=b.clientX&&(e=a.target.ownerDocument||d,f=e.documentElement,c=e.body,a.pageX=b.clientX+(f&&f.scrollLeft||c&&c.scrollLeft||0)-(f&&f.clientLeft||c&&c.clientLeft||0),a.pageY=b.clientY+(f&&f.scrollTop||c&&c.scrollTop||0)-(f&&f.clientTop||c&&c.clientTop||0)),!a.relatedTarget&&h&&(a.relatedTarget=h===a.target?b.toElement:h),a.which||void 0===g||(a.which=1&g?1:2&g?3:4&g?2:0),a}},special:{load:{noBubble:!0},focus:{trigger:function(){if(this!==ra()&&this.focus)try{return this.focus(),!1}catch(a){}},delegateType:"focusin"},blur:{trigger:function(){return this===ra()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return n.nodeName(this,"input")&&"checkbox"===this.type&&this.click?(this.click(),!1):void 0},_default:function(a){return n.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c){var d=n.extend(new n.Event,c,{type:a,isSimulated:!0});n.event.trigger(d,null,b),d.isDefaultPrevented()&&c.preventDefault()}},n.removeEvent=d.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c)}:function(a,b,c){var d="on"+b;a.detachEvent&&("undefined"==typeof a[d]&&(a[d]=null),a.detachEvent(d,c))},n.Event=function(a,b){return this instanceof n.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?pa:qa):this.type=a,b&&n.extend(this,b),this.timeStamp=a&&a.timeStamp||n.now(),void(this[n.expando]=!0)):new n.Event(a,b)},n.Event.prototype={constructor:n.Event,isDefaultPrevented:qa,isPropagationStopped:qa,isImmediatePropagationStopped:qa,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=pa,a&&(a.preventDefault?a.preventDefault():a.returnValue=!1)},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=pa,a&&!this.isSimulated&&(a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0)},stopImmediatePropagation:function(){var a=this.originalEvent;this.isImmediatePropagationStopped=pa,a&&a.stopImmediatePropagation&&a.stopImmediatePropagation(),this.stopPropagation()}},n.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){n.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj;return e&&(e===d||n.contains(d,e))||(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),l.submit||(n.event.special.submit={setup:function(){return n.nodeName(this,"form")?!1:void n.event.add(this,"click._submit keypress._submit",function(a){var b=a.target,c=n.nodeName(b,"input")||n.nodeName(b,"button")?n.prop(b,"form"):void 0;c&&!n._data(c,"submit")&&(n.event.add(c,"submit._submit",function(a){a._submitBubble=!0}),n._data(c,"submit",!0))})},postDispatch:function(a){a._submitBubble&&(delete a._submitBubble,this.parentNode&&!a.isTrigger&&n.event.simulate("submit",this.parentNode,a))},teardown:function(){return n.nodeName(this,"form")?!1:void n.event.remove(this,"._submit")}}),l.change||(n.event.special.change={setup:function(){return 
ka.test(this.nodeName)?("checkbox"!==this.type&&"radio"!==this.type||(n.event.add(this,"propertychange._change",function(a){"checked"===a.originalEvent.propertyName&&(this._justChanged=!0)}),n.event.add(this,"click._change",function(a){this._justChanged&&!a.isTrigger&&(this._justChanged=!1),n.event.simulate("change",this,a)})),!1):void n.event.add(this,"beforeactivate._change",function(a){var b=a.target;ka.test(b.nodeName)&&!n._data(b,"change")&&(n.event.add(b,"change._change",function(a){!this.parentNode||a.isSimulated||a.isTrigger||n.event.simulate("change",this.parentNode,a)}),n._data(b,"change",!0))})},handle:function(a){var b=a.target;return this!==b||a.isSimulated||a.isTrigger||"radio"!==b.type&&"checkbox"!==b.type?a.handleObj.handler.apply(this,arguments):void 0},teardown:function(){return n.event.remove(this,"._change"),!ka.test(this.nodeName)}}),l.focusin||n.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){n.event.simulate(b,a.target,n.event.fix(a))};n.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=n._data(d,b);e||d.addEventListener(a,c,!0),n._data(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=n._data(d,b)-1;e?n._data(d,b,e):(d.removeEventListener(a,c,!0),n._removeData(d,b))}}}),n.fn.extend({on:function(a,b,c,d){return sa(this,a,b,c,d)},one:function(a,b,c,d){return sa(this,a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,n(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return b!==!1&&"function"!=typeof b||(c=b,b=void 0),c===!1&&(c=qa),this.each(function(){n.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){n.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?n.event.trigger(a,b,c,!0):void 0}});var ta=/ jQuery\d+="(?:null|\d+)"/g,ua=new RegExp("<(?:"+ba+")[\\s/>]","i"),va=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:-]+)[^>]*)\/>/gi,wa=/<script|<style|<link/i,xa=/checked\s*(?:[^=]|=\s*.checked.)/i,ya=/^true\/(.*)/,za=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,Aa=ca(d),Ba=Aa.appendChild(d.createElement("div"));function Ca(a,b){return n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function Da(a){return a.type=(null!==n.find.attr(a,"type"))+"/"+a.type,a}function Ea(a){var b=ya.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function Fa(a,b){if(1===b.nodeType&&n.hasData(a)){var c,d,e,f=n._data(a),g=n._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;e>d;d++)n.event.add(b,c,h[c][d])}g.data&&(g.data=n.extend({},g.data))}}function Ga(a,b){var c,d,e;if(1===b.nodeType){if(c=b.nodeName.toLowerCase(),!l.noCloneEvent&&b[n.expando]){e=n._data(b);for(d in e.events)n.removeEvent(b,d,e.handle);b.removeAttribute(n.expando)}"script"===c&&b.text!==a.text?(Da(b).text=a.text,Ea(b)):"object"===c?(b.parentNode&&(b.outerHTML=a.outerHTML),l.html5Clone&&a.innerHTML&&!n.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):"input"===c&&Z.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):"option"===c?b.defaultSelected=b.selected=a.defaultSelected:"input"!==c&&"textarea"!==c||(b.defaultValue=a.defaultValue)}}function Ha(a,b,c,d){b=f.apply([],b);var 
e,g,h,i,j,k,m=0,o=a.length,p=o-1,q=b[0],r=n.isFunction(q);if(r||o>1&&"string"==typeof q&&!l.checkClone&&xa.test(q))return a.each(function(e){var f=a.eq(e);r&&(b[0]=q.call(this,e,f.html())),Ha(f,b,c,d)});if(o&&(k=ja(b,a[0].ownerDocument,!1,a,d),e=k.firstChild,1===k.childNodes.length&&(k=e),e||d)){for(i=n.map(ea(k,"script"),Da),h=i.length;o>m;m++)g=k,m!==p&&(g=n.clone(g,!0,!0),h&&n.merge(i,ea(g,"script"))),c.call(a[m],g,m);if(h)for(j=i[i.length-1].ownerDocument,n.map(i,Ea),m=0;h>m;m++)g=i[m],_.test(g.type||"")&&!n._data(g,"globalEval")&&n.contains(j,g)&&(g.src?n._evalUrl&&n._evalUrl(g.src):n.globalEval((g.text||g.textContent||g.innerHTML||"").replace(za,"")));k=e=null}return a}function Ia(a,b,c){for(var d,e=b?n.filter(b,a):a,f=0;null!=(d=e[f]);f++)c||1!==d.nodeType||n.cleanData(ea(d)),d.parentNode&&(c&&n.contains(d.ownerDocument,d)&&fa(ea(d,"script")),d.parentNode.removeChild(d));return a}n.extend({htmlPrefilter:function(a){return a.replace(va,"<$1></$2>")},clone:function(a,b,c){var d,e,f,g,h,i=n.contains(a.ownerDocument,a);if(l.html5Clone||n.isXMLDoc(a)||!ua.test("<"+a.nodeName+">")?f=a.cloneNode(!0):(Ba.innerHTML=a.outerHTML,Ba.removeChild(f=Ba.firstChild)),!(l.noCloneEvent&&l.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(d=ea(f),h=ea(a),g=0;null!=(e=h[g]);++g)d[g]&&Ga(e,d[g]);if(b)if(c)for(h=h||ea(a),d=d||ea(f),g=0;null!=(e=h[g]);g++)Fa(e,d[g]);else Fa(a,f);return d=ea(f,"script"),d.length>0&&fa(d,!i&&ea(a,"script")),d=h=e=null,f},cleanData:function(a,b){for(var d,e,f,g,h=0,i=n.expando,j=n.cache,k=l.attributes,m=n.event.special;null!=(d=a[h]);h++)if((b||M(d))&&(f=d[i],g=f&&j[f])){if(g.events)for(e in g.events)m[e]?n.event.remove(d,e):n.removeEvent(d,e,g.handle);j[f]&&(delete j[f],k||"undefined"==typeof d.removeAttribute?d[i]=void 0:d.removeAttribute(i),c.push(f))}}}),n.fn.extend({domManip:Ha,detach:function(a){return Ia(this,a,!0)},remove:function(a){return Ia(this,a)},text:function(a){return Y(this,function(a){return void 0===a?n.text(this):this.empty().append((this[0]&&this[0].ownerDocument||d).createTextNode(a))},null,a,arguments.length)},append:function(){return Ha(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=Ca(this,a);b.appendChild(a)}})},prepend:function(){return Ha(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=Ca(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return Ha(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return Ha(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},empty:function(){for(var a,b=0;null!=(a=this[b]);b++){1===a.nodeType&&n.cleanData(ea(a,!1));while(a.firstChild)a.removeChild(a.firstChild);a.options&&n.nodeName(a,"select")&&(a.options.length=0)}return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return Y(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a)return 1===b.nodeType?b.innerHTML.replace(ta,""):void 0;if("string"==typeof a&&!wa.test(a)&&(l.htmlSerialize||!ua.test(a))&&(l.leadingWhitespace||!aa.test(a))&&!da[($.exec(a)||["",""])[1].toLowerCase()]){a=n.htmlPrefilter(a);try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(ea(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=[];return Ha(this,arguments,function(b){var 
c=this.parentNode;n.inArray(this,a)<0&&(n.cleanData(ea(this)),c&&c.replaceChild(b,this))},a)}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=0,e=[],f=n(a),h=f.length-1;h>=d;d++)c=d===h?this:this.clone(!0),n(f[d])[b](c),g.apply(e,c.get());return this.pushStack(e)}});var Ja,Ka={HTML:"block",BODY:"block"};function La(a,b){var c=n(b.createElement(a)).appendTo(b.body),d=n.css(c[0],"display");return c.detach(),d}function Ma(a){var b=d,c=Ka[a];return c||(c=La(a,b),"none"!==c&&c||(Ja=(Ja||n("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=(Ja[0].contentWindow||Ja[0].contentDocument).document,b.write(),b.close(),c=La(a,b),Ja.detach()),Ka[a]=c),c}var Na=/^margin/,Oa=new RegExp("^("+T+")(?!px)[a-z%]+$","i"),Pa=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e},Qa=d.documentElement;!function(){var b,c,e,f,g,h,i=d.createElement("div"),j=d.createElement("div");if(j.style){j.style.cssText="float:left;opacity:.5",l.opacity="0.5"===j.style.opacity,l.cssFloat=!!j.style.cssFloat,j.style.backgroundClip="content-box",j.cloneNode(!0).style.backgroundClip="",l.clearCloneStyle="content-box"===j.style.backgroundClip,i=d.createElement("div"),i.style.cssText="border:0;width:8px;height:0;top:0;left:-9999px;padding:0;margin-top:1px;position:absolute",j.innerHTML="",i.appendChild(j),l.boxSizing=""===j.style.boxSizing||""===j.style.MozBoxSizing||""===j.style.WebkitBoxSizing,n.extend(l,{reliableHiddenOffsets:function(){return null==b&&k(),f},boxSizingReliable:function(){return null==b&&k(),e},pixelMarginRight:function(){return null==b&&k(),c},pixelPosition:function(){return null==b&&k(),b},reliableMarginRight:function(){return null==b&&k(),g},reliableMarginLeft:function(){return null==b&&k(),h}});function k(){var k,l,m=d.documentElement;m.appendChild(i),j.style.cssText="-webkit-box-sizing:border-box;box-sizing:border-box;position:relative;display:block;margin:auto;border:1px;padding:1px;top:1%;width:50%",b=e=h=!1,c=g=!0,a.getComputedStyle&&(l=a.getComputedStyle(j),b="1%"!==(l||{}).top,h="2px"===(l||{}).marginLeft,e="4px"===(l||{width:"4px"}).width,j.style.marginRight="50%",c="4px"===(l||{marginRight:"4px"}).marginRight,k=j.appendChild(d.createElement("div")),k.style.cssText=j.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",k.style.marginRight=k.style.width="0",j.style.width="1px",g=!parseFloat((a.getComputedStyle(k)||{}).marginRight),j.removeChild(k)),j.style.display="none",f=0===j.getClientRects().length,f&&(j.style.display="",j.innerHTML="<table><tr><td></td><td>t</td></tr></table>",j.childNodes[0].style.borderCollapse="separate",k=j.getElementsByTagName("td"),k[0].style.cssText="margin:0;border:0;padding:0;display:none",f=0===k[0].offsetHeight,f&&(k[0].style.display="",k[1].style.display="none",f=0===k[0].offsetHeight)),m.removeChild(i)}}}();var Ra,Sa,Ta=/^(top|right|bottom|left)$/;a.getComputedStyle?(Ra=function(b){var c=b.ownerDocument.defaultView;return c&&c.opener||(c=a),c.getComputedStyle(b)},Sa=function(a,b,c){var d,e,f,g,h=a.style;return c=c||Ra(a),g=c?c.getPropertyValue(b)||c[b]:void 0,""!==g&&void 
0!==g||n.contains(a.ownerDocument,a)||(g=n.style(a,b)),c&&!l.pixelMarginRight()&&Oa.test(g)&&Na.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f),void 0===g?g:g+""}):Qa.currentStyle&&(Ra=function(a){return a.currentStyle},Sa=function(a,b,c){var d,e,f,g,h=a.style;return c=c||Ra(a),g=c?c[b]:void 0,null==g&&h&&h[b]&&(g=h[b]),Oa.test(g)&&!Ta.test(b)&&(d=h.left,e=a.runtimeStyle,f=e&&e.left,f&&(e.left=a.currentStyle.left),h.left="fontSize"===b?"1em":g,g=h.pixelLeft+"px",h.left=d,f&&(e.left=f)),void 0===g?g:g+""||"auto"});function Ua(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}var Va=/alpha\([^)]*\)/i,Wa=/opacity\s*=\s*([^)]*)/i,Xa=/^(none|table(?!-c[ea]).+)/,Ya=new RegExp("^("+T+")(.*)$","i"),Za={position:"absolute",visibility:"hidden",display:"block"},$a={letterSpacing:"0",fontWeight:"400"},_a=["Webkit","O","Moz","ms"],ab=d.createElement("div").style;function bb(a){if(a in ab)return a;var b=a.charAt(0).toUpperCase()+a.slice(1),c=_a.length;while(c--)if(a=_a[c]+b,a in ab)return a}function cb(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=n._data(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&W(d)&&(f[g]=n._data(d,"olddisplay",Ma(d.nodeName)))):(e=W(d),(c&&"none"!==c||!e)&&n._data(d,"olddisplay",e?c:n.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}function db(a,b,c){var d=Ya.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function eb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=n.css(a,c+V[f],!0,e)),d?("content"===c&&(g-=n.css(a,"padding"+V[f],!0,e)),"margin"!==c&&(g-=n.css(a,"border"+V[f]+"Width",!0,e))):(g+=n.css(a,"padding"+V[f],!0,e),"padding"!==c&&(g+=n.css(a,"border"+V[f]+"Width",!0,e)));return g}function fb(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=Ra(a),g=l.boxSizing&&"border-box"===n.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=Sa(a,b,f),(0>e||null==e)&&(e=a.style[b]),Oa.test(e))return e;d=g&&(l.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+eb(a,b,c||(g?"border":"content"),d,f)+"px"}n.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=Sa(a,"opacity");return""===c?"1":c}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":l.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=n.camelCase(b),i=a.style;if(b=n.cssProps[h]||(n.cssProps[h]=bb(h)||h),g=n.cssHooks[b]||n.cssHooks[h],void 0===c)return g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b];if(f=typeof c,"string"===f&&(e=U.exec(c))&&e[1]&&(c=X(a,b,e),f="number"),null!=c&&c===c&&("number"===f&&(c+=e&&e[3]||(n.cssNumber[h]?"":"px")),l.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),!(g&&"set"in g&&void 0===(c=g.set(a,c,d)))))try{i[b]=c}catch(j){}}},css:function(a,b,c,d){var e,f,g,h=n.camelCase(b);return b=n.cssProps[h]||(n.cssProps[h]=bb(h)||h),g=n.cssHooks[b]||n.cssHooks[h],g&&"get"in g&&(f=g.get(a,!0,c)),void 0===f&&(f=Sa(a,b,d)),"normal"===f&&b in $a&&(f=$a[b]),""===c||c?(e=parseFloat(f),c===!0||isFinite(e)?e||0:f):f}}),n.each(["height","width"],function(a,b){n.cssHooks[b]={get:function(a,c,d){return 
c?Xa.test(n.css(a,"display"))&&0===a.offsetWidth?Pa(a,Za,function(){return fb(a,b,d)}):fb(a,b,d):void 0},set:function(a,c,d){var e=d&&Ra(a);return db(a,c,d?eb(a,b,d,l.boxSizing&&"border-box"===n.css(a,"boxSizing",!1,e),e):0)}}}),l.opacity||(n.cssHooks.opacity={get:function(a,b){return Wa.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=n.isNumeric(b)?"alpha(opacity="+100*b+")":"",f=d&&d.filter||c.filter||"";c.zoom=1,(b>=1||""===b)&&""===n.trim(f.replace(Va,""))&&c.removeAttribute&&(c.removeAttribute("filter"),""===b||d&&!d.filter)||(c.filter=Va.test(f)?f.replace(Va,e):f+" "+e)}}),n.cssHooks.marginRight=Ua(l.reliableMarginRight,function(a,b){return b?Pa(a,{display:"inline-block"},Sa,[a,"marginRight"]):void 0}),n.cssHooks.marginLeft=Ua(l.reliableMarginLeft,function(a,b){return b?(parseFloat(Sa(a,"marginLeft"))||(n.contains(a.ownerDocument,a)?a.getBoundingClientRect().left-Pa(a,{
-marginLeft:0},function(){return a.getBoundingClientRect().left}):0))+"px":void 0}),n.each({margin:"",padding:"",border:"Width"},function(a,b){n.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+V[d]+b]=f[d]||f[d-2]||f[0];return e}},Na.test(a)||(n.cssHooks[a+b].set=db)}),n.fn.extend({css:function(a,b){return Y(this,function(a,b,c){var d,e,f={},g=0;if(n.isArray(b)){for(d=Ra(a),e=b.length;e>g;g++)f[b[g]]=n.css(a,b[g],!1,d);return f}return void 0!==c?n.style(a,b,c):n.css(a,b)},a,b,arguments.length>1)},show:function(){return cb(this,!0)},hide:function(){return cb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){W(this)?n(this).show():n(this).hide()})}});function gb(a,b,c,d,e){return new gb.prototype.init(a,b,c,d,e)}n.Tween=gb,gb.prototype={constructor:gb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||n.easing._default,this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(n.cssNumber[c]?"":"px")},cur:function(){var a=gb.propHooks[this.prop];return a&&a.get?a.get(this):gb.propHooks._default.get(this)},run:function(a){var b,c=gb.propHooks[this.prop];return this.options.duration?this.pos=b=n.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):this.pos=b=a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):gb.propHooks._default.set(this),this}},gb.prototype.init.prototype=gb.prototype,gb.propHooks={_default:{get:function(a){var b;return 1!==a.elem.nodeType||null!=a.elem[a.prop]&&null==a.elem.style[a.prop]?a.elem[a.prop]:(b=n.css(a.elem,a.prop,""),b&&"auto"!==b?b:0)},set:function(a){n.fx.step[a.prop]?n.fx.step[a.prop](a):1!==a.elem.nodeType||null==a.elem.style[n.cssProps[a.prop]]&&!n.cssHooks[a.prop]?a.elem[a.prop]=a.now:n.style(a.elem,a.prop,a.now+a.unit)}}},gb.propHooks.scrollTop=gb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},n.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2},_default:"swing"},n.fx=gb.prototype.init,n.fx.step={};var hb,ib,jb=/^(?:toggle|show|hide)$/,kb=/queueHooks$/;function lb(){return a.setTimeout(function(){hb=void 0}),hb=n.now()}function mb(a,b){var c,d={height:a},e=0;for(b=b?1:0;4>e;e+=2-b)c=V[e],d["margin"+c]=d["padding"+c]=a;return b&&(d.opacity=d.width=a),d}function nb(a,b,c){for(var d,e=(qb.tweeners[b]||[]).concat(qb.tweeners["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function ob(a,b,c){var d,e,f,g,h,i,j,k,m=this,o={},p=a.style,q=a.nodeType&&W(a),r=n._data(a,"fxshow");c.queue||(h=n._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,m.always(function(){m.always(function(){h.unqueued--,n.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[p.overflow,p.overflowX,p.overflowY],j=n.css(a,"display"),k="none"===j?n._data(a,"olddisplay")||Ma(a.nodeName):j,"inline"===k&&"none"===n.css(a,"float")&&(l.inlineBlockNeedsLayout&&"inline"!==Ma(a.nodeName)?p.zoom=1:p.display="inline-block")),c.overflow&&(p.overflow="hidden",l.shrinkWrapBlocks()||m.always(function(){p.overflow=c.overflow[0],p.overflowX=c.overflow[1],p.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],jb.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(q?"hide":"show")){if("show"!==e||!r||void 0===r[d])continue;q=!0}o[d]=r&&r[d]||n.style(a,d)}else j=void 
0;if(n.isEmptyObject(o))"inline"===("none"===j?Ma(a.nodeName):j)&&(p.display=j);else{r?"hidden"in r&&(q=r.hidden):r=n._data(a,"fxshow",{}),f&&(r.hidden=!q),q?n(a).show():m.done(function(){n(a).hide()}),m.done(function(){var b;n._removeData(a,"fxshow");for(b in o)n.style(a,b,o[b])});for(d in o)g=nb(q?r[d]:0,d,m),d in r||(r[d]=g.start,q&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function pb(a,b){var c,d,e,f,g;for(c in a)if(d=n.camelCase(c),e=b[d],f=a[c],n.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=n.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function qb(a,b,c){var d,e,f=0,g=qb.prefilters.length,h=n.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=hb||lb(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:n.extend({},b),opts:n.extend(!0,{specialEasing:{},easing:n.easing._default},c),originalProperties:b,originalOptions:c,startTime:hb||lb(),duration:c.duration,tweens:[],createTween:function(b,c){var d=n.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?(h.notifyWith(a,[j,1,0]),h.resolveWith(a,[j,b])):h.rejectWith(a,[j,b]),this}}),k=j.props;for(pb(k,j.opts.specialEasing);g>f;f++)if(d=qb.prefilters[f].call(j,a,k,j.opts))return n.isFunction(d.stop)&&(n._queueHooks(j.elem,j.opts.queue).stop=n.proxy(d.stop,d)),d;return n.map(k,nb,j),n.isFunction(j.opts.start)&&j.opts.start.call(a,j),n.fx.timer(n.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}n.Animation=n.extend(qb,{tweeners:{"*":[function(a,b){var c=this.createTween(a,b);return X(c.elem,a,U.exec(b),c),c}]},tweener:function(a,b){n.isFunction(a)?(b=a,a=["*"]):a=a.match(G);for(var c,d=0,e=a.length;e>d;d++)c=a[d],qb.tweeners[c]=qb.tweeners[c]||[],qb.tweeners[c].unshift(b)},prefilters:[ob],prefilter:function(a,b){b?qb.prefilters.unshift(a):qb.prefilters.push(a)}}),n.speed=function(a,b,c){var d=a&&"object"==typeof a?n.extend({},a):{complete:c||!c&&b||n.isFunction(a)&&a,duration:a,easing:c&&b||b&&!n.isFunction(b)&&b};return d.duration=n.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in n.fx.speeds?n.fx.speeds[d.duration]:n.fx.speeds._default,null!=d.queue&&d.queue!==!0||(d.queue="fx"),d.old=d.complete,d.complete=function(){n.isFunction(d.old)&&d.old.call(this),d.queue&&n.dequeue(this,d.queue)},d},n.fn.extend({fadeTo:function(a,b,c,d){return this.filter(W).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=n.isEmptyObject(a),f=n.speed(b,c,d),g=function(){var b=qb(this,n.extend({},a),f);(e||n._data(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=n.timers,g=n._data(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&kb.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));!b&&c||n.dequeue(this,a)})},finish:function(a){return 
a!==!1&&(a=a||"fx"),this.each(function(){var b,c=n._data(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=n.timers,g=d?d.length:0;for(c.finish=!0,n.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),n.each(["toggle","show","hide"],function(a,b){var c=n.fn[b];n.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(mb(b,!0),a,d,e)}}),n.each({slideDown:mb("show"),slideUp:mb("hide"),slideToggle:mb("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){n.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),n.timers=[],n.fx.tick=function(){var a,b=n.timers,c=0;for(hb=n.now();c<b.length;c++)a=b[c],a()||b[c]!==a||b.splice(c--,1);b.length||n.fx.stop(),hb=void 0},n.fx.timer=function(a){n.timers.push(a),a()?n.fx.start():n.timers.pop()},n.fx.interval=13,n.fx.start=function(){ib||(ib=a.setInterval(n.fx.tick,n.fx.interval))},n.fx.stop=function(){a.clearInterval(ib),ib=null},n.fx.speeds={slow:600,fast:200,_default:400},n.fn.delay=function(b,c){return b=n.fx?n.fx.speeds[b]||b:b,c=c||"fx",this.queue(c,function(c,d){var e=a.setTimeout(c,b);d.stop=function(){a.clearTimeout(e)}})},function(){var a,b=d.createElement("input"),c=d.createElement("div"),e=d.createElement("select"),f=e.appendChild(d.createElement("option"));c=d.createElement("div"),c.setAttribute("className","t"),c.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",a=c.getElementsByTagName("a")[0],b.setAttribute("type","checkbox"),c.appendChild(b),a=c.getElementsByTagName("a")[0],a.style.cssText="top:1px",l.getSetAttribute="t"!==c.className,l.style=/top/.test(a.getAttribute("style")),l.hrefNormalized="/a"===a.getAttribute("href"),l.checkOn=!!b.value,l.optSelected=f.selected,l.enctype=!!d.createElement("form").enctype,e.disabled=!0,l.optDisabled=!f.disabled,b=d.createElement("input"),b.setAttribute("value",""),l.input=""===b.getAttribute("value"),b.value="t",b.setAttribute("type","radio"),l.radioValue="t"===b.value}();var rb=/\r/g,sb=/[\x20\t\r\n\f]+/g;n.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=n.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,n(this).val()):a,null==e?e="":"number"==typeof e?e+="":n.isArray(e)&&(e=n.map(e,function(a){return null==a?"":a+""})),b=n.valHooks[this.type]||n.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=n.valHooks[e.type]||n.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(rb,""):null==c?"":c)}}}),n.extend({valHooks:{option:{get:function(a){var b=n.find.attr(a,"value");return null!=b?b:n.trim(n.text(a)).replace(sb," ")}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],(c.selected||i===e)&&(l.optDisabled?!c.disabled:null===c.getAttribute("disabled"))&&(!c.parentNode.disabled||!n.nodeName(c.parentNode,"optgroup"))){if(b=n(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=n.makeArray(b),g=e.length;while(g--)if(d=e[g],n.inArray(n.valHooks.option.get(d),f)>-1)try{d.selected=c=!0}catch(h){d.scrollHeight}else d.selected=!1;return 
c||(a.selectedIndex=-1),e}}}}),n.each(["radio","checkbox"],function(){n.valHooks[this]={set:function(a,b){return n.isArray(b)?a.checked=n.inArray(n(a).val(),b)>-1:void 0}},l.checkOn||(n.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})});var tb,ub,vb=n.expr.attrHandle,wb=/^(?:checked|selected)$/i,xb=l.getSetAttribute,yb=l.input;n.fn.extend({attr:function(a,b){return Y(this,n.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){n.removeAttr(this,a)})}}),n.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return"undefined"==typeof a.getAttribute?n.prop(a,b,c):(1===f&&n.isXMLDoc(a)||(b=b.toLowerCase(),e=n.attrHooks[b]||(n.expr.match.bool.test(b)?ub:tb)),void 0!==c?null===c?void n.removeAttr(a,b):e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:(a.setAttribute(b,c+""),c):e&&"get"in e&&null!==(d=e.get(a,b))?d:(d=n.find.attr(a,b),null==d?void 0:d))},attrHooks:{type:{set:function(a,b){if(!l.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(G);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)?yb&&xb||!wb.test(c)?a[d]=!1:a[n.camelCase("default-"+c)]=a[d]=!1:n.attr(a,c,""),a.removeAttribute(xb?c:d)}}),ub={set:function(a,b,c){return b===!1?n.removeAttr(a,c):yb&&xb||!wb.test(c)?a.setAttribute(!xb&&n.propFix[c]||c,c):a[n.camelCase("default-"+c)]=a[c]=!0,c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=vb[b]||n.find.attr;yb&&xb||!wb.test(b)?vb[b]=function(a,b,d){var e,f;return d||(f=vb[b],vb[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,vb[b]=f),e}:vb[b]=function(a,b,c){return c?void 0:a[n.camelCase("default-"+b)]?b.toLowerCase():null}}),yb&&xb||(n.attrHooks.value={set:function(a,b,c){return n.nodeName(a,"input")?void(a.defaultValue=b):tb&&tb.set(a,b,c)}}),xb||(tb={set:function(a,b,c){var d=a.getAttributeNode(c);return d||a.setAttributeNode(d=a.ownerDocument.createAttribute(c)),d.value=b+="","value"===c||b===a.getAttribute(c)?b:void 0}},vb.id=vb.name=vb.coords=function(a,b,c){var d;return c?void 0:(d=a.getAttributeNode(b))&&""!==d.value?d.value:null},n.valHooks.button={get:function(a,b){var c=a.getAttributeNode(b);return c&&c.specified?c.value:void 0},set:tb.set},n.attrHooks.contenteditable={set:function(a,b,c){tb.set(a,""===b?!1:b,c)}},n.each(["width","height"],function(a,b){n.attrHooks[b]={set:function(a,c){return""===c?(a.setAttribute(b,"auto"),c):void 0}}})),l.style||(n.attrHooks.style={get:function(a){return a.style.cssText||void 0},set:function(a,b){return a.style.cssText=b+""}});var zb=/^(?:input|select|textarea|button|object)$/i,Ab=/^(?:a|area)$/i;n.fn.extend({prop:function(a,b){return Y(this,n.prop,a,b,arguments.length>1)},removeProp:function(a){return a=n.propFix[a]||a,this.each(function(){try{this[a]=void 0,delete this[a]}catch(b){}})}}),n.extend({prop:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return 1===f&&n.isXMLDoc(a)||(b=n.propFix[b]||b,e=n.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){var b=n.find.attr(a,"tabindex");return b?parseInt(b,10):zb.test(a.nodeName)||Ab.test(a.nodeName)&&a.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),l.hrefNormalized||n.each(["href","src"],function(a,b){n.propHooks[b]={get:function(a){return a.getAttribute(b,4)}}}),l.optSelected||(n.propHooks.selected={get:function(a){var 
b=a.parentNode;return b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex),null},set:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex)}}),n.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){n.propFix[this.toLowerCase()]=this}),l.enctype||(n.propFix.enctype="encoding");var Bb=/[\t\r\n\f]/g;function Cb(a){return n.attr(a,"class")||""}n.fn.extend({addClass:function(a){var b,c,d,e,f,g,h,i=0;if(n.isFunction(a))return this.each(function(b){n(this).addClass(a.call(this,b,Cb(this)))});if("string"==typeof a&&a){b=a.match(G)||[];while(c=this[i++])if(e=Cb(c),d=1===c.nodeType&&(" "+e+" ").replace(Bb," ")){g=0;while(f=b[g++])d.indexOf(" "+f+" ")<0&&(d+=f+" ");h=n.trim(d),e!==h&&n.attr(c,"class",h)}}return this},removeClass:function(a){var b,c,d,e,f,g,h,i=0;if(n.isFunction(a))return this.each(function(b){n(this).removeClass(a.call(this,b,Cb(this)))});if(!arguments.length)return this.attr("class","");if("string"==typeof a&&a){b=a.match(G)||[];while(c=this[i++])if(e=Cb(c),d=1===c.nodeType&&(" "+e+" ").replace(Bb," ")){g=0;while(f=b[g++])while(d.indexOf(" "+f+" ")>-1)d=d.replace(" "+f+" "," ");h=n.trim(d),e!==h&&n.attr(c,"class",h)}}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):n.isFunction(a)?this.each(function(c){n(this).toggleClass(a.call(this,c,Cb(this),b),b)}):this.each(function(){var b,d,e,f;if("string"===c){d=0,e=n(this),f=a.match(G)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else void 0!==a&&"boolean"!==c||(b=Cb(this),b&&n._data(this,"__className__",b),n.attr(this,"class",b||a===!1?"":n._data(this,"__className__")||""))})},hasClass:function(a){var b,c,d=0;b=" "+a+" ";while(c=this[d++])if(1===c.nodeType&&(" "+Cb(c)+" ").replace(Bb," ").indexOf(b)>-1)return!0;return!1}}),n.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){n.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),n.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Db=a.location,Eb=n.now(),Fb=/\?/,Gb=/(,)|(\[|{)|(}|])|"(?:[^"\\\r\n]|\\["\\\/bfnrt]|\\u[\da-fA-F]{4})*"\s*:?|true|false|null|-?(?!0\d)\d+(?:\.\d+|)(?:[eE][+-]?\d+|)/g;n.parseJSON=function(b){if(a.JSON&&a.JSON.parse)return a.JSON.parse(b+"");var c,d=null,e=n.trim(b+"");return e&&!n.trim(e.replace(Gb,function(a,b,e,f){return c&&b&&(d=0),0===d?a:(c=e||b,d+=!f-!e,"")}))?Function("return "+e)():n.error("Invalid JSON: "+b)},n.parseXML=function(b){var c,d;if(!b||"string"!=typeof b)return null;try{a.DOMParser?(d=new a.DOMParser,c=d.parseFromString(b,"text/xml")):(c=new a.ActiveXObject("Microsoft.XMLDOM"),c.async="false",c.loadXML(b))}catch(e){c=void 0}return c&&c.documentElement&&!c.getElementsByTagName("parsererror").length||n.error("Invalid XML: "+b),c};var Hb=/#.*$/,Ib=/([?&])_=[^&]*/,Jb=/^(.*?):[ \t]*([^\r\n]*)\r?$/gm,Kb=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Lb=/^(?:GET|HEAD)$/,Mb=/^\/\//,Nb=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,Ob={},Pb={},Qb="*/".concat("*"),Rb=Db.href,Sb=Nb.exec(Rb.toLowerCase())||[];function Tb(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var 
d,e=0,f=b.toLowerCase().match(G)||[];if(n.isFunction(c))while(d=f[e++])"+"===d.charAt(0)?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function Ub(a,b,c,d){var e={},f=a===Pb;function g(h){var i;return e[h]=!0,n.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function Vb(a,b){var c,d,e=n.ajaxSettings.flatOptions||{};for(d in b)void 0!==b[d]&&((e[d]?a:c||(c={}))[d]=b[d]);return c&&n.extend(!0,a,c),a}function Wb(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===e&&(e=a.mimeType||b.getResponseHeader("Content-Type"));if(e)for(g in h)if(h[g]&&h[g].test(e)){i.unshift(g);break}if(i[0]in c)f=i[0];else{for(g in c){if(!i[0]||a.converters[g+" "+i[0]]){f=g;break}d||(d=g)}f=f||d}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function Xb(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}n.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Rb,type:"GET",isLocal:Kb.test(Sb[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":Qb,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":n.parseJSON,"text xml":n.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?Vb(Vb(a,n.ajaxSettings),b):Vb(n.ajaxSettings,a)},ajaxPrefilter:Tb(Ob),ajaxTransport:Tb(Pb),ajax:function(b,c){"object"==typeof b&&(c=b,b=void 0),c=c||{};var d,e,f,g,h,i,j,k,l=n.ajaxSetup({},c),m=l.context||l,o=l.context&&(m.nodeType||m.jquery)?n(m):n.event,p=n.Deferred(),q=n.Callbacks("once memory"),r=l.statusCode||{},s={},t={},u=0,v="canceled",w={readyState:0,getResponseHeader:function(a){var b;if(2===u){if(!k){k={};while(b=Jb.exec(g))k[b[1].toLowerCase()]=b[2]}b=k[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===u?g:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return u||(a=t[c]=t[c]||a,s[a]=b),this},overrideMimeType:function(a){return u||(l.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>u)for(b in a)r[b]=[r[b],a[b]];else w.always(a[w.status]);return this},abort:function(a){var b=a||v;return j&&j.abort(b),y(0,b),this}};if(p.promise(w).complete=q.add,w.success=w.done,w.error=w.fail,l.url=((b||l.url||Rb)+"").replace(Hb,"").replace(Mb,Sb[1]+"//"),l.type=c.method||c.type||l.method||l.type,l.dataTypes=n.trim(l.dataType||"*").toLowerCase().match(G)||[""],null==l.crossDomain&&(d=Nb.exec(l.url.toLowerCase()),l.crossDomain=!(!d||d[1]===Sb[1]&&d[2]===Sb[2]&&(d[3]||("http:"===d[1]?"80":"443"))===(Sb[3]||("http:"===Sb[1]?"80":"443")))),l.data&&l.processData&&"string"!=typeof l.data&&(l.data=n.param(l.data,l.traditional)),Ub(Ob,l,c,w),2===u)return 
w;i=n.event&&l.global,i&&0===n.active++&&n.event.trigger("ajaxStart"),l.type=l.type.toUpperCase(),l.hasContent=!Lb.test(l.type),f=l.url,l.hasContent||(l.data&&(f=l.url+=(Fb.test(f)?"&":"?")+l.data,delete l.data),l.cache===!1&&(l.url=Ib.test(f)?f.replace(Ib,"$1_="+Eb++):f+(Fb.test(f)?"&":"?")+"_="+Eb++)),l.ifModified&&(n.lastModified[f]&&w.setRequestHeader("If-Modified-Since",n.lastModified[f]),n.etag[f]&&w.setRequestHeader("If-None-Match",n.etag[f])),(l.data&&l.hasContent&&l.contentType!==!1||c.contentType)&&w.setRequestHeader("Content-Type",l.contentType),w.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+("*"!==l.dataTypes[0]?", "+Qb+"; q=0.01":""):l.accepts["*"]);for(e in l.headers)w.setRequestHeader(e,l.headers[e]);if(l.beforeSend&&(l.beforeSend.call(m,w,l)===!1||2===u))return w.abort();v="abort";for(e in{success:1,error:1,complete:1})w[e](l[e]);if(j=Ub(Pb,l,c,w)){if(w.readyState=1,i&&o.trigger("ajaxSend",[w,l]),2===u)return w;l.async&&l.timeout>0&&(h=a.setTimeout(function(){w.abort("timeout")},l.timeout));try{u=1,j.send(s,y)}catch(x){if(!(2>u))throw x;y(-1,x)}}else y(-1,"No Transport");function y(b,c,d,e){var k,s,t,v,x,y=c;2!==u&&(u=2,h&&a.clearTimeout(h),j=void 0,g=e||"",w.readyState=b>0?4:0,k=b>=200&&300>b||304===b,d&&(v=Wb(l,w,d)),v=Xb(l,v,w,k),k?(l.ifModified&&(x=w.getResponseHeader("Last-Modified"),x&&(n.lastModified[f]=x),x=w.getResponseHeader("etag"),x&&(n.etag[f]=x)),204===b||"HEAD"===l.type?y="nocontent":304===b?y="notmodified":(y=v.state,s=v.data,t=v.error,k=!t)):(t=y,!b&&y||(y="error",0>b&&(b=0))),w.status=b,w.statusText=(c||y)+"",k?p.resolveWith(m,[s,y,w]):p.rejectWith(m,[w,y,t]),w.statusCode(r),r=void 0,i&&o.trigger(k?"ajaxSuccess":"ajaxError",[w,l,k?s:t]),q.fireWith(m,[w,y]),i&&(o.trigger("ajaxComplete",[w,l]),--n.active||n.event.trigger("ajaxStop")))}return w},getJSON:function(a,b,c){return n.get(a,b,c,"json")},getScript:function(a,b){return n.get(a,void 0,b,"script")}}),n.each(["get","post"],function(a,b){n[b]=function(a,c,d,e){return n.isFunction(c)&&(e=e||d,d=c,c=void 0),n.ajax(n.extend({url:a,type:b,dataType:e,data:c,success:d},n.isPlainObject(a)&&a))}}),n._evalUrl=function(a){return n.ajax({url:a,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,"throws":!0})},n.fn.extend({wrapAll:function(a){if(n.isFunction(a))return this.each(function(b){n(this).wrapAll(a.call(this,b))});if(this[0]){var b=n(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&1===a.firstChild.nodeType)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return n.isFunction(a)?this.each(function(b){n(this).wrapInner(a.call(this,b))}):this.each(function(){var b=n(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=n.isFunction(a);return this.each(function(c){n(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){n.nodeName(this,"body")||n(this).replaceWith(this.childNodes)}).end()}});function Yb(a){return a.style&&a.style.display||n.css(a,"display")}function Zb(a){if(!n.contains(a.ownerDocument||d,a))return!0;while(a&&1===a.nodeType){if("none"===Yb(a)||"hidden"===a.type)return!0;a=a.parentNode}return!1}n.expr.filters.hidden=function(a){return l.reliableHiddenOffsets()?a.offsetWidth<=0&&a.offsetHeight<=0&&!a.getClientRects().length:Zb(a)},n.expr.filters.visible=function(a){return!n.expr.filters.hidden(a)};var 
$b=/%20/g,_b=/\[\]$/,ac=/\r?\n/g,bc=/^(?:submit|button|image|reset|file)$/i,cc=/^(?:input|select|textarea|keygen)/i;function dc(a,b,c,d){var e;if(n.isArray(b))n.each(b,function(b,e){c||_b.test(a)?d(a,e):dc(a+"["+("object"==typeof e&&null!=e?b:"")+"]",e,c,d)});else if(c||"object"!==n.type(b))d(a,b);else for(e in b)dc(a+"["+e+"]",b[e],c,d)}n.param=function(a,b){var c,d=[],e=function(a,b){b=n.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=n.ajaxSettings&&n.ajaxSettings.traditional),n.isArray(a)||a.jquery&&!n.isPlainObject(a))n.each(a,function(){e(this.name,this.value)});else for(c in a)dc(c,a[c],b,e);return d.join("&").replace($b,"+")},n.fn.extend({serialize:function(){return n.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=n.prop(this,"elements");return a?n.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!n(this).is(":disabled")&&cc.test(this.nodeName)&&!bc.test(a)&&(this.checked||!Z.test(a))}).map(function(a,b){var c=n(this).val();return null==c?null:n.isArray(c)?n.map(c,function(a){return{name:b.name,value:a.replace(ac,"\r\n")}}):{name:b.name,value:c.replace(ac,"\r\n")}}).get()}}),n.ajaxSettings.xhr=void 0!==a.ActiveXObject?function(){return this.isLocal?ic():d.documentMode>8?hc():/^(get|post|head|put|delete|options)$/i.test(this.type)&&hc()||ic()}:hc;var ec=0,fc={},gc=n.ajaxSettings.xhr();a.attachEvent&&a.attachEvent("onunload",function(){for(var a in fc)fc[a](void 0,!0)}),l.cors=!!gc&&"withCredentials"in gc,gc=l.ajax=!!gc,gc&&n.ajaxTransport(function(b){if(!b.crossDomain||l.cors){var c;return{send:function(d,e){var f,g=b.xhr(),h=++ec;if(g.open(b.type,b.url,b.async,b.username,b.password),b.xhrFields)for(f in b.xhrFields)g[f]=b.xhrFields[f];b.mimeType&&g.overrideMimeType&&g.overrideMimeType(b.mimeType),b.crossDomain||d["X-Requested-With"]||(d["X-Requested-With"]="XMLHttpRequest");for(f in d)void 0!==d[f]&&g.setRequestHeader(f,d[f]+"");g.send(b.hasContent&&b.data||null),c=function(a,d){var f,i,j;if(c&&(d||4===g.readyState))if(delete fc[h],c=void 0,g.onreadystatechange=n.noop,d)4!==g.readyState&&g.abort();else{j={},f=g.status,"string"==typeof g.responseText&&(j.text=g.responseText);try{i=g.statusText}catch(k){i=""}f||!b.isLocal||b.crossDomain?1223===f&&(f=204):f=j.text?200:404}j&&e(f,i,j,g.getAllResponseHeaders())},b.async?4===g.readyState?a.setTimeout(c):g.onreadystatechange=fc[h]=c:c()},abort:function(){c&&c(void 0,!0)}}}});function hc(){try{return new a.XMLHttpRequest}catch(b){}}function ic(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}n.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(a){return n.globalEval(a),a}}}),n.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),n.ajaxTransport("script",function(a){if(a.crossDomain){var b,c=d.head||n("head")[0]||d.documentElement;return{send:function(e,f){b=d.createElement("script"),b.async=!0,a.scriptCharset&&(b.charset=a.scriptCharset),b.src=a.url,b.onload=b.onreadystatechange=function(a,c){(c||!b.readyState||/loaded|complete/.test(b.readyState))&&(b.onload=b.onreadystatechange=null,b.parentNode&&b.parentNode.removeChild(b),b=null,c||f(200,"success"))},c.insertBefore(b,c.firstChild)},abort:function(){b&&b.onload(void 0,!0)}}}});var 
jc=[],kc=/(=)\?(?=&|$)|\?\?/;n.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=jc.pop()||n.expando+"_"+Eb++;return this[a]=!0,a}}),n.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(kc.test(b.url)?"url":"string"==typeof b.data&&0===(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&kc.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=n.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(kc,"$1"+e):b.jsonp!==!1&&(b.url+=(Fb.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||n.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){void 0===f?n(a).removeProp(e):a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,jc.push(e)),g&&n.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),n.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||d;var e=x.exec(a),f=!c&&[];return e?[b.createElement(e[1])]:(e=ja([a],b,f),f&&f.length&&n(f).remove(),n.merge([],e.childNodes))};var lc=n.fn.load;n.fn.load=function(a,b,c){if("string"!=typeof a&&lc)return lc.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>-1&&(d=n.trim(a.slice(h,a.length)),a=a.slice(0,h)),n.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&n.ajax({url:a,type:e||"GET",dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?n("<div>").append(n.parseHTML(a)).find(d):a)}).always(c&&function(a,b){g.each(function(){c.apply(this,f||[a.responseText,b,a])})}),this},n.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){n.fn[b]=function(a){return this.on(b,a)}}),n.expr.filters.animated=function(a){return n.grep(n.timers,function(b){return a===b.elem}).length};function mc(a){return n.isWindow(a)?a:9===a.nodeType?a.defaultView||a.parentWindow:!1}n.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=n.css(a,"position"),l=n(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=n.css(a,"top"),i=n.css(a,"left"),j=("absolute"===k||"fixed"===k)&&n.inArray("auto",[f,i])>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),n.isFunction(b)&&(b=b.call(a,c,n.extend({},h))),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},n.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){n.offset.setOffset(this,a,b)});var b,c,d={top:0,left:0},e=this[0],f=e&&e.ownerDocument;if(f)return b=f.documentElement,n.contains(b,e)?("undefined"!=typeof e.getBoundingClientRect&&(d=e.getBoundingClientRect()),c=mc(f),{top:d.top+(c.pageYOffset||b.scrollTop)-(b.clientTop||0),left:d.left+(c.pageXOffset||b.scrollLeft)-(b.clientLeft||0)}):d},position:function(){if(this[0]){var a,b,c={top:0,left:0},d=this[0];return"fixed"===n.css(d,"position")?b=d.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),n.nodeName(a[0],"html")||(c=a.offset()),c.top+=n.css(a[0],"borderTopWidth",!0),c.left+=n.css(a[0],"borderLeftWidth",!0)),{top:b.top-c.top-n.css(d,"marginTop",!0),left:b.left-c.left-n.css(d,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent;while(a&&!n.nodeName(a,"html")&&"static"===n.css(a,"position"))a=a.offsetParent;return a||Qa})}}),n.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,b){var c=/Y/.test(b);n.fn[a]=function(d){return Y(this,function(a,d,e){var 
f=mc(a);return void 0===e?f?b in f?f[b]:f.document.documentElement[d]:a[d]:void(f?f.scrollTo(c?n(f).scrollLeft():e,c?e:n(f).scrollTop()):a[d]=e)},a,d,arguments.length,null)}}),n.each(["top","left"],function(a,b){n.cssHooks[b]=Ua(l.pixelPosition,function(a,c){return c?(c=Sa(a,b),Oa.test(c)?n(a).position()[b]+"px":c):void 0})}),n.each({Height:"height",Width:"width"},function(a,b){n.each({
-padding:"inner"+a,content:b,"":"outer"+a},function(c,d){n.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return Y(this,function(b,c,d){var e;return n.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?n.css(b,c,g):n.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),n.fn.extend({bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}}),n.fn.size=function(){return this.length},n.fn.andSelf=n.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return n});var nc=a.jQuery,oc=a.$;return n.noConflict=function(b){return a.$===n&&(a.$=oc),b&&a.jQuery===n&&(a.jQuery=nc),n},b||(a.jQuery=a.$=n),n});
diff --git a/src/wiki/static/wiki/js/jqueryui/images/animated-overlay.gif b/src/wiki/static/wiki/js/jqueryui/images/animated-overlay.gif
deleted file mode 100644
index d441f75eb..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/animated-overlay.gif and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_diagonals-thick_18_b81900_40x40.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_diagonals-thick_18_b81900_40x40.png
deleted file mode 100644
index ade421935..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_diagonals-thick_18_b81900_40x40.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_diagonals-thick_20_666666_40x40.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_diagonals-thick_20_666666_40x40.png
deleted file mode 100644
index 5eae365ff..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_diagonals-thick_20_666666_40x40.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_flat_10_000000_40x100.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_flat_10_000000_40x100.png
deleted file mode 100644
index 3c8cacbef..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_flat_10_000000_40x100.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_100_f6f6f6_1x400.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_100_f6f6f6_1x400.png
deleted file mode 100644
index 297cc9bae..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_100_f6f6f6_1x400.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_100_fdf5ce_1x400.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_100_fdf5ce_1x400.png
deleted file mode 100644
index 3a8abfc16..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_100_fdf5ce_1x400.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_65_ffffff_1x400.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_65_ffffff_1x400.png
deleted file mode 100644
index 782da8673..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_glass_65_ffffff_1x400.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_gloss-wave_35_f6a828_500x100.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_gloss-wave_35_f6a828_500x100.png
deleted file mode 100644
index edff81cca..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_gloss-wave_35_f6a828_500x100.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png
deleted file mode 100644
index cf22448a7..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png b/src/wiki/static/wiki/js/jqueryui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png
deleted file mode 100644
index 260a68552..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_222222_256x240.png b/src/wiki/static/wiki/js/jqueryui/images/ui-icons_222222_256x240.png
deleted file mode 100644
index 0de629325..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_222222_256x240.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_228ef1_256x240.png b/src/wiki/static/wiki/js/jqueryui/images/ui-icons_228ef1_256x240.png
deleted file mode 100644
index bb48633a1..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_228ef1_256x240.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ef8c08_256x240.png b/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ef8c08_256x240.png
deleted file mode 100644
index 716ce1ea7..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ef8c08_256x240.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ffd27a_256x240.png b/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ffd27a_256x240.png
deleted file mode 100644
index 7993cccfa..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ffd27a_256x240.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ffffff_256x240.png b/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ffffff_256x240.png
deleted file mode 100644
index 625962b51..000000000
Binary files a/src/wiki/static/wiki/js/jqueryui/images/ui-icons_ffffff_256x240.png and /dev/null differ
diff --git a/src/wiki/static/wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.css b/src/wiki/static/wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.css
deleted file mode 100644
index 2ed1fcfe1..000000000
--- a/src/wiki/static/wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.css
+++ /dev/null
@@ -1,5 +0,0 @@
-/*! jQuery UI - v1.10.0 - 2013-02-12
-* http://jqueryui.com
-* Includes: jquery.ui.core.css, jquery.ui.resizable.css
-* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Trebuchet%20MS%2CTahoma%2CVerdana%2CArial%2Csans-serif&fwDefault=bold&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=f6a828&bgTextureHeader=gloss_wave&bgImgOpacityHeader=35&borderColorHeader=e78f08&fcHeader=ffffff&iconColorHeader=ffffff&bgColorContent=eeeeee&bgTextureContent=highlight_soft&bgImgOpacityContent=100&borderColorContent=dddddd&fcContent=333333&iconColorContent=222222&bgColorDefault=f6f6f6&bgTextureDefault=glass&bgImgOpacityDefault=100&borderColorDefault=cccccc&fcDefault=1c94c4&iconColorDefault=ef8c08&bgColorHover=fdf5ce&bgTextureHover=glass&bgImgOpacityHover=100&borderColorHover=fbcb09&fcHover=c77405&iconColorHover=ef8c08&bgColorActive=ffffff&bgTextureActive=glass&bgImgOpacityActive=65&borderColorActive=fbd850&fcActive=eb8f00&iconColorActive=ef8c08&bgColorHighlight=ffe45c&bgTextureHighlight=highlight_soft&bgImgOpacityHighlight=75&borderColorHighlight=fed22f&fcHighlight=363636&iconColorHighlight=228ef1&bgColorError=b81900&bgTextureError=diagonals_thick&bgImgOpacityError=18&borderColorError=cd0a0a&fcError=ffffff&iconColorError=ffd27a&bgColorOverlay=666666&bgTextureOverlay=diagonals_thick&bgImgOpacityOverlay=20&opacityOverlay=50&bgColorShadow=000000&bgTextureShadow=flat&bgImgOpacityShadow=10&opacityShadow=20&thicknessShadow=5px&offsetTopShadow=-5px&offsetLeftShadow=-5px&cornerRadiusShadow=5px
-* Copyright (c) 2013 jQuery Foundation and other contributors Licensed MIT */.ui-helper-hidden{display:none}.ui-helper-hidden-accessible{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.ui-helper-reset{margin:0;padding:0;border:0;outline:0;line-height:1.3;text-decoration:none;font-size:100%;list-style:none}.ui-helper-clearfix:before,.ui-helper-clearfix:after{content:"";display:table}.ui-helper-clearfix:after{clear:both}.ui-helper-clearfix{min-height:0}.ui-helper-zfix{width:100%;height:100%;top:0;left:0;position:absolute;opacity:0;filter:Alpha(Opacity=0)}.ui-front{z-index:100}.ui-state-disabled{cursor:default!important}.ui-icon{display:block;text-indent:-99999px;overflow:hidden;background-repeat:no-repeat}.ui-widget-overlay{position:fixed;top:0;left:0;width:100%;height:100%}.ui-resizable{position:relative}.ui-resizable-handle{position:absolute;font-size:0.1px;display:block}.ui-resizable-disabled .ui-resizable-handle,.ui-resizable-autohide .ui-resizable-handle{display:none}.ui-resizable-n{cursor:n-resize;height:7px;width:100%;top:-5px;left:0}.ui-resizable-s{cursor:s-resize;height:7px;width:100%;bottom:-5px;left:0}.ui-resizable-e{cursor:e-resize;width:7px;right:-5px;top:0;height:100%}.ui-resizable-w{cursor:w-resize;width:7px;left:-5px;top:0;height:100%}.ui-resizable-se{cursor:se-resize;width:12px;height:12px;right:1px;bottom:1px}.ui-resizable-sw{cursor:sw-resize;width:9px;height:9px;left:-5px;bottom:-5px}.ui-resizable-nw{cursor:nw-resize;width:9px;height:9px;left:-5px;top:-5px}.ui-resizable-ne{cursor:ne-resize;width:9px;height:9px;right:-5px;top:-5px}.ui-widget{font-family:Trebuchet MS,Tahoma,Verdana,Arial,sans-serif;font-size:1.1em}.ui-widget .ui-widget{font-size:1em}.ui-widget input,.ui-widget select,.ui-widget textarea,.ui-widget button{font-family:Trebuchet MS,Tahoma,Verdana,Arial,sans-serif;font-size:1em}.ui-widget-content{border:1px solid #ddd;background:#eee url(images/ui-bg_highlight-soft_100_eeeeee_1x100.png) 50% top repeat-x;color:#333}.ui-widget-content a{color:#333}.ui-widget-header{border:1px solid #e78f08;background:#f6a828 url(images/ui-bg_gloss-wave_35_f6a828_500x100.png) 50% 50% repeat-x;color:#fff;font-weight:bold}.ui-widget-header a{color:#fff}.ui-state-default,.ui-widget-content .ui-state-default,.ui-widget-header .ui-state-default{border:1px solid #ccc;background:#f6f6f6 url(images/ui-bg_glass_100_f6f6f6_1x400.png) 50% 50% repeat-x;font-weight:bold;color:#1c94c4}.ui-state-default a,.ui-state-default a:link,.ui-state-default a:visited{color:#1c94c4;text-decoration:none}.ui-state-hover,.ui-widget-content .ui-state-hover,.ui-widget-header .ui-state-hover,.ui-state-focus,.ui-widget-content .ui-state-focus,.ui-widget-header .ui-state-focus{border:1px solid #fbcb09;background:#fdf5ce url(images/ui-bg_glass_100_fdf5ce_1x400.png) 50% 50% repeat-x;font-weight:bold;color:#c77405}.ui-state-hover a,.ui-state-hover a:hover,.ui-state-hover a:link,.ui-state-hover a:visited{color:#c77405;text-decoration:none}.ui-state-active,.ui-widget-content .ui-state-active,.ui-widget-header .ui-state-active{border:1px solid #fbd850;background:#fff url(images/ui-bg_glass_65_ffffff_1x400.png) 50% 50% repeat-x;font-weight:bold;color:#eb8f00}.ui-state-active a,.ui-state-active a:link,.ui-state-active a:visited{color:#eb8f00;text-decoration:none}.ui-state-highlight,.ui-widget-content .ui-state-highlight,.ui-widget-header .ui-state-highlight{border:1px solid #fed22f;background:#ffe45c url(images/ui-bg_highlight-soft_75_ffe45c_1x100.png) 50% top 
repeat-x;color:#363636}.ui-state-highlight a,.ui-widget-content .ui-state-highlight a,.ui-widget-header .ui-state-highlight a{color:#363636}.ui-state-error,.ui-widget-content .ui-state-error,.ui-widget-header .ui-state-error{border:1px solid #cd0a0a;background:#b81900 url(images/ui-bg_diagonals-thick_18_b81900_40x40.png) 50% 50% repeat;color:#fff}.ui-state-error a,.ui-widget-content .ui-state-error a,.ui-widget-header .ui-state-error a{color:#fff}.ui-state-error-text,.ui-widget-content .ui-state-error-text,.ui-widget-header .ui-state-error-text{color:#fff}.ui-priority-primary,.ui-widget-content .ui-priority-primary,.ui-widget-header .ui-priority-primary{font-weight:bold}.ui-priority-secondary,.ui-widget-content .ui-priority-secondary,.ui-widget-header .ui-priority-secondary{opacity:.7;filter:Alpha(Opacity=70);font-weight:normal}.ui-state-disabled,.ui-widget-content .ui-state-disabled,.ui-widget-header .ui-state-disabled{opacity:.35;filter:Alpha(Opacity=35);background-image:none}.ui-state-disabled .ui-icon{filter:Alpha(Opacity=35)}.ui-icon{width:16px;height:16px;background-position:16px 16px}.ui-icon,.ui-widget-content .ui-icon{background-image:url(images/ui-icons_222222_256x240.png)}.ui-widget-header .ui-icon{background-image:url(images/ui-icons_ffffff_256x240.png)}.ui-state-default .ui-icon{background-image:url(images/ui-icons_ef8c08_256x240.png)}.ui-state-hover .ui-icon,.ui-state-focus .ui-icon{background-image:url(images/ui-icons_ef8c08_256x240.png)}.ui-state-active .ui-icon{background-image:url(images/ui-icons_ef8c08_256x240.png)}.ui-state-highlight .ui-icon{background-image:url(images/ui-icons_228ef1_256x240.png)}.ui-state-error .ui-icon,.ui-state-error-text .ui-icon{background-image:url(images/ui-icons_ffd27a_256x240.png)}.ui-icon-carat-1-n{background-position:0 0}.ui-icon-carat-1-ne{background-position:-16px 0}.ui-icon-carat-1-e{background-position:-32px 0}.ui-icon-carat-1-se{background-position:-48px 0}.ui-icon-carat-1-s{background-position:-64px 0}.ui-icon-carat-1-sw{background-position:-80px 0}.ui-icon-carat-1-w{background-position:-96px 0}.ui-icon-carat-1-nw{background-position:-112px 0}.ui-icon-carat-2-n-s{background-position:-128px 0}.ui-icon-carat-2-e-w{background-position:-144px 0}.ui-icon-triangle-1-n{background-position:0 -16px}.ui-icon-triangle-1-ne{background-position:-16px -16px}.ui-icon-triangle-1-e{background-position:-32px -16px}.ui-icon-triangle-1-se{background-position:-48px -16px}.ui-icon-triangle-1-s{background-position:-64px -16px}.ui-icon-triangle-1-sw{background-position:-80px -16px}.ui-icon-triangle-1-w{background-position:-96px -16px}.ui-icon-triangle-1-nw{background-position:-112px -16px}.ui-icon-triangle-2-n-s{background-position:-128px -16px}.ui-icon-triangle-2-e-w{background-position:-144px -16px}.ui-icon-arrow-1-n{background-position:0 -32px}.ui-icon-arrow-1-ne{background-position:-16px -32px}.ui-icon-arrow-1-e{background-position:-32px -32px}.ui-icon-arrow-1-se{background-position:-48px -32px}.ui-icon-arrow-1-s{background-position:-64px -32px}.ui-icon-arrow-1-sw{background-position:-80px -32px}.ui-icon-arrow-1-w{background-position:-96px -32px}.ui-icon-arrow-1-nw{background-position:-112px -32px}.ui-icon-arrow-2-n-s{background-position:-128px -32px}.ui-icon-arrow-2-ne-sw{background-position:-144px -32px}.ui-icon-arrow-2-e-w{background-position:-160px -32px}.ui-icon-arrow-2-se-nw{background-position:-176px -32px}.ui-icon-arrowstop-1-n{background-position:-192px -32px}.ui-icon-arrowstop-1-e{background-position:-208px 
-32px}.ui-icon-arrowstop-1-s{background-position:-224px -32px}.ui-icon-arrowstop-1-w{background-position:-240px -32px}.ui-icon-arrowthick-1-n{background-position:0 -48px}.ui-icon-arrowthick-1-ne{background-position:-16px -48px}.ui-icon-arrowthick-1-e{background-position:-32px -48px}.ui-icon-arrowthick-1-se{background-position:-48px -48px}.ui-icon-arrowthick-1-s{background-position:-64px -48px}.ui-icon-arrowthick-1-sw{background-position:-80px -48px}.ui-icon-arrowthick-1-w{background-position:-96px -48px}.ui-icon-arrowthick-1-nw{background-position:-112px -48px}.ui-icon-arrowthick-2-n-s{background-position:-128px -48px}.ui-icon-arrowthick-2-ne-sw{background-position:-144px -48px}.ui-icon-arrowthick-2-e-w{background-position:-160px -48px}.ui-icon-arrowthick-2-se-nw{background-position:-176px -48px}.ui-icon-arrowthickstop-1-n{background-position:-192px -48px}.ui-icon-arrowthickstop-1-e{background-position:-208px -48px}.ui-icon-arrowthickstop-1-s{background-position:-224px -48px}.ui-icon-arrowthickstop-1-w{background-position:-240px -48px}.ui-icon-arrowreturnthick-1-w{background-position:0 -64px}.ui-icon-arrowreturnthick-1-n{background-position:-16px -64px}.ui-icon-arrowreturnthick-1-e{background-position:-32px -64px}.ui-icon-arrowreturnthick-1-s{background-position:-48px -64px}.ui-icon-arrowreturn-1-w{background-position:-64px -64px}.ui-icon-arrowreturn-1-n{background-position:-80px -64px}.ui-icon-arrowreturn-1-e{background-position:-96px -64px}.ui-icon-arrowreturn-1-s{background-position:-112px -64px}.ui-icon-arrowrefresh-1-w{background-position:-128px -64px}.ui-icon-arrowrefresh-1-n{background-position:-144px -64px}.ui-icon-arrowrefresh-1-e{background-position:-160px -64px}.ui-icon-arrowrefresh-1-s{background-position:-176px -64px}.ui-icon-arrow-4{background-position:0 -80px}.ui-icon-arrow-4-diag{background-position:-16px -80px}.ui-icon-extlink{background-position:-32px -80px}.ui-icon-newwin{background-position:-48px -80px}.ui-icon-refresh{background-position:-64px -80px}.ui-icon-shuffle{background-position:-80px -80px}.ui-icon-transfer-e-w{background-position:-96px -80px}.ui-icon-transferthick-e-w{background-position:-112px -80px}.ui-icon-folder-collapsed{background-position:0 -96px}.ui-icon-folder-open{background-position:-16px -96px}.ui-icon-document{background-position:-32px -96px}.ui-icon-document-b{background-position:-48px -96px}.ui-icon-note{background-position:-64px -96px}.ui-icon-mail-closed{background-position:-80px -96px}.ui-icon-mail-open{background-position:-96px -96px}.ui-icon-suitcase{background-position:-112px -96px}.ui-icon-comment{background-position:-128px -96px}.ui-icon-person{background-position:-144px -96px}.ui-icon-print{background-position:-160px -96px}.ui-icon-trash{background-position:-176px -96px}.ui-icon-locked{background-position:-192px -96px}.ui-icon-unlocked{background-position:-208px -96px}.ui-icon-bookmark{background-position:-224px -96px}.ui-icon-tag{background-position:-240px -96px}.ui-icon-home{background-position:0 -112px}.ui-icon-flag{background-position:-16px -112px}.ui-icon-calendar{background-position:-32px -112px}.ui-icon-cart{background-position:-48px -112px}.ui-icon-pencil{background-position:-64px -112px}.ui-icon-clock{background-position:-80px -112px}.ui-icon-disk{background-position:-96px -112px}.ui-icon-calculator{background-position:-112px -112px}.ui-icon-zoomin{background-position:-128px -112px}.ui-icon-zoomout{background-position:-144px -112px}.ui-icon-search{background-position:-160px -112px}.ui-icon-wrench{background-position:-176px 
-112px}.ui-icon-gear{background-position:-192px -112px}.ui-icon-heart{background-position:-208px -112px}.ui-icon-star{background-position:-224px -112px}.ui-icon-link{background-position:-240px -112px}.ui-icon-cancel{background-position:0 -128px}.ui-icon-plus{background-position:-16px -128px}.ui-icon-plusthick{background-position:-32px -128px}.ui-icon-minus{background-position:-48px -128px}.ui-icon-minusthick{background-position:-64px -128px}.ui-icon-close{background-position:-80px -128px}.ui-icon-closethick{background-position:-96px -128px}.ui-icon-key{background-position:-112px -128px}.ui-icon-lightbulb{background-position:-128px -128px}.ui-icon-scissors{background-position:-144px -128px}.ui-icon-clipboard{background-position:-160px -128px}.ui-icon-copy{background-position:-176px -128px}.ui-icon-contact{background-position:-192px -128px}.ui-icon-image{background-position:-208px -128px}.ui-icon-video{background-position:-224px -128px}.ui-icon-script{background-position:-240px -128px}.ui-icon-alert{background-position:0 -144px}.ui-icon-info{background-position:-16px -144px}.ui-icon-notice{background-position:-32px -144px}.ui-icon-help{background-position:-48px -144px}.ui-icon-check{background-position:-64px -144px}.ui-icon-bullet{background-position:-80px -144px}.ui-icon-radio-on{background-position:-96px -144px}.ui-icon-radio-off{background-position:-112px -144px}.ui-icon-pin-w{background-position:-128px -144px}.ui-icon-pin-s{background-position:-144px -144px}.ui-icon-play{background-position:0 -160px}.ui-icon-pause{background-position:-16px -160px}.ui-icon-seek-next{background-position:-32px -160px}.ui-icon-seek-prev{background-position:-48px -160px}.ui-icon-seek-end{background-position:-64px -160px}.ui-icon-seek-start{background-position:-80px -160px}.ui-icon-seek-first{background-position:-80px -160px}.ui-icon-stop{background-position:-96px -160px}.ui-icon-eject{background-position:-112px -160px}.ui-icon-volume-off{background-position:-128px -160px}.ui-icon-volume-on{background-position:-144px -160px}.ui-icon-power{background-position:0 -176px}.ui-icon-signal-diag{background-position:-16px -176px}.ui-icon-signal{background-position:-32px -176px}.ui-icon-battery-0{background-position:-48px -176px}.ui-icon-battery-1{background-position:-64px -176px}.ui-icon-battery-2{background-position:-80px -176px}.ui-icon-battery-3{background-position:-96px -176px}.ui-icon-circle-plus{background-position:0 -192px}.ui-icon-circle-minus{background-position:-16px -192px}.ui-icon-circle-close{background-position:-32px -192px}.ui-icon-circle-triangle-e{background-position:-48px -192px}.ui-icon-circle-triangle-s{background-position:-64px -192px}.ui-icon-circle-triangle-w{background-position:-80px -192px}.ui-icon-circle-triangle-n{background-position:-96px -192px}.ui-icon-circle-arrow-e{background-position:-112px -192px}.ui-icon-circle-arrow-s{background-position:-128px -192px}.ui-icon-circle-arrow-w{background-position:-144px -192px}.ui-icon-circle-arrow-n{background-position:-160px -192px}.ui-icon-circle-zoomin{background-position:-176px -192px}.ui-icon-circle-zoomout{background-position:-192px -192px}.ui-icon-circle-check{background-position:-208px -192px}.ui-icon-circlesmall-plus{background-position:0 -208px}.ui-icon-circlesmall-minus{background-position:-16px -208px}.ui-icon-circlesmall-close{background-position:-32px -208px}.ui-icon-squaresmall-plus{background-position:-48px -208px}.ui-icon-squaresmall-minus{background-position:-64px -208px}.ui-icon-squaresmall-close{background-position:-80px 
-208px}.ui-icon-grip-dotted-vertical{background-position:0 -224px}.ui-icon-grip-dotted-horizontal{background-position:-16px -224px}.ui-icon-grip-solid-vertical{background-position:-32px -224px}.ui-icon-grip-solid-horizontal{background-position:-48px -224px}.ui-icon-gripsmall-diagonal-se{background-position:-64px -224px}.ui-icon-grip-diagonal-se{background-position:-80px -224px}.ui-corner-all,.ui-corner-top,.ui-corner-left,.ui-corner-tl{border-top-left-radius:4px}.ui-corner-all,.ui-corner-top,.ui-corner-right,.ui-corner-tr{border-top-right-radius:4px}.ui-corner-all,.ui-corner-bottom,.ui-corner-left,.ui-corner-bl{border-bottom-left-radius:4px}.ui-corner-all,.ui-corner-bottom,.ui-corner-right,.ui-corner-br{border-bottom-right-radius:4px}.ui-widget-overlay{background:#666 url(images/ui-bg_diagonals-thick_20_666666_40x40.png) 50% 50% repeat;opacity:.5;filter:Alpha(Opacity=50)}.ui-widget-shadow{margin:-5px 0 0 -5px;padding:5px;background:#000 url(images/ui-bg_flat_10_000000_40x100.png) 50% 50% repeat-x;opacity:.2;filter:Alpha(Opacity=20);border-radius:5px}
diff --git a/src/wiki/static/wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.js b/src/wiki/static/wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.js
deleted file mode 100644
index e5794e9d1..000000000
--- a/src/wiki/static/wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery UI - v1.10.0 - 2013-02-12
-* http://jqueryui.com
-* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.resizable.js
-* Copyright (c) 2013 jQuery Foundation and other contributors Licensed MIT */
-
-(function(e,t){function i(t,n){var r,i,o,u=t.nodeName.toLowerCase();return"area"===u?(r=t.parentNode,i=r.name,!t.href||!i||r.nodeName.toLowerCase()!=="map"?!1:(o=e("img[usemap=#"+i+"]")[0],!!o&&s(o))):(/input|select|textarea|button|object/.test(u)?!t.disabled:"a"===u?t.href||n:n)&&s(t)}function s(t){return e.expr.filters.visible(t)&&!e(t).parents().addBack().filter(function(){return e.css(this,"visibility")==="hidden"}).length}var n=0,r=/^ui-id-\d+$/;e.ui=e.ui||{};if(e.ui.version)return;e.extend(e.ui,{version:"1.10.0",keyCode:{BACKSPACE:8,COMMA:188,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,LEFT:37,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SPACE:32,TAB:9,UP:38}}),e.fn.extend({_focus:e.fn.focus,focus:function(t,n){return typeof t=="number"?this.each(function(){var r=this;setTimeout(function(){e(r).focus(),n&&n.call(r)},t)}):this._focus.apply(this,arguments)},scrollParent:function(){var t;return e.ui.ie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?t=this.parents().filter(function(){return/(relative|absolute|fixed)/.test(e.css(this,"position"))&&/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0):t=this.parents().filter(function(){return/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0),/fixed/.test(this.css("position"))||!t.length?e(document):t},zIndex:function(n){if(n!==t)return this.css("zIndex",n);if(this.length){var r=e(this[0]),i,s;while(r.length&&r[0]!==document){i=r.css("position");if(i==="absolute"||i==="relative"||i==="fixed"){s=parseInt(r.css("zIndex"),10);if(!isNaN(s)&&s!==0)return s}r=r.parent()}}return 0},uniqueId:function(){return this.each(function(){this.id||(this.id="ui-id-"+ ++n)})},removeUniqueId:function(){return this.each(function(){r.test(this.id)&&e(this).removeAttr("id")})}}),e.extend(e.expr[":"],{data:e.expr.createPseudo?e.expr.createPseudo(function(t){return function(n){return!!e.data(n,t)}}):function(t,n,r){return!!e.data(t,r[3])},focusable:function(t){return i(t,!isNaN(e.attr(t,"tabindex")))},tabbable:function(t){var n=e.attr(t,"tabindex"),r=isNaN(n);return(r||n>=0)&&i(t,!r)}}),e("<a>").outerWidth(1).jquery||e.each(["Width","Height"],function(n,r){function u(t,n,r,s){return e.each(i,function(){n-=parseFloat(e.css(t,"padding"+this))||0,r&&(n-=parseFloat(e.css(t,"border"+this+"Width"))||0),s&&(n-=parseFloat(e.css(t,"margin"+this))||0)}),n}var i=r==="Width"?["Left","Right"]:["Top","Bottom"],s=r.toLowerCase(),o={innerWidth:e.fn.innerWidth,innerHeight:e.fn.innerHeight,outerWidth:e.fn.outerWidth,outerHeight:e.fn.outerHeight};e.fn["inner"+r]=function(n){return n===t?o["inner"+r].call(this):this.each(function(){e(this).css(s,u(this,n)+"px")})},e.fn["outer"+r]=function(t,n){return typeof t!="number"?o["outer"+r].call(this,t):this.each(function(){e(this).css(s,u(this,t,!0,n)+"px")})}}),e.fn.addBack||(e.fn.addBack=function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}),e("<a>").data("a-b","a").removeData("a-b").data("a-b")&&(e.fn.removeData=function(t){return function(n){return arguments.length?t.call(this,e.camelCase(n)):t.call(this)}}(e.fn.removeData)),e.ui.ie=!!/msie [\w.]+/.exec(navigator.userAgent.toLowerCase()),e.support.selectstart="onselectstart"in document.createElement("div"),e.fn.extend({disableSelection:function(){return 
this.bind((e.support.selectstart?"selectstart":"mousedown")+".ui-disableSelection",function(e){e.preventDefault()})},enableSelection:function(){return this.unbind(".ui-disableSelection")}}),e.extend(e.ui,{plugin:{add:function(t,n,r){var i,s=e.ui[t].prototype;for(i in r)s.plugins[i]=s.plugins[i]||[],s.plugins[i].push([n,r[i]])},call:function(e,t,n){var r,i=e.plugins[t];if(!i||!e.element[0].parentNode||e.element[0].parentNode.nodeType===11)return;for(r=0;r<i.length;r++)e.options[i[r][0]]&&i[r][1].apply(e.element,n)}},hasScroll:function(t,n){if(e(t).css("overflow")==="hidden")return!1;var r=n&&n==="left"?"scrollLeft":"scrollTop",i=!1;return t[r]>0?!0:(t[r]=1,i=t[r]>0,t[r]=0,i)}})})(jQuery);(function(e,t){var n=0,r=Array.prototype.slice,i=e.cleanData;e.cleanData=function(t){for(var n=0,r;(r=t[n])!=null;n++)try{e(r).triggerHandler("remove")}catch(s){}i(t)},e.widget=function(t,n,r){var i,s,o,u,a={},f=t.split(".")[0];t=t.split(".")[1],i=f+"-"+t,r||(r=n,n=e.Widget),e.expr[":"][i.toLowerCase()]=function(t){return!!e.data(t,i)},e[f]=e[f]||{},s=e[f][t],o=e[f][t]=function(e,t){if(!this._createWidget)return new o(e,t);arguments.length&&this._createWidget(e,t)},e.extend(o,s,{version:r.version,_proto:e.extend({},r),_childConstructors:[]}),u=new n,u.options=e.widget.extend({},u.options),e.each(r,function(t,r){if(!e.isFunction(r)){a[t]=r;return}a[t]=function(){var e=function(){return n.prototype[t].apply(this,arguments)},i=function(e){return n.prototype[t].apply(this,e)};return function(){var t=this._super,n=this._superApply,s;return this._super=e,this._superApply=i,s=r.apply(this,arguments),this._super=t,this._superApply=n,s}}()}),o.prototype=e.widget.extend(u,{widgetEventPrefix:s?u.widgetEventPrefix:t},a,{constructor:o,namespace:f,widgetName:t,widgetFullName:i}),s?(e.each(s._childConstructors,function(t,n){var r=n.prototype;e.widget(r.namespace+"."+r.widgetName,o,n._proto)}),delete s._childConstructors):n._childConstructors.push(o),e.widget.bridge(t,o)},e.widget.extend=function(n){var i=r.call(arguments,1),s=0,o=i.length,u,a;for(;s<o;s++)for(u in i[s])a=i[s][u],i[s].hasOwnProperty(u)&&a!==t&&(e.isPlainObject(a)?n[u]=e.isPlainObject(n[u])?e.widget.extend({},n[u],a):e.widget.extend({},a):n[u]=a);return n},e.widget.bridge=function(n,i){var s=i.prototype.widgetFullName||n;e.fn[n]=function(o){var u=typeof o=="string",a=r.call(arguments,1),f=this;return o=!u&&a.length?e.widget.extend.apply(null,[o].concat(a)):o,u?this.each(function(){var r,i=e.data(this,s);if(!i)return e.error("cannot call methods on "+n+" prior to initialization; "+"attempted to call method '"+o+"'");if(!e.isFunction(i[o])||o.charAt(0)==="_")return e.error("no such method '"+o+"' for "+n+" widget instance");r=i[o].apply(i,a);if(r!==i&&r!==t)return f=r&&r.jquery?f.pushStack(r.get()):r,!1}):this.each(function(){var t=e.data(this,s);t?t.option(o||{})._init():e.data(this,s,new 
i(o,this))}),f}},e.Widget=function(){},e.Widget._childConstructors=[],e.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",defaultElement:"<div>",options:{disabled:!1,create:null},_createWidget:function(t,r){r=e(r||this.defaultElement||this)[0],this.element=e(r),this.uuid=n++,this.eventNamespace="."+this.widgetName+this.uuid,this.options=e.widget.extend({},this.options,this._getCreateOptions(),t),this.bindings=e(),this.hoverable=e(),this.focusable=e(),r!==this&&(e.data(r,this.widgetFullName,this),this._on(!0,this.element,{remove:function(e){e.target===r&&this.destroy()}}),this.document=e(r.style?r.ownerDocument:r.document||r),this.window=e(this.document[0].defaultView||this.document[0].parentWindow)),this._create(),this._trigger("create",null,this._getCreateEventData()),this._init()},_getCreateOptions:e.noop,_getCreateEventData:e.noop,_create:e.noop,_init:e.noop,destroy:function(){this._destroy(),this.element.unbind(this.eventNamespace).removeData(this.widgetName).removeData(this.widgetFullName).removeData(e.camelCase(this.widgetFullName)),this.widget().unbind(this.eventNamespace).removeAttr("aria-disabled").removeClass(this.widgetFullName+"-disabled "+"ui-state-disabled"),this.bindings.unbind(this.eventNamespace),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")},_destroy:e.noop,widget:function(){return this.element},option:function(n,r){var i=n,s,o,u;if(arguments.length===0)return e.widget.extend({},this.options);if(typeof n=="string"){i={},s=n.split("."),n=s.shift();if(s.length){o=i[n]=e.widget.extend({},this.options[n]);for(u=0;u<s.length-1;u++)o[s[u]]=o[s[u]]||{},o=o[s[u]];n=s.pop();if(r===t)return o[n]===t?null:o[n];o[n]=r}else{if(r===t)return this.options[n]===t?null:this.options[n];i[n]=r}}return this._setOptions(i),this},_setOptions:function(e){var t;for(t in e)this._setOption(t,e[t]);return this},_setOption:function(e,t){return this.options[e]=t,e==="disabled"&&(this.widget().toggleClass(this.widgetFullName+"-disabled ui-state-disabled",!!t).attr("aria-disabled",t),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")),this},enable:function(){return this._setOption("disabled",!1)},disable:function(){return this._setOption("disabled",!0)},_on:function(t,n,r){var i,s=this;typeof t!="boolean"&&(r=n,n=t,t=!1),r?(n=i=e(n),this.bindings=this.bindings.add(n)):(r=n,n=this.element,i=this.widget()),e.each(r,function(r,o){function u(){if(!t&&(s.options.disabled===!0||e(this).hasClass("ui-state-disabled")))return;return(typeof o=="string"?s[o]:o).apply(s,arguments)}typeof o!="string"&&(u.guid=o.guid=o.guid||u.guid||e.guid++);var a=r.match(/^(\w+)\s*(.*)$/),f=a[1]+s.eventNamespace,l=a[2];l?i.delegate(l,f,u):n.bind(f,u)})},_off:function(e,t){t=(t||"").split(" ").join(this.eventNamespace+" ")+this.eventNamespace,e.unbind(t).undelegate(t)},_delay:function(e,t){function n(){return(typeof e=="string"?r[e]:e).apply(r,arguments)}var r=this;return setTimeout(n,t||0)},_hoverable:function(t){this.hoverable=this.hoverable.add(t),this._on(t,{mouseenter:function(t){e(t.currentTarget).addClass("ui-state-hover")},mouseleave:function(t){e(t.currentTarget).removeClass("ui-state-hover")}})},_focusable:function(t){this.focusable=this.focusable.add(t),this._on(t,{focusin:function(t){e(t.currentTarget).addClass("ui-state-focus")},focusout:function(t){e(t.currentTarget).removeClass("ui-state-focus")}})},_trigger:function(t,n,r){var 
i,s,o=this.options[t];r=r||{},n=e.Event(n),n.type=(t===this.widgetEventPrefix?t:this.widgetEventPrefix+t).toLowerCase(),n.target=this.element[0],s=n.originalEvent;if(s)for(i in s)i in n||(n[i]=s[i]);return this.element.trigger(n,r),!(e.isFunction(o)&&o.apply(this.element[0],[n].concat(r))===!1||n.isDefaultPrevented())}},e.each({show:"fadeIn",hide:"fadeOut"},function(t,n){e.Widget.prototype["_"+t]=function(r,i,s){typeof i=="string"&&(i={effect:i});var o,u=i?i===!0||typeof i=="number"?n:i.effect||n:t;i=i||{},typeof i=="number"&&(i={duration:i}),o=!e.isEmptyObject(i),i.complete=s,i.delay&&r.delay(i.delay),o&&e.effects&&e.effects.effect[u]?r[t](i):u!==t&&r[u]?r[u](i.duration,i.easing,s):r.queue(function(n){e(this)[t](),s&&s.call(r[0]),n()})}})})(jQuery);(function(e,t){var n=!1;e(document).mouseup(function(){n=!1}),e.widget("ui.mouse",{version:"1.10.0",options:{cancel:"input,textarea,button,select,option",distance:1,delay:0},_mouseInit:function(){var t=this;this.element.bind("mousedown."+this.widgetName,function(e){return t._mouseDown(e)}).bind("click."+this.widgetName,function(n){if(!0===e.data(n.target,t.widgetName+".preventClickEvent"))return e.removeData(n.target,t.widgetName+".preventClickEvent"),n.stopImmediatePropagation(),!1}),this.started=!1},_mouseDestroy:function(){this.element.unbind("."+this.widgetName),this._mouseMoveDelegate&&e(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate)},_mouseDown:function(t){if(n)return;this._mouseStarted&&this._mouseUp(t),this._mouseDownEvent=t;var r=this,i=t.which===1,s=typeof this.options.cancel=="string"&&t.target.nodeName?e(t.target).closest(this.options.cancel).length:!1;if(!i||s||!this._mouseCapture(t))return!0;this.mouseDelayMet=!this.options.delay,this.mouseDelayMet||(this._mouseDelayTimer=setTimeout(function(){r.mouseDelayMet=!0},this.options.delay));if(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)){this._mouseStarted=this._mouseStart(t)!==!1;if(!this._mouseStarted)return t.preventDefault(),!0}return!0===e.data(t.target,this.widgetName+".preventClickEvent")&&e.removeData(t.target,this.widgetName+".preventClickEvent"),this._mouseMoveDelegate=function(e){return r._mouseMove(e)},this._mouseUpDelegate=function(e){return r._mouseUp(e)},e(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate),t.preventDefault(),n=!0,!0},_mouseMove:function(t){return e.ui.ie&&(!document.documentMode||document.documentMode<9)&&!t.button?this._mouseUp(t):this._mouseStarted?(this._mouseDrag(t),t.preventDefault()):(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)&&(this._mouseStarted=this._mouseStart(this._mouseDownEvent,t)!==!1,this._mouseStarted?this._mouseDrag(t):this._mouseUp(t)),!this._mouseStarted)},_mouseUp:function(t){return e(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate),this._mouseStarted&&(this._mouseStarted=!1,t.target===this._mouseDownEvent.target&&e.data(t.target,this.widgetName+".preventClickEvent",!0),this._mouseStop(t)),!1},_mouseDistanceMet:function(e){return Math.max(Math.abs(this._mouseDownEvent.pageX-e.pageX),Math.abs(this._mouseDownEvent.pageY-e.pageY))>=this.options.distance},_mouseDelayMet:function(){return this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return!0}})})(jQuery);(function(e,t){function n(e){return parseInt(e,10)||0}function 
r(e){return!isNaN(parseInt(e,10))}e.widget("ui.resizable",e.ui.mouse,{version:"1.10.0",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:90,resize:null,start:null,stop:null},_create:function(){var t,n,r,i,s,o=this,u=this.options;this.element.addClass("ui-resizable"),e.extend(this,{_aspectRatio:!!u.aspectRatio,aspectRatio:u.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:u.helper||u.ghost||u.animate?u.helper||"ui-resizable-helper":null}),this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)&&(this.element.wrap(e("<div class='ui-wrapper' style='overflow: hidden;'></div>").css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")})),this.element=this.element.parent().data("ui-resizable",this.element.data("ui-resizable")),this.elementIsWrapper=!0,this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")}),this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0}),this.originalResizeStyle=this.originalElement.css("resize"),this.originalElement.css("resize","none"),this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"})),this.originalElement.css({margin:this.originalElement.css("margin")}),this._proportionallyResize()),this.handles=u.handles||(e(".ui-resizable-handle",this.element).length?{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"}:"e,s,se");if(this.handles.constructor===String){this.handles==="all"&&(this.handles="n,e,s,w,se,sw,ne,nw"),t=this.handles.split(","),this.handles={};for(n=0;n<t.length;n++)r=e.trim(t[n]),s="ui-resizable-"+r,i=e("<div class='ui-resizable-handle "+s+"'></div>"),i.css({zIndex:u.zIndex}),"se"===r&&i.addClass("ui-icon ui-icon-gripsmall-diagonal-se"),this.handles[r]=".ui-resizable-"+r,this.element.append(i)}this._renderAxis=function(t){var n,r,i,s;t=t||this.element;for(n in 
this.handles){this.handles[n].constructor===String&&(this.handles[n]=e(this.handles[n],this.element).show()),this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)&&(r=e(this.handles[n],this.element),s=/sw|ne|nw|se|n|s/.test(n)?r.outerHeight():r.outerWidth(),i=["padding",/ne|nw|n/.test(n)?"Top":/se|sw|s/.test(n)?"Bottom":/^e$/.test(n)?"Right":"Left"].join(""),t.css(i,s),this._proportionallyResize());if(!e(this.handles[n]).length)continue}},this._renderAxis(this.element),this._handles=e(".ui-resizable-handle",this.element).disableSelection(),this._handles.mouseover(function(){o.resizing||(this.className&&(i=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i)),o.axis=i&&i[1]?i[1]:"se")}),u.autoHide&&(this._handles.hide(),e(this.element).addClass("ui-resizable-autohide").mouseenter(function(){if(u.disabled)return;e(this).removeClass("ui-resizable-autohide"),o._handles.show()}).mouseleave(function(){if(u.disabled)return;o.resizing||(e(this).addClass("ui-resizable-autohide"),o._handles.hide())})),this._mouseInit()},_destroy:function(){this._mouseDestroy();var t,n=function(t){e(t).removeClass("ui-resizable ui-resizable-disabled ui-resizable-resizing").removeData("resizable").removeData("ui-resizable").unbind(".resizable").find(".ui-resizable-handle").remove()};return this.elementIsWrapper&&(n(this.element),t=this.element,this.originalElement.css({position:t.css("position"),width:t.outerWidth(),height:t.outerHeight(),top:t.css("top"),left:t.css("left")}).insertAfter(t),t.remove()),this.originalElement.css("resize",this.originalResizeStyle),n(this.originalElement),this},_mouseCapture:function(t){var n,r,i=!1;for(n in this.handles){r=e(this.handles[n])[0];if(r===t.target||e.contains(r,t.target))i=!0}return!this.options.disabled&&i},_mouseStart:function(t){var r,i,s,o=this.options,u=this.element.position(),a=this.element;return this.resizing=!0,/absolute/.test(a.css("position"))?a.css({position:"absolute",top:a.css("top"),left:a.css("left")}):a.is(".ui-draggable")&&a.css({position:"absolute",top:u.top,left:u.left}),this._renderProxy(),r=n(this.helper.css("left")),i=n(this.helper.css("top")),o.containment&&(r+=e(o.containment).scrollLeft()||0,i+=e(o.containment).scrollTop()||0),this.offset=this.helper.offset(),this.position={left:r,top:i},this.size=this._helper?{width:a.outerWidth(),height:a.outerHeight()}:{width:a.width(),height:a.height()},this.originalSize=this._helper?{width:a.outerWidth(),height:a.outerHeight()}:{width:a.width(),height:a.height()},this.originalPosition={left:r,top:i},this.sizeDiff={width:a.outerWidth()-a.width(),height:a.outerHeight()-a.height()},this.originalMousePosition={left:t.pageX,top:t.pageY},this.aspectRatio=typeof o.aspectRatio=="number"?o.aspectRatio:this.originalSize.width/this.originalSize.height||1,s=e(".ui-resizable-"+this.axis).css("cursor"),e("body").css("cursor",s==="auto"?this.axis+"-resize":s),a.addClass("ui-resizable-resizing"),this._propagate("start",t),!0},_mouseDrag:function(t){var n,r=this.helper,i={},s=this.originalMousePosition,o=this.axis,u=this.position.top,a=this.position.left,f=this.size.width,l=this.size.height,c=t.pageX-s.left||0,h=t.pageY-s.top||0,p=this._change[o];if(!p)return!1;n=p.apply(this,[t,c,h]),this._updateVirtualBoundaries(t.shiftKey);if(this._aspectRatio||t.shiftKey)n=this._updateRatio(n,t);return 
n=this._respectSize(n,t),this._updateCache(n),this._propagate("resize",t),this.position.top!==u&&(i.top=this.position.top+"px"),this.position.left!==a&&(i.left=this.position.left+"px"),this.size.width!==f&&(i.width=this.size.width+"px"),this.size.height!==l&&(i.height=this.size.height+"px"),r.css(i),!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize(),e.isEmptyObject(i)||this._trigger("resize",t,this.ui()),!1},_mouseStop:function(t){this.resizing=!1;var n,r,i,s,o,u,a,f=this.options,l=this;return this._helper&&(n=this._proportionallyResizeElements,r=n.length&&/textarea/i.test(n[0].nodeName),i=r&&e.ui.hasScroll(n[0],"left")?0:l.sizeDiff.height,s=r?0:l.sizeDiff.width,o={width:l.helper.width()-s,height:l.helper.height()-i},u=parseInt(l.element.css("left"),10)+(l.position.left-l.originalPosition.left)||null,a=parseInt(l.element.css("top"),10)+(l.position.top-l.originalPosition.top)||null,f.animate||this.element.css(e.extend(o,{top:a,left:u})),l.helper.height(l.size.height),l.helper.width(l.size.width),this._helper&&!f.animate&&this._proportionallyResize()),e("body").css("cursor","auto"),this.element.removeClass("ui-resizable-resizing"),this._propagate("stop",t),this._helper&&this.helper.remove(),!1},_updateVirtualBoundaries:function(e){var t,n,i,s,o,u=this.options;o={minWidth:r(u.minWidth)?u.minWidth:0,maxWidth:r(u.maxWidth)?u.maxWidth:Infinity,minHeight:r(u.minHeight)?u.minHeight:0,maxHeight:r(u.maxHeight)?u.maxHeight:Infinity};if(this._aspectRatio||e)t=o.minHeight*this.aspectRatio,i=o.minWidth/this.aspectRatio,n=o.maxHeight*this.aspectRatio,s=o.maxWidth/this.aspectRatio,t>o.minWidth&&(o.minWidth=t),i>o.minHeight&&(o.minHeight=i),n<o.maxWidth&&(o.maxWidth=n),s<o.maxHeight&&(o.maxHeight=s);this._vBoundaries=o},_updateCache:function(e){this.offset=this.helper.offset(),r(e.left)&&(this.position.left=e.left),r(e.top)&&(this.position.top=e.top),r(e.height)&&(this.size.height=e.height),r(e.width)&&(this.size.width=e.width)},_updateRatio:function(e){var t=this.position,n=this.size,i=this.axis;return r(e.height)?e.width=e.height*this.aspectRatio:r(e.width)&&(e.height=e.width/this.aspectRatio),i==="sw"&&(e.left=t.left+(n.width-e.width),e.top=null),i==="nw"&&(e.top=t.top+(n.height-e.height),e.left=t.left+(n.width-e.width)),e},_respectSize:function(e){var t=this._vBoundaries,n=this.axis,i=r(e.width)&&t.maxWidth&&t.maxWidth<e.width,s=r(e.height)&&t.maxHeight&&t.maxHeight<e.height,o=r(e.width)&&t.minWidth&&t.minWidth>e.width,u=r(e.height)&&t.minHeight&&t.minHeight>e.height,a=this.originalPosition.left+this.originalSize.width,f=this.position.top+this.size.height,l=/sw|nw|w/.test(n),c=/nw|ne|n/.test(n);return o&&(e.width=t.minWidth),u&&(e.height=t.minHeight),i&&(e.width=t.maxWidth),s&&(e.height=t.maxHeight),o&&l&&(e.left=a-t.minWidth),i&&l&&(e.left=a-t.maxWidth),u&&c&&(e.top=f-t.minHeight),s&&c&&(e.top=f-t.maxHeight),!e.width&&!e.height&&!e.left&&e.top?e.top=null:!e.width&&!e.height&&!e.top&&e.left&&(e.left=null),e},_proportionallyResize:function(){if(!this._proportionallyResizeElements.length)return;var 
e,t,n,r,i,s=this.helper||this.element;for(e=0;e<this._proportionallyResizeElements.length;e++){i=this._proportionallyResizeElements[e];if(!this.borderDif){this.borderDif=[],n=[i.css("borderTopWidth"),i.css("borderRightWidth"),i.css("borderBottomWidth"),i.css("borderLeftWidth")],r=[i.css("paddingTop"),i.css("paddingRight"),i.css("paddingBottom"),i.css("paddingLeft")];for(t=0;t<n.length;t++)this.borderDif[t]=(parseInt(n[t],10)||0)+(parseInt(r[t],10)||0)}i.css({height:s.height()-this.borderDif[0]-this.borderDif[2]||0,width:s.width()-this.borderDif[1]-this.borderDif[3]||0})}},_renderProxy:function(){var t=this.element,n=this.options;this.elementOffset=t.offset(),this._helper?(this.helper=this.helper||e("<div style='overflow:hidden;'></div>"),this.helper.addClass(this._helper).css({width:this.element.outerWidth()-1,height:this.element.outerHeight()-1,position:"absolute",left:this.elementOffset.left+"px",top:this.elementOffset.top+"px",zIndex:++n.zIndex}),this.helper.appendTo("body").disableSelection()):this.helper=this.element},_change:{e:function(e,t){return{width:this.originalSize.width+t}},w:function(e,t){var n=this.originalSize,r=this.originalPosition;return{left:r.left+t,width:n.width-t}},n:function(e,t,n){var r=this.originalSize,i=this.originalPosition;return{top:i.top+n,height:r.height-n}},s:function(e,t,n){return{height:this.originalSize.height+n}},se:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},sw:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[t,n,r]))},ne:function(t,n,r){return e.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},nw:function(t,n,r){return e.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[t,n,r]))}},_propagate:function(t,n){e.ui.plugin.call(this,t,[n,this.ui()]),t!=="resize"&&this._trigger(t,n,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}),e.ui.plugin.add("resizable","animate",{stop:function(t){var n=e(this).data("ui-resizable"),r=n.options,i=n._proportionallyResizeElements,s=i.length&&/textarea/i.test(i[0].nodeName),o=s&&e.ui.hasScroll(i[0],"left")?0:n.sizeDiff.height,u=s?0:n.sizeDiff.width,a={width:n.size.width-u,height:n.size.height-o},f=parseInt(n.element.css("left"),10)+(n.position.left-n.originalPosition.left)||null,l=parseInt(n.element.css("top"),10)+(n.position.top-n.originalPosition.top)||null;n.element.animate(e.extend(a,l&&f?{top:l,left:f}:{}),{duration:r.animateDuration,easing:r.animateEasing,step:function(){var r={width:parseInt(n.element.css("width"),10),height:parseInt(n.element.css("height"),10),top:parseInt(n.element.css("top"),10),left:parseInt(n.element.css("left"),10)};i&&i.length&&e(i[0]).css({width:r.width,height:r.height}),n._updateCache(r),n._propagate("resize",t)}})}}),e.ui.plugin.add("resizable","containment",{start:function(){var t,r,i,s,o,u,a,f=e(this).data("ui-resizable"),l=f.options,c=f.element,h=l.containment,p=h instanceof 
e?h.get(0):/parent/.test(h)?c.parent().get(0):h;if(!p)return;f.containerElement=e(p),/document/.test(h)||h===document?(f.containerOffset={left:0,top:0},f.containerPosition={left:0,top:0},f.parentData={element:e(document),left:0,top:0,width:e(document).width(),height:e(document).height()||document.body.parentNode.scrollHeight}):(t=e(p),r=[],e(["Top","Right","Left","Bottom"]).each(function(e,i){r[e]=n(t.css("padding"+i))}),f.containerOffset=t.offset(),f.containerPosition=t.position(),f.containerSize={height:t.innerHeight()-r[3],width:t.innerWidth()-r[1]},i=f.containerOffset,s=f.containerSize.height,o=f.containerSize.width,u=e.ui.hasScroll(p,"left")?p.scrollWidth:o,a=e.ui.hasScroll(p)?p.scrollHeight:s,f.parentData={element:p,left:i.left,top:i.top,width:u,height:a})},resize:function(t){var n,r,i,s,o=e(this).data("ui-resizable"),u=o.options,a=o.containerOffset,f=o.position,l=o._aspectRatio||t.shiftKey,c={top:0,left:0},h=o.containerElement;h[0]!==document&&/static/.test(h.css("position"))&&(c=a),f.left<(o._helper?a.left:0)&&(o.size.width=o.size.width+(o._helper?o.position.left-a.left:o.position.left-c.left),l&&(o.size.height=o.size.width/o.aspectRatio),o.position.left=u.helper?a.left:0),f.top<(o._helper?a.top:0)&&(o.size.height=o.size.height+(o._helper?o.position.top-a.top:o.position.top),l&&(o.size.width=o.size.height*o.aspectRatio),o.position.top=o._helper?a.top:0),o.offset.left=o.parentData.left+o.position.left,o.offset.top=o.parentData.top+o.position.top,n=Math.abs((o._helper?o.offset.left-c.left:o.offset.left-c.left)+o.sizeDiff.width),r=Math.abs((o._helper?o.offset.top-c.top:o.offset.top-a.top)+o.sizeDiff.height),i=o.containerElement.get(0)===o.element.parent().get(0),s=/relative|absolute/.test(o.containerElement.css("position")),i&&s&&(n-=o.parentData.left),n+o.size.width>=o.parentData.width&&(o.size.width=o.parentData.width-n,l&&(o.size.height=o.size.width/o.aspectRatio)),r+o.size.height>=o.parentData.height&&(o.size.height=o.parentData.height-r,l&&(o.size.width=o.size.height*o.aspectRatio))},stop:function(){var t=e(this).data("ui-resizable"),n=t.options,r=t.containerOffset,i=t.containerPosition,s=t.containerElement,o=e(t.helper),u=o.offset(),a=o.outerWidth()-t.sizeDiff.width,f=o.outerHeight()-t.sizeDiff.height;t._helper&&!n.animate&&/relative/.test(s.css("position"))&&e(this).css({left:u.left-i.left-r.left,width:a,height:f}),t._helper&&!n.animate&&/static/.test(s.css("position"))&&e(this).css({left:u.left-i.left-r.left,width:a,height:f})}}),e.ui.plugin.add("resizable","alsoResize",{start:function(){var t=e(this).data("ui-resizable"),n=t.options,r=function(t){e(t).each(function(){var t=e(this);t.data("ui-resizable-alsoresize",{width:parseInt(t.width(),10),height:parseInt(t.height(),10),left:parseInt(t.css("left"),10),top:parseInt(t.css("top"),10)})})};typeof n.alsoResize=="object"&&!n.alsoResize.parentNode?n.alsoResize.length?(n.alsoResize=n.alsoResize[0],r(n.alsoResize)):e.each(n.alsoResize,function(e){r(e)}):r(n.alsoResize)},resize:function(t,n){var r=e(this).data("ui-resizable"),i=r.options,s=r.originalSize,o=r.originalPosition,u={height:r.size.height-s.height||0,width:r.size.width-s.width||0,top:r.position.top-o.top||0,left:r.position.left-o.left||0},a=function(t,r){e(t).each(function(){var t=e(this),i=e(this).data("ui-resizable-alsoresize"),s={},o=r&&r.length?r:t.parents(n.originalElement[0]).length?["width","height"]:["width","height","top","left"];e.each(o,function(e,t){var n=(i[t]||0)+(u[t]||0);n&&n>=0&&(s[t]=n||null)}),t.css(s)})};typeof 
i.alsoResize=="object"&&!i.alsoResize.nodeType?e.each(i.alsoResize,function(e,t){a(e,t)}):a(i.alsoResize)},stop:function(){e(this).removeData("resizable-alsoresize")}}),e.ui.plugin.add("resizable","ghost",{start:function(){var t=e(this).data("ui-resizable"),n=t.options,r=t.size;t.ghost=t.originalElement.clone(),t.ghost.css({opacity:.25,display:"block",position:"relative",height:r.height,width:r.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof n.ghost=="string"?n.ghost:""),t.ghost.appendTo(t.helper)},resize:function(){var t=e(this).data("ui-resizable");t.ghost&&t.ghost.css({position:"relative",height:t.size.height,width:t.size.width})},stop:function(){var t=e(this).data("ui-resizable");t.ghost&&t.helper&&t.helper.get(0).removeChild(t.ghost.get(0))}}),e.ui.plugin.add("resizable","grid",{resize:function(){var t=e(this).data("ui-resizable"),n=t.options,r=t.size,i=t.originalSize,s=t.originalPosition,o=t.axis,u=typeof n.grid=="number"?[n.grid,n.grid]:n.grid,a=u[0]||1,f=u[1]||1,l=Math.round((r.width-i.width)/a)*a,c=Math.round((r.height-i.height)/f)*f,h=i.width+l,p=i.height+c,d=n.maxWidth&&n.maxWidth<h,v=n.maxHeight&&n.maxHeight<p,m=n.minWidth&&n.minWidth>h,g=n.minHeight&&n.minHeight>p;n.grid=u,m&&(h+=a),g&&(p+=f),d&&(h-=a),v&&(p-=f),/^(se|s|e)$/.test(o)?(t.size.width=h,t.size.height=p):/^(ne)$/.test(o)?(t.size.width=h,t.size.height=p,t.position.top=s.top-c):/^(sw)$/.test(o)?(t.size.width=h,t.size.height=p,t.position.left=s.left-l):(t.size.width=h,t.size.height=p,t.position.top=s.top-c,t.position.left=s.left-l)}})})(jQuery);
diff --git a/src/wiki/templates/wiki/base_site.html b/src/wiki/templates/wiki/base_site.html
index fe925fef0..d025e53cd 100644
--- a/src/wiki/templates/wiki/base_site.html
+++ b/src/wiki/templates/wiki/base_site.html
@@ -153,7 +153,7 @@
{% endblock %}
- <script src="{% static "wiki/js/jquery.min.js" %}"></script>
+ <script src="{% static "wiki/js/jquery-3.3.1.min.js" %}"></script>
<script src="{% static "wiki/js/core.js" %}"></script>
<script src="{% static "wiki/bootstrap/js/bootstrap.min.js" %}"></script>
<!-- Optionally enable responsive features in IE8 -->
diff --git a/src/wiki/templates/wiki/includes/modals.html b/src/wiki/templates/wiki/includes/modals.html
index a43d7592c..b86d4f653 100644
--- a/src/wiki/templates/wiki/includes/modals.html
+++ b/src/wiki/templates/wiki/includes/modals.html
@@ -1,6 +1,6 @@
{% load sekizai_tags static %}
{% addtoblock "js" %}
-<script type="text/javascript" src="{% static "wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.js" %}"></script>
+<script type="text/javascript" src="{% static "wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.js" %}"></script>
<script type="text/javascript">
$(document).ready(function() {
$(".modal-content").on("resizestart", function(event, ui) {
@@ -27,5 +27,5 @@
</script>
{% endaddtoblock %}
{% addtoblock "css" %}
-<link rel="stylesheet" href="{% static "wiki/js/jqueryui/jquery-ui-1.10.0.custom.min.css" %}" type="text/css" />
+<link rel="stylesheet" href="{% static "wiki/js/jquery-ui-1.12.1.custom/jquery-ui.min.css" %}" type="text/css" />
{% endaddtoblock %}
|
mdn__kuma-6489 | Can't browse users in django admin now that tags are gone
https://sentry.prod.mozaws.net/operations/mdn-prod/issues/7273070/
```
Resolver404: {'tried': [[<RegexURLPattern None ^media/(?:redesign/)?css/(?P<doc>.*)-min.css$>], [<RegexURLPattern None ^media/(?:redesign/)?js/(?P<doc>.*)-min.js$>], [<RegexURLPattern None ^media/(?:redesign/)?img(?P<suffix>.*)$>], [<RegexURLPattern None ^media/(?:redesign/)?css(?P<suffix>.*)$>], [<RegexURLPattern None ^media/(?:redesign/)?js(?P<suffix>.*)$>], [<RegexURLPattern None ^media/(?:redesign/)?fonts(?P<suffix>.*)$>], [<RegexURLPattern None ^media/uploads/demos/(?:.*)$>], [<RegexURLPattern None (?i)^(?P<one>.*)//(?P<two>.*)//(?P<three>.*)$>], [<RegexURLPattern None (?i)^(?P<one>.*)//(?P<two>.*)$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/2_1_canvas_rect.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/2_2_canvas_moveto.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/2_3_canvas_lineto.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/2_4_canvas_arc.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/2_5_canvas_quadraticcurveto.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/2_6_canvas_beziercurveto.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/3_1_canvas_drawimage.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/3_2_canvas_drawimage.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/3_3_canvas_drawimage.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/3_4_canvas_gallery.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_1_canvas_fillstyle.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_2_canvas_strokestyle.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_3_canvas_globalalpha.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_4_canvas_rgba.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_5_canvas_linewidth.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_6_canvas_linecap.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_7_canvas_linejoin.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_8_canvas_miterlimit.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_9_canvas_lineargradient.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_10_canvas_radialgradient.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/4_11_canvas_createpattern.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/5_1_canvas_savestate.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/5_2_canvas_translate.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/5_3_canvas_rotate.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/5_4_canvas_scale.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/6_1_canvas_composite.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/6_2_canvas_clipping.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/globalCompositeOperation.html$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/backdrop.png$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/bg_gallery.png$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_1.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_2.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_3.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_4.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_5.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_6.jpg$>], [<RegexURLPattern None 
(?i)^samples/canvas-tutorial/images/gallery_7.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/gallery_8.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/picture_frame.png$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/rhino.jpg$>], [<RegexURLPattern None (?i)^samples/canvas-tutorial/images/wallpaper.png$>], [<RegexURLPattern None (?i)^samples/domref/mozGetAsFile.html$>], [<RegexURLPattern None (?i)^samples/raycaster/input.js$>], [<RegexURLPattern None (?i)^samples/raycaster/Level.js$>], [<RegexURL...
File "redirect_urls/middleware.py", line 14, in __call__
resolver_match = self.resolver.resolve(request.path_info)
File "newrelic/hooks/framework_django.py", line 600, in wrapper
return _wrapped(*args, **kwargs)
File "newrelic/hooks/framework_django.py", line 588, in _wrapped
result = wrapped(path)
File "newrelic/hooks/framework_django.py", line 575, in wrapper
return wrapped(*args, **kwargs)
File "django/urls/resolvers.py", line 394, in resolve
raise Resolver404({'tried': tried, 'path': new_path})
FieldError: Cannot resolve keyword 'tags' into field. Choices are: auth_token, bans, bans_issued, bio, created_attachment_revisions, created_revisions, created_toolbars, date_joined, discourse_url, documentattachment, documentdeletionlog, documentspam_reviewed, documentspamattempt, email, emailaddress, facebook_url, first_name, flag, fullname, github_url, groups, homepage, id, irc_nickname, is_active, is_github_url_public, is_newsletter_subscribed, is_staff, is_superuser, key, last_login, last_name, linkedin_url, locale, location, logentry, mozillians_url, organization, password, revisionakismetsubmission, socialaccount, stackoverflow_url, stripe_customer_id, timezone, title, twitter_url, user_permissions, username, watch, website_url
(18 additional frame(s) were not displayed)
...
File "django/db/models/sql/query.py", line 1268, in _add_q
child_clause, needed_inner = self._add_q(
File "django/db/models/sql/query.py", line 1273, in _add_q
child_clause, needed_inner = self.build_filter(
File "django/db/models/sql/query.py", line 1154, in build_filter
lookups, parts, reffed_expression = self.solve_lookup_type(arg)
File "django/db/models/sql/query.py", line 1034, in solve_lookup_type
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
File "django/db/models/sql/query.py", line 1351, in names_to_path
raise FieldError("Cannot resolve keyword '%s' into field. "
FieldError: Cannot resolve keyword 'tags' into field. Choices are: auth_token, bans, bans_issued, bio, created_attachment_revisions, created_revisions, created_toolbars, date_joined, discourse_url, documentattachment, documentdeletionlog, documentspam_reviewed, documentspamattempt, email, emailaddress, facebook_url, first_name, flag, fullname, github_url, groups, homepage, id, irc_nickname, is_active, is_github_url_public, is_newsletter_subscribed, is_staff, is_superuser, key, last_login, last_name, linkedin_url, locale, location, logentry, mozillians_url, organization, password, revisionakismetsubmission, socialaccount, stackoverflow_url, stripe_customer_id, timezone, title, twitter_url, user_permissions, username, watch, website_url
```
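
The lookup that fails is the `tags__name` entry still listed in `UserAdmin.search_fields` after the tags relation was removed from the `User` model; the files and diff in this record simply drop that entry. A minimal sketch of the corrected admin declaration follows (field list taken from the record below; the shortened class body is illustrative, not the full file):

```
# Sketch only: mirrors the change to kuma/users/admin.py shown below.
# "tags__name" referenced a relation that no longer exists on User, so the
# admin search raised FieldError while resolving the lookup.
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin

from .models import User


@admin.register(User)
class UserAdmin(BaseUserAdmin):
    search_fields = (
        "username",
        "title",
        "fullname",
        "organization",
        "location",
        "email",
        # "tags__name",  # removed: no such field/relation on User anymore
    )
```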
| [
{
"content": "from django.contrib import admin\nfrom django.contrib.auth.admin import UserAdmin as BaseUserAdmin\nfrom django.utils.html import format_html\n\nfrom kuma.core.urlresolvers import reverse\nfrom kuma.core.utils import urlparams\n\nfrom .models import User, UserBan\n\n\[email protected](UserBan)\nclass UserBanAdmin(admin.ModelAdmin):\n fields = (\"user\", \"by\", \"reason\", \"is_active\")\n list_display = (\"user\", \"by\", \"reason\", \"is_active\")\n list_editable = (\"is_active\",)\n list_filter = (\"is_active\",)\n raw_id_fields = (\"user\", \"by\")\n search_fields = (\"user__username\", \"reason\", \"by__username\")\n\n\[email protected](User)\nclass UserAdmin(BaseUserAdmin):\n \"\"\"\n Extends the admin view of users to show date_joined field\n add a filter on the field too\n \"\"\"\n\n list_display = (\n \"username\",\n \"fullname\",\n \"email\",\n \"revisions\",\n \"date_joined\",\n \"is_staff\",\n \"is_active\",\n )\n list_filter = (\"is_staff\", \"is_superuser\", \"is_active\", \"date_joined\", \"groups\")\n ordering = (\"-date_joined\",)\n search_fields = (\n \"username\",\n \"title\",\n \"fullname\",\n \"organization\",\n \"location\",\n \"email\",\n \"tags__name\",\n )\n\n def revisions(self, obj):\n \"\"\"HTML link to user's revisions with count\"\"\"\n link = urlparams(reverse(\"dashboards.revisions\"), user=obj.username)\n count = obj.created_revisions.count()\n return format_html('<a href=\"{}\"><strong>{}</strong></a>', link, count)\n",
"path": "kuma/users/admin.py"
}
] | [
{
"content": "from django.contrib import admin\nfrom django.contrib.auth.admin import UserAdmin as BaseUserAdmin\nfrom django.utils.html import format_html\n\nfrom kuma.core.urlresolvers import reverse\nfrom kuma.core.utils import urlparams\n\nfrom .models import User, UserBan\n\n\[email protected](UserBan)\nclass UserBanAdmin(admin.ModelAdmin):\n fields = (\"user\", \"by\", \"reason\", \"is_active\")\n list_display = (\"user\", \"by\", \"reason\", \"is_active\")\n list_editable = (\"is_active\",)\n list_filter = (\"is_active\",)\n raw_id_fields = (\"user\", \"by\")\n search_fields = (\"user__username\", \"reason\", \"by__username\")\n\n\[email protected](User)\nclass UserAdmin(BaseUserAdmin):\n \"\"\"\n Extends the admin view of users to show date_joined field\n add a filter on the field too\n \"\"\"\n\n list_display = (\n \"username\",\n \"fullname\",\n \"email\",\n \"revisions\",\n \"date_joined\",\n \"is_staff\",\n \"is_active\",\n )\n list_filter = (\"is_staff\", \"is_superuser\", \"is_active\", \"date_joined\", \"groups\")\n ordering = (\"-date_joined\",)\n search_fields = (\n \"username\",\n \"title\",\n \"fullname\",\n \"organization\",\n \"location\",\n \"email\",\n )\n\n def revisions(self, obj):\n \"\"\"HTML link to user's revisions with count\"\"\"\n link = urlparams(reverse(\"dashboards.revisions\"), user=obj.username)\n count = obj.created_revisions.count()\n return format_html('<a href=\"{}\"><strong>{}</strong></a>', link, count)\n",
"path": "kuma/users/admin.py"
}
] | diff --git a/kuma/users/admin.py b/kuma/users/admin.py
index d47b8088fcd..bc2aa7265b6 100644
--- a/kuma/users/admin.py
+++ b/kuma/users/admin.py
@@ -43,7 +43,6 @@ class UserAdmin(BaseUserAdmin):
"organization",
"location",
"email",
- "tags__name",
)
def revisions(self, obj):
|
pulp__pulpcore-3462 | Database errors raised when importing content
**Version**
Main pulpcore branch. The issue arose after merging the labels refactor work (https://github.com/pulp/pulpcore/commit/4e25949176d72c5dbe1c7623a9c47d253a18b085).
Reproducible in pulp_file and pulp_rpm.
**Describe the bug**
```
pulp [d32341b1-78b2-44da-b43d-e51121df9e95]: pulpcore.tasking.pulpcore_worker:INFO: Task 4c2b456b-d9a8-4238-bb45-7b63f403229c failed (Unexpected end of string
LINE 1: ...le.file', '365f08db-ac00-4e21-8abf-af0f047064cd', '{}', '', ...
^
)
pulp [d32341b1-78b2-44da-b43d-e51121df9e95]: pulpcore.tasking.pulpcore_worker:INFO: File "/home/vagrant/devel/pulpcore/pulpcore/tasking/pulpcore_worker.py", line 444, in _perform_task
result = func(*args, **kwargs)
File "/home/vagrant/devel/pulpcore/pulpcore/app/tasks/importer.py", line 236, in import_repository_version
for a_result in _import_file(os.path.join(rv_path, filename), res_class, retry=True):
File "/home/vagrant/devel/pulpcore/pulpcore/app/tasks/importer.py", line 138, in _import_file
a_result = resource.import_data(data, raise_errors=True)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/import_export/resources.py", line 819, in import_data
return self.import_data_inner(
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/import_export/resources.py", line 871, in import_data_inner
raise row_result.errors[-1].error
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/import_export/resources.py", line 743, in import_row
self.save_instance(instance, new, using_transactions, dry_run)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/import_export/resources.py", line 500, in save_instance
instance.save()
File "/home/vagrant/devel/pulpcore/pulpcore/app/models/repository.py", line 95, in save
super().save(*args, **kwargs)
File "/home/vagrant/devel/pulpcore/pulpcore/app/models/base.py", line 203, in save
return super().save(*args, **kwargs)
File "/usr/lib64/python3.10/contextlib.py", line 79, in inner
return func(*args, **kwds)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django_lifecycle/mixins.py", line 169, in save
save(*args, **kwargs)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/base.py", line 739, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/base.py", line 775, in save_base
parent_inserted = self._save_parents(cls, using, update_fields)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/base.py", line 804, in _save_parents
updated = self._save_table(
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/base.py", line 881, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/base.py", line 919, in _do_insert
return manager._insert(
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/query.py", line 1270, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/models/sql/compiler.py", line 1416, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/backends/utils.py", line 66, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/backends/utils.py", line 75, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/backends/utils.py", line 79, in _execute
with self.db.wrap_database_errors:
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/pulp/lib64/python3.10/site-packages/django/db/backends/utils.py", line 84, in _execute
return self.cursor.execute(sql, params)
```
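
The `'{}'` in the failing INSERT is the exported `pulp_labels` value; after the labels refactor that column no longer round-trips through django-import-export as plain text, so the import fails while saving the Repository row (this reading is inferred from the error message, not a verified root-cause analysis). The fix in the diff below is to stop exporting the field. A minimal sketch of the adjusted resource, with imports and field list taken from the record below:

```
# Sketch only: mirrors the RepositoryResource change in
# pulpcore/app/modelresource.py shown below. Excluding pulp_labels keeps the
# exporter from writing the serialized label value and the importer from
# trying to insert it back into the database.
from pulpcore.app.models.repository import Repository
from pulpcore.plugin.importexport import QueryModelResource


class RepositoryResource(QueryModelResource):
    class Meta:
        model = Repository
        import_id_fields = ("name",)
        exclude = (
            "pulp_id",
            "pulp_created",
            "pulp_last_updated",
            "content",
            "next_version",
            "repository_ptr",
            "remote",
            "pulp_labels",  # added: labels are no longer export/import-safe here
        )
```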
| [
{
"content": "from import_export import fields\nfrom import_export.widgets import ForeignKeyWidget\nfrom logging import getLogger\n\nfrom pulpcore.app.models.content import (\n Artifact,\n Content,\n ContentArtifact,\n)\nfrom pulpcore.app.models.repository import Repository\nfrom pulpcore.constants import ALL_KNOWN_CONTENT_CHECKSUMS\nfrom pulpcore.plugin.importexport import QueryModelResource\n\n\nlog = getLogger(__name__)\n\n\n#\n# Artifact and Repository are different from other import-export entities, in that they are not\n# repo-version-specific.\n#\nclass ArtifactResource(QueryModelResource):\n \"\"\"Resource for import/export of artifacts.\"\"\"\n\n def before_import_row(self, row, **kwargs):\n \"\"\"\n Sets digests to None if they are blank strings.\n\n Args:\n row (tablib.Dataset row): incoming import-row representing a single Variant.\n kwargs: args passed along from the import() call.\n\n \"\"\"\n # the export converts None to blank strings but sha384 and sha512 have unique constraints\n # that get triggered if they are blank. convert checksums back into None if they are blank.\n for checksum in ALL_KNOWN_CONTENT_CHECKSUMS:\n if row[checksum] == \"\":\n row[checksum] = None\n\n class Meta:\n model = Artifact\n exclude = (\n \"pulp_id\",\n \"pulp_created\",\n \"pulp_last_updated\",\n )\n import_id_fields = (\"sha256\",)\n\n\nclass RepositoryResource(QueryModelResource):\n class Meta:\n model = Repository\n import_id_fields = (\"name\",)\n exclude = (\n \"pulp_id\",\n \"pulp_created\",\n \"pulp_last_updated\",\n \"content\",\n \"next_version\",\n \"repository_ptr\",\n \"remote\",\n )\n\n\nclass ContentArtifactResource(QueryModelResource):\n \"\"\"\n Handles import/export of the ContentArtifact model.\n\n ContentArtifact is different from other import-export entities because it has no 'natural key'\n other than a pulp_id, which aren't shared across instances. We do some magic to link up\n ContentArtifacts to their matching (already-imported) Content.\n\n Some plugin-models have sub-repositories. 
We take advantage of the content-mapping\n machinery to account for those contentartifacts as well.\n \"\"\"\n\n artifact = fields.Field(\n column_name=\"artifact\", attribute=\"artifact\", widget=ForeignKeyWidget(Artifact, \"sha256\")\n )\n\n def __init__(self, repo_version=None, content_mapping=None):\n self.content_mapping = content_mapping\n super().__init__(repo_version)\n\n def before_import_row(self, row, **kwargs):\n \"\"\"\n Fixes the content-ptr of an incoming content-artifact row at import time.\n\n Finds the 'original uuid' of the Content for this row, looks it up as the\n 'upstream_id' of imported Content, and then replaces the Content-pk with its\n (new) uuid.\n\n Args:\n row (tablib.Dataset row): incoming import-row representing a single ContentArtifact.\n kwargs: args passed along from the import() call.\n\n Returns:\n (tablib.Dataset row): row that now points to the new downstream uuid for its content.\n \"\"\"\n\n linked_content = Content.objects.get(upstream_id=row[\"content\"])\n row[\"content\"] = str(linked_content.pulp_id)\n\n def set_up_queryset(self):\n vers_content = ContentArtifact.objects.filter(content__in=self.repo_version.content)\n if self.content_mapping:\n all_content = []\n for content_ids in self.content_mapping.values():\n all_content.extend(content_ids)\n vers_content = vers_content.union(\n ContentArtifact.objects.filter(content__in=all_content)\n )\n return vers_content.order_by(\"content\", \"relative_path\")\n\n class Meta:\n model = ContentArtifact\n import_id_fields = (\n \"content\",\n \"relative_path\",\n )\n exclude = (\n \"pulp_created\",\n \"pulp_last_updated\",\n \"_artifacts\",\n \"pulp_id\",\n )\n",
"path": "pulpcore/app/modelresource.py"
}
] | [
{
"content": "from import_export import fields\nfrom import_export.widgets import ForeignKeyWidget\nfrom logging import getLogger\n\nfrom pulpcore.app.models.content import (\n Artifact,\n Content,\n ContentArtifact,\n)\nfrom pulpcore.app.models.repository import Repository\nfrom pulpcore.constants import ALL_KNOWN_CONTENT_CHECKSUMS\nfrom pulpcore.plugin.importexport import QueryModelResource\n\n\nlog = getLogger(__name__)\n\n\n#\n# Artifact and Repository are different from other import-export entities, in that they are not\n# repo-version-specific.\n#\nclass ArtifactResource(QueryModelResource):\n \"\"\"Resource for import/export of artifacts.\"\"\"\n\n def before_import_row(self, row, **kwargs):\n \"\"\"\n Sets digests to None if they are blank strings.\n\n Args:\n row (tablib.Dataset row): incoming import-row representing a single Variant.\n kwargs: args passed along from the import() call.\n\n \"\"\"\n # the export converts None to blank strings but sha384 and sha512 have unique constraints\n # that get triggered if they are blank. convert checksums back into None if they are blank.\n for checksum in ALL_KNOWN_CONTENT_CHECKSUMS:\n if row[checksum] == \"\":\n row[checksum] = None\n\n class Meta:\n model = Artifact\n exclude = (\n \"pulp_id\",\n \"pulp_created\",\n \"pulp_last_updated\",\n )\n import_id_fields = (\"sha256\",)\n\n\nclass RepositoryResource(QueryModelResource):\n class Meta:\n model = Repository\n import_id_fields = (\"name\",)\n exclude = (\n \"pulp_id\",\n \"pulp_created\",\n \"pulp_last_updated\",\n \"content\",\n \"next_version\",\n \"repository_ptr\",\n \"remote\",\n \"pulp_labels\",\n )\n\n\nclass ContentArtifactResource(QueryModelResource):\n \"\"\"\n Handles import/export of the ContentArtifact model.\n\n ContentArtifact is different from other import-export entities because it has no 'natural key'\n other than a pulp_id, which aren't shared across instances. We do some magic to link up\n ContentArtifacts to their matching (already-imported) Content.\n\n Some plugin-models have sub-repositories. 
We take advantage of the content-mapping\n machinery to account for those contentartifacts as well.\n \"\"\"\n\n artifact = fields.Field(\n column_name=\"artifact\", attribute=\"artifact\", widget=ForeignKeyWidget(Artifact, \"sha256\")\n )\n\n def __init__(self, repo_version=None, content_mapping=None):\n self.content_mapping = content_mapping\n super().__init__(repo_version)\n\n def before_import_row(self, row, **kwargs):\n \"\"\"\n Fixes the content-ptr of an incoming content-artifact row at import time.\n\n Finds the 'original uuid' of the Content for this row, looks it up as the\n 'upstream_id' of imported Content, and then replaces the Content-pk with its\n (new) uuid.\n\n Args:\n row (tablib.Dataset row): incoming import-row representing a single ContentArtifact.\n kwargs: args passed along from the import() call.\n\n Returns:\n (tablib.Dataset row): row that now points to the new downstream uuid for its content.\n \"\"\"\n\n linked_content = Content.objects.get(upstream_id=row[\"content\"])\n row[\"content\"] = str(linked_content.pulp_id)\n\n def set_up_queryset(self):\n vers_content = ContentArtifact.objects.filter(content__in=self.repo_version.content)\n if self.content_mapping:\n all_content = []\n for content_ids in self.content_mapping.values():\n all_content.extend(content_ids)\n vers_content = vers_content.union(\n ContentArtifact.objects.filter(content__in=all_content)\n )\n return vers_content.order_by(\"content\", \"relative_path\")\n\n class Meta:\n model = ContentArtifact\n import_id_fields = (\n \"content\",\n \"relative_path\",\n )\n exclude = (\n \"pulp_created\",\n \"pulp_last_updated\",\n \"_artifacts\",\n \"pulp_id\",\n )\n",
"path": "pulpcore/app/modelresource.py"
}
] | diff --git a/CHANGES/3461.misc b/CHANGES/3461.misc
new file mode 100644
index 0000000000..c0f67471b9
--- /dev/null
+++ b/CHANGES/3461.misc
@@ -0,0 +1 @@
+Taught Repository to not export pulp_labels.
diff --git a/pulpcore/app/modelresource.py b/pulpcore/app/modelresource.py
index def5962597..6e957a60f4 100644
--- a/pulpcore/app/modelresource.py
+++ b/pulpcore/app/modelresource.py
@@ -59,6 +59,7 @@ class Meta:
"next_version",
"repository_ptr",
"remote",
+ "pulp_labels",
)
|
SCons__scons-4374 | Configure.CheckLib() error with -Wstrict-prototypes
This is a continuation of #3095. As noted in [this comment](https://github.com/SCons/scons/pull/3096/files#r1257532304), there was one more instance that was missed in PR #3096:
https://github.com/SCons/scons/blob/810ca6c8895b01cbd636d83079f6a848dc36adf6/SCons/Conftest.py#L677-L684
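
The linked region still declares the probe function without a prototype, which fails when the project being configured builds with `-Wstrict-prototypes` plus warnings-as-errors; the instances fixed in PR #3096 switched to an explicit `(void)` parameter list. A small illustrative sketch of the pattern — the template text here is an assumption for illustration, not a verbatim copy of Conftest.py lines 677-684:

```
# Illustrative only: shows the -Wstrict-prototypes pattern, not the exact
# Conftest.py source referenced above.
function_name = "foo"

# Old style: empty parentheses declare a function without a prototype, which
# gcc/clang reject under -Wstrict-prototypes -Werror.
old_decl = """
#ifdef __cplusplus
extern "C"
#endif
char %s();
""" % function_name

# Fixed style, matching the declarations already updated in PR #3096.
fixed_decl = """
#ifdef __cplusplus
extern "C"
#endif
char %s(void);
""" % function_name
```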
| [
{
"content": "# MIT License\n#\n# Copyright The SCons Foundation\n# Copyright (c) 2003 Stichting NLnet Labs\n# Copyright (c) 2001, 2002, 2003 Steven Knight\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nr\"\"\"Autoconf-like configuration support\n\nThe purpose of this module is to define how a check is to be performed.\n\nA context class is used that defines functions for carrying out the tests,\nlogging and messages. The following methods and members must be present:\n\ncontext.Display(msg)\n Function called to print messages that are normally displayed\n for the user. Newlines are explicitly used. The text should\n also be written to the logfile!\n\ncontext.Log(msg)\n Function called to write to a log file.\n\ncontext.BuildProg(text, ext)\n Function called to build a program, using \"ext\" for the file\n extension. Must return an empty string for success, an error\n message for failure. For reliable test results building should\n be done just like an actual program would be build, using the\n same command and arguments (including configure results so far).\n\ncontext.CompileProg(text, ext)\n Function called to compile a program, using \"ext\" for the file\n extension. Must return an empty string for success, an error\n message for failure. For reliable test results compiling should be\n done just like an actual source file would be compiled, using the\n same command and arguments (including configure results so far).\n\ncontext.AppendLIBS(lib_name_list)\n Append \"lib_name_list\" to the value of LIBS. \"lib_namelist\" is\n a list of strings. Return the value of LIBS before changing it\n (any type can be used, it is passed to SetLIBS() later.)\n\ncontext.PrependLIBS(lib_name_list)\n Prepend \"lib_name_list\" to the value of LIBS. \"lib_namelist\" is\n a list of strings. Return the value of LIBS before changing it\n (any type can be used, it is passed to SetLIBS() later.)\n\ncontext.SetLIBS(value)\n Set LIBS to \"value\". The type of \"value\" is what AppendLIBS()\n returned. Return the value of LIBS before changing it (any type\n can be used, it is passed to SetLIBS() later.)\n\ncontext.headerfilename\n Name of file to append configure results to, usually \"confdefs.h\".\n The file must not exist or be empty when starting. 
Empty or None\n to skip this (some tests will not work!).\n\ncontext.config_h (may be missing).\n If present, must be a string, which will be filled with the\n contents of a config_h file.\n\ncontext.vardict\n Dictionary holding variables used for the tests and stores results\n from the tests, used for the build commands. Normally contains\n \"CC\", \"LIBS\", \"CPPFLAGS\", etc.\n\ncontext.havedict\n Dictionary holding results from the tests that are to be used\n inside a program. Names often start with \"HAVE\\_\". These are zero\n (feature not present) or one (feature present). Other variables\n may have any value, e.g., \"PERLVERSION\" can be a number and\n \"SYSTEMNAME\" a string.\n\"\"\"\n\nimport re\n\n#\n# PUBLIC VARIABLES\n#\n\nLogInputFiles = 1 # Set that to log the input files in case of a failed test\nLogErrorMessages = 1 # Set that to log Conftest-generated error messages\n\n#\n# PUBLIC FUNCTIONS\n#\n\n# Generic remarks:\n# - When a language is specified which is not supported the test fails. The\n# message is a bit different, because not all the arguments for the normal\n# message are available yet (chicken-egg problem).\n\n\ndef CheckBuilder(context, text = None, language = None):\n \"\"\"\n Configure check to see if the compiler works.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n \"text\" may be used to specify the code to be build.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"%s\\n\" % msg)\n return msg\n\n if not text:\n text = \"\"\"\nint main(void) {\n return 0;\n}\n\"\"\"\n\n context.Display(\"Checking if building a %s file works... \" % lang)\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckCC(context):\n \"\"\"\n Configure check for a working C compiler.\n\n This checks whether the C compiler, as defined in the $CC construction\n variable, can compile a C source file. It uses the current $CCCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the C compiler works... \")\n text = \"\"\"\nint main(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'CC', text, 'C')\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckSHCC(context):\n \"\"\"\n Configure check for a working shared C compiler.\n\n This checks whether the C compiler, as defined in the $SHCC construction\n variable, can compile a C source file. It uses the current $SHCCCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the (shared) C compiler works... \")\n text = \"\"\"\nint foo(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True)\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckCXX(context):\n \"\"\"\n Configure check for a working CXX compiler.\n\n This checks whether the CXX compiler, as defined in the $CXX construction\n variable, can compile a CXX source file. It uses the current $CXXCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the C++ compiler works... 
\")\n text = \"\"\"\nint main(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'CXX', text, 'C++')\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckSHCXX(context):\n \"\"\"\n Configure check for a working shared CXX compiler.\n\n This checks whether the CXX compiler, as defined in the $SHCXX construction\n variable, can compile a CXX source file. It uses the current $SHCXXCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the (shared) C++ compiler works... \")\n text = \"\"\"\nint main(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True)\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef _check_empty_program(context, comp, text, language, use_shared: bool = False):\n \"\"\"Return 0 on success, 1 otherwise.\"\"\"\n if comp not in context.env or not context.env[comp]:\n # The compiler construction variable is not set or empty\n return 1\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n return 1\n\n if use_shared:\n return context.CompileSharedObject(text, suffix)\n else:\n return context.CompileProg(text, suffix)\n\n\ndef CheckFunc(context, function_name, header = None, language = None):\n \"\"\"\n Configure check for a function \"function_name\".\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Optional \"header\" can be defined to define a function prototype, include a\n header file or anything else that comes before main().\n Sets HAVE_function_name in context.havedict according to the result.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n\n # Remarks from autoconf:\n # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h>\n # which includes <sys/select.h> which contains a prototype for select.\n # Similarly for bzero.\n # - assert.h is included to define __stub macros and hopefully few\n # prototypes, which can conflict with char $1(); below.\n # - Override any gcc2 internal prototype to avoid an error.\n # - We use char for the function declaration because int might match the\n # return type of a gcc2 builtin and then its argument prototype would\n # still apply.\n # - The GNU C library defines this for functions which it implements to\n # always fail with ENOSYS. Some functions are actually named something\n # starting with __ and the normal name is an alias.\n\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\"\n#ifdef __cplusplus\nextern \"C\"\n#endif\nchar %s(void);\"\"\" % function_name\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for %s(): %s\\n\" % (function_name, msg))\n return msg\n\n text = \"\"\"\n%(include)s\n#include <assert.h>\n%(hdr)s\n\n#if _MSC_VER && !__INTEL_COMPILER\n #pragma function(%(name)s)\n#endif\n\nint main(void) {\n#if defined (__stub_%(name)s) || defined (__stub___%(name)s)\n #error \"%(name)s has a GNU stub, cannot check\"\n#else\n %(name)s();\n#endif\n\n return 0;\n}\n\"\"\" % { 'name': function_name,\n 'include': includetext,\n 'hdr': header }\n\n context.Display(\"Checking for %s function %s()... 
\" % (lang, function_name))\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + function_name, text,\n \"Define to 1 if the system has the function `%s'.\" %\\\n function_name)\n return ret\n\n\ndef CheckHeader(context, header_name, header=None, language=None,\n include_quotes=None):\n \"\"\"\n Configure check for a C or C++ header file \"header_name\".\n Optional \"header\" can be defined to do something before including the\n header file (unusual, supported for consistency).\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Sets HAVE_header_name in context.havedict according to the result.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS and $CPPFLAGS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n # Why compile the program instead of just running the preprocessor?\n # It is possible that the header file exists, but actually using it may\n # fail (e.g., because it depends on other header files). Thus this test is\n # more strict. It may require using the \"header\" argument.\n #\n # Use <> by default, because the check is normally used for system header\n # files. SCons passes '\"\"' to overrule this.\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"\\n' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for header file %s: %s\\n\"\n % (header_name, msg))\n return msg\n\n if not include_quotes:\n include_quotes = \"<>\"\n\n text = \"%s%s\\n#include %s%s%s\\n\\n\" % (includetext, header,\n include_quotes[0], header_name, include_quotes[1])\n\n context.Display(\"Checking for %s header file %s... 
\" % (lang, header_name))\n ret = context.CompileProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + header_name, text,\n \"Define to 1 if you have the <%s> header file.\" % header_name)\n return ret\n\n\ndef CheckType(context, type_name, fallback = None,\n header = None, language = None):\n \"\"\"\n Configure check for a C or C++ type \"type_name\".\n Optional \"header\" can be defined to include a header file.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Sets HAVE_type_name in context.havedict according to the result.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for %s type: %s\\n\" % (type_name, msg))\n return msg\n\n # Remarks from autoconf about this test:\n # - Grepping for the type in include files is not reliable (grep isn't\n # portable anyway).\n # - Using \"TYPE my_var;\" doesn't work for const qualified types in C++.\n # Adding an initializer is not valid for some C++ classes.\n # - Using the type as parameter to a function either fails for K&$ C or for\n # C++.\n # - Using \"TYPE *my_var;\" is valid in C for some types that are not\n # declared (struct something).\n # - Using \"sizeof(TYPE)\" is valid when TYPE is actually a variable.\n # - Using the previous two together works reliably.\n text = \"\"\"\n%(include)s\n%(header)s\n\nint main(void) {\n if ((%(name)s *) 0)\n return 0;\n if (sizeof (%(name)s))\n return 0;\n}\n\"\"\" % { 'include': includetext,\n 'header': header,\n 'name': type_name }\n\n context.Display(\"Checking for %s type %s... \" % (lang, type_name))\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + type_name, text,\n \"Define to 1 if the system has the type `%s'.\" % type_name)\n if ret and fallback and context.headerfilename:\n f = open(context.headerfilename, \"a\")\n f.write(\"typedef %s %s;\\n\" % (fallback, type_name))\n f.close()\n\n return ret\n\ndef CheckTypeSize(context, type_name, header = None, language = None, expect = None):\n \"\"\"This check can be used to get the size of a given type, or to check whether\n the type is of expected size.\n\n Arguments:\n - type : str\n the type to check\n - includes : sequence\n list of headers to include in the test code before testing the type\n - language : str\n 'C' or 'C++'\n - expect : int\n if given, will test wether the type has the given number of bytes.\n If not given, will automatically find the size.\n\n Returns:\n status : int\n 0 if the check failed, or the found size of the type if the check succeeded.\"\"\"\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n\n if not header:\n header = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for %s type: %s\\n\" % (type_name, msg))\n return msg\n\n src = includetext + header\n if expect is not None:\n # Only check if the given size is the right one\n context.Display('Checking %s is %d bytes... 
' % (type_name, expect))\n\n # test code taken from autoconf: this is a pretty clever hack to find that\n # a type is of a given size using only compilation. This speeds things up\n # quite a bit compared to straightforward code using TryRun\n src = src + r\"\"\"\ntypedef %s scons_check_type;\n\nint main(void)\n{\n static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)];\n test_array[0] = 0;\n\n return 0;\n}\n\"\"\"\n\n st = context.CompileProg(src % (type_name, expect), suffix)\n if not st:\n context.Display(\"yes\\n\")\n _Have(context, \"SIZEOF_%s\" % type_name, expect,\n \"The size of `%s', as computed by sizeof.\" % type_name)\n return expect\n else:\n context.Display(\"no\\n\")\n _LogFailed(context, src, st)\n return 0\n else:\n # Only check if the given size is the right one\n context.Message('Checking size of %s ... ' % type_name)\n\n # We have to be careful with the program we wish to test here since\n # compilation will be attempted using the current environment's flags.\n # So make sure that the program will compile without any warning. For\n # example using: 'int main(int argc, char** argv)' will fail with the\n # '-Wall -Werror' flags since the variables argc and argv would not be\n # used in the program...\n #\n src = src + \"\"\"\n#include <stdlib.h>\n#include <stdio.h>\nint main(void) {\n printf(\"%d\", (int)sizeof(\"\"\" + type_name + \"\"\"));\n return 0;\n}\n \"\"\"\n st, out = context.RunProg(src, suffix)\n try:\n size = int(out)\n except ValueError:\n # If cannot convert output of test prog to an integer (the size),\n # something went wront, so just fail\n st = 1\n size = 0\n\n if not st:\n context.Display(\"yes\\n\")\n _Have(context, \"SIZEOF_%s\" % type_name, size,\n \"The size of `%s', as computed by sizeof.\" % type_name)\n return size\n else:\n context.Display(\"no\\n\")\n _LogFailed(context, src, st)\n return 0\n\n return 0\n\ndef CheckDeclaration(context, symbol, includes = None, language = None):\n \"\"\"Checks whether symbol is declared.\n\n Use the same test as autoconf, that is test whether the symbol is defined\n as a macro or can be used as an r-value.\n\n Arguments:\n symbol : str\n the symbol to check\n includes : str\n Optional \"header\" can be defined to include a header file.\n language : str\n only C and C++ supported.\n\n Returns:\n status : bool\n True if the check failed, False if succeeded.\"\"\"\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n\n if not includes:\n includes = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for declaration %s: %s\\n\" % (symbol, msg))\n return msg\n\n src = includetext + includes\n context.Display('Checking whether %s is declared... 
' % symbol)\n\n src = src + r\"\"\"\nint main(void)\n{\n#ifndef %s\n (void) %s;\n#endif\n ;\n return 0;\n}\n\"\"\" % (symbol, symbol)\n\n st = context.CompileProg(src, suffix)\n _YesNoResult(context, st, \"HAVE_DECL_\" + symbol, src,\n \"Set to 1 if %s is defined.\" % symbol)\n return st\n\n\ndef CheckMember(context, aggregate_member, header = None, language = None):\n \"\"\"\n Configure check for a C or C++ member \"aggregate_member\".\n Optional \"header\" can be defined to include a header file.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n\n Arguments:\n aggregate_member : str\n the member to check. For example, 'struct tm.tm_gmtoff'.\n includes : str\n Optional \"header\" can be defined to include a header file.\n language : str\n only C and C++ supported.\n\n Returns the status (0 or False = Passed, True/non-zero = Failed).\n \"\"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for member %s: %s\\n\" % (aggregate_member, msg))\n return True\n context.Display(\"Checking for %s member %s... \" % (lang, aggregate_member))\n fields = aggregate_member.split('.')\n if len(fields) != 2:\n msg = \"shall contain just one dot, for example 'struct tm.tm_gmtoff'\"\n context.Display(\"Cannot check for member %s: %s\\n\" % (aggregate_member, msg))\n return True\n aggregate, member = fields[0], fields[1]\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = ''\n text = '''\n%(include)s\n%(header)s\n\nint main(void) {\n if (sizeof ((%(aggregate)s *) 0)->%(member)s)\n return 0;\n}''' % {'include': includetext,\n 'header': header,\n 'aggregate': aggregate,\n 'member': member}\n\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + aggregate_member, text,\n \"Define to 1 if the system has the member `%s`.\" % aggregate_member)\n return ret\n\ndef CheckLib(context, libs, func_name = None, header = None,\n extra_libs = None, call = None, language = None, autoadd: int = 1,\n append: bool=True, unique: bool=False):\n \"\"\"\n Configure check for a C or C++ libraries \"libs\". Searches through\n the list of libraries, until one is found where the test succeeds.\n Tests if \"func_name\" or \"call\" exists in the library. Note: if it exists\n in another library the test succeeds anyway!\n Optional \"header\" can be defined to include a header file. If not given a\n default prototype for \"func_name\" is added.\n Optional \"extra_libs\" is a list of library names to be added after\n \"lib_name\" in the build command. To be used for libraries that \"lib_name\"\n depends on.\n Optional \"call\" replaces the call to \"func_name\" in the test code. 
It must\n consist of complete C statements, including a trailing \";\".\n Both \"func_name\" and \"call\" arguments are optional, and in that case, just\n linking against the libs is tested.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\n\n text = \"\"\"\n%s\n%s\"\"\" % (includetext, header)\n\n # Add a function declaration if needed.\n if func_name and func_name != \"main\":\n if not header:\n text = text + \"\"\"\n#ifdef __cplusplus\nextern \"C\"\n#endif\nchar %s();\n\"\"\" % func_name\n\n # The actual test code.\n if not call:\n call = \"%s();\" % func_name\n\n # if no function to test, leave main() blank\n text = text + \"\"\"\nint\nmain() {\n %s\nreturn 0;\n}\n\"\"\" % (call or \"\")\n\n if call:\n i = call.find(\"\\n\")\n if i > 0:\n calltext = call[:i] + \"..\"\n elif call[-1] == ';':\n calltext = call[:-1]\n else:\n calltext = call\n\n for lib_name in libs:\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for library %s: %s\\n\" % (lib_name, msg))\n return msg\n\n # if a function was specified to run in main(), say it\n if call:\n context.Display(\"Checking for %s in %s library %s... \"\n % (calltext, lang, lib_name))\n # otherwise, just say the name of library and language\n else:\n context.Display(\"Checking for %s library %s... \"\n % (lang, lib_name))\n\n if lib_name:\n l = [ lib_name ]\n if extra_libs:\n l.extend(extra_libs)\n if append:\n oldLIBS = context.AppendLIBS(l, unique)\n else:\n oldLIBS = context.PrependLIBS(l, unique)\n sym = \"HAVE_LIB\" + lib_name\n else:\n oldLIBS = -1\n sym = None\n\n ret = context.BuildProg(text, suffix)\n\n _YesNoResult(context, ret, sym, text,\n \"Define to 1 if you have the `%s' library.\" % lib_name)\n if oldLIBS != -1 and (ret or not autoadd):\n context.SetLIBS(oldLIBS)\n\n if not ret:\n return ret\n\n return ret\n\ndef CheckProg(context, prog_name):\n \"\"\"\n Configure check for a specific program.\n\n Check whether program prog_name exists in path. If it is found,\n returns the path for it, otherwise returns None.\n \"\"\"\n context.Display(\"Checking whether %s program exists...\" % prog_name)\n path = context.env.WhereIs(prog_name)\n if path:\n context.Display(path + \"\\n\")\n else:\n context.Display(\"no\\n\")\n return path\n\n\n#\n# END OF PUBLIC FUNCTIONS\n#\n\ndef _YesNoResult(context, ret, key, text, comment = None) -> None:\n r\"\"\"\n Handle the result of a test with a \"yes\" or \"no\" result.\n\n :Parameters:\n - `ret` is the return value: empty if OK, error message when not.\n - `key` is the name of the symbol to be defined (HAVE_foo).\n - `text` is the source code of the program used for testing.\n - `comment` is the C comment to add above the line defining the symbol (the comment is automatically put inside a /\\* \\*/). 
If None, no comment is added.\n \"\"\"\n if key:\n _Have(context, key, not ret, comment)\n if ret:\n context.Display(\"no\\n\")\n _LogFailed(context, text, ret)\n else:\n context.Display(\"yes\\n\")\n\n\ndef _Have(context, key, have, comment = None) -> None:\n r\"\"\"\n Store result of a test in context.havedict and context.headerfilename.\n\n :Parameters:\n - `key` - is a \"HAVE_abc\" name. It is turned into all CAPITALS and non-alphanumerics are replaced by an underscore.\n - `have` - value as it should appear in the header file, include quotes when desired and escape special characters!\n - `comment` is the C comment to add above the line defining the symbol (the comment is automatically put inside a /\\* \\*/). If None, no comment is added.\n\n\n The value of \"have\" can be:\n - 1 - Feature is defined, add \"#define key\".\n - 0 - Feature is not defined, add \"/\\* #undef key \\*/\". Adding \"undef\" is what autoconf does. Not useful for the compiler, but it shows that the test was done.\n - number - Feature is defined to this number \"#define key have\". Doesn't work for 0 or 1, use a string then.\n - string - Feature is defined to this string \"#define key have\".\n\n\n \"\"\"\n key_up = key.upper()\n key_up = re.sub('[^A-Z0-9_]', '_', key_up)\n context.havedict[key_up] = have\n if have == 1:\n line = \"#define %s 1\\n\" % key_up\n elif have == 0:\n line = \"/* #undef %s */\\n\" % key_up\n elif isinstance(have, int):\n line = \"#define %s %d\\n\" % (key_up, have)\n else:\n line = \"#define %s %s\\n\" % (key_up, str(have))\n\n if comment is not None:\n lines = \"\\n/* %s */\\n\" % comment + line\n else:\n lines = \"\\n\" + line\n\n if context.headerfilename:\n f = open(context.headerfilename, \"a\")\n f.write(lines)\n f.close()\n elif hasattr(context,'config_h'):\n context.config_h = context.config_h + lines\n\n\ndef _LogFailed(context, text, msg) -> None:\n \"\"\"\n Write to the log about a failed program.\n Add line numbers, so that error messages can be understood.\n \"\"\"\n if LogInputFiles:\n context.Log(\"Failed program was:\\n\")\n lines = text.split('\\n')\n if len(lines) and lines[-1] == '':\n lines = lines[:-1] # remove trailing empty line\n n = 1\n for line in lines:\n context.Log(\"%d: %s\\n\" % (n, line))\n n = n + 1\n if LogErrorMessages:\n context.Log(\"Error message: %s\\n\" % msg)\n\n\ndef _lang2suffix(lang):\n \"\"\"\n Convert a language name to a suffix.\n When \"lang\" is empty or None C is assumed.\n Returns a tuple (lang, suffix, None) when it works.\n For an unrecognized language returns (None, None, msg).\n\n Where:\n - lang = the unified language name\n - suffix = the suffix, including the leading dot\n - msg = an error message\n \"\"\"\n if not lang or lang in [\"C\", \"c\"]:\n return (\"C\", \".c\", None)\n if lang in [\"c++\", \"C++\", \"cpp\", \"CXX\", \"cxx\"]:\n return (\"C++\", \".cpp\", None)\n\n return None, None, \"Unsupported language: %s\" % lang\n\n\n# vim: set sw=4 et sts=4 tw=79 fo+=l:\n\n# Local Variables:\n# tab-width:4\n# indent-tabs-mode:nil\n# End:\n# vim: set expandtab tabstop=4 shiftwidth=4:\n",
"path": "SCons/Conftest.py"
}
] | [
{
"content": "# MIT License\n#\n# Copyright The SCons Foundation\n# Copyright (c) 2003 Stichting NLnet Labs\n# Copyright (c) 2001, 2002, 2003 Steven Knight\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nr\"\"\"Autoconf-like configuration support\n\nThe purpose of this module is to define how a check is to be performed.\n\nA context class is used that defines functions for carrying out the tests,\nlogging and messages. The following methods and members must be present:\n\ncontext.Display(msg)\n Function called to print messages that are normally displayed\n for the user. Newlines are explicitly used. The text should\n also be written to the logfile!\n\ncontext.Log(msg)\n Function called to write to a log file.\n\ncontext.BuildProg(text, ext)\n Function called to build a program, using \"ext\" for the file\n extension. Must return an empty string for success, an error\n message for failure. For reliable test results building should\n be done just like an actual program would be build, using the\n same command and arguments (including configure results so far).\n\ncontext.CompileProg(text, ext)\n Function called to compile a program, using \"ext\" for the file\n extension. Must return an empty string for success, an error\n message for failure. For reliable test results compiling should be\n done just like an actual source file would be compiled, using the\n same command and arguments (including configure results so far).\n\ncontext.AppendLIBS(lib_name_list)\n Append \"lib_name_list\" to the value of LIBS. \"lib_namelist\" is\n a list of strings. Return the value of LIBS before changing it\n (any type can be used, it is passed to SetLIBS() later.)\n\ncontext.PrependLIBS(lib_name_list)\n Prepend \"lib_name_list\" to the value of LIBS. \"lib_namelist\" is\n a list of strings. Return the value of LIBS before changing it\n (any type can be used, it is passed to SetLIBS() later.)\n\ncontext.SetLIBS(value)\n Set LIBS to \"value\". The type of \"value\" is what AppendLIBS()\n returned. Return the value of LIBS before changing it (any type\n can be used, it is passed to SetLIBS() later.)\n\ncontext.headerfilename\n Name of file to append configure results to, usually \"confdefs.h\".\n The file must not exist or be empty when starting. 
Empty or None\n to skip this (some tests will not work!).\n\ncontext.config_h (may be missing).\n If present, must be a string, which will be filled with the\n contents of a config_h file.\n\ncontext.vardict\n Dictionary holding variables used for the tests and stores results\n from the tests, used for the build commands. Normally contains\n \"CC\", \"LIBS\", \"CPPFLAGS\", etc.\n\ncontext.havedict\n Dictionary holding results from the tests that are to be used\n inside a program. Names often start with \"HAVE\\_\". These are zero\n (feature not present) or one (feature present). Other variables\n may have any value, e.g., \"PERLVERSION\" can be a number and\n \"SYSTEMNAME\" a string.\n\"\"\"\n\nimport re\n\n#\n# PUBLIC VARIABLES\n#\n\nLogInputFiles = 1 # Set that to log the input files in case of a failed test\nLogErrorMessages = 1 # Set that to log Conftest-generated error messages\n\n#\n# PUBLIC FUNCTIONS\n#\n\n# Generic remarks:\n# - When a language is specified which is not supported the test fails. The\n# message is a bit different, because not all the arguments for the normal\n# message are available yet (chicken-egg problem).\n\n\ndef CheckBuilder(context, text = None, language = None):\n \"\"\"\n Configure check to see if the compiler works.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n \"text\" may be used to specify the code to be build.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"%s\\n\" % msg)\n return msg\n\n if not text:\n text = \"\"\"\nint main(void) {\n return 0;\n}\n\"\"\"\n\n context.Display(\"Checking if building a %s file works... \" % lang)\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckCC(context):\n \"\"\"\n Configure check for a working C compiler.\n\n This checks whether the C compiler, as defined in the $CC construction\n variable, can compile a C source file. It uses the current $CCCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the C compiler works... \")\n text = \"\"\"\nint main(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'CC', text, 'C')\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckSHCC(context):\n \"\"\"\n Configure check for a working shared C compiler.\n\n This checks whether the C compiler, as defined in the $SHCC construction\n variable, can compile a C source file. It uses the current $SHCCCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the (shared) C compiler works... \")\n text = \"\"\"\nint foo(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True)\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckCXX(context):\n \"\"\"\n Configure check for a working CXX compiler.\n\n This checks whether the CXX compiler, as defined in the $CXX construction\n variable, can compile a CXX source file. It uses the current $CXXCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the C++ compiler works... 
\")\n text = \"\"\"\nint main(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'CXX', text, 'C++')\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef CheckSHCXX(context):\n \"\"\"\n Configure check for a working shared CXX compiler.\n\n This checks whether the CXX compiler, as defined in the $SHCXX construction\n variable, can compile a CXX source file. It uses the current $SHCXXCOM value\n too, so that it can test against non working flags.\n\n \"\"\"\n context.Display(\"Checking whether the (shared) C++ compiler works... \")\n text = \"\"\"\nint main(void)\n{\n return 0;\n}\n\"\"\"\n ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True)\n _YesNoResult(context, ret, None, text)\n return ret\n\ndef _check_empty_program(context, comp, text, language, use_shared: bool = False):\n \"\"\"Return 0 on success, 1 otherwise.\"\"\"\n if comp not in context.env or not context.env[comp]:\n # The compiler construction variable is not set or empty\n return 1\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n return 1\n\n if use_shared:\n return context.CompileSharedObject(text, suffix)\n else:\n return context.CompileProg(text, suffix)\n\n\ndef CheckFunc(context, function_name, header = None, language = None):\n \"\"\"\n Configure check for a function \"function_name\".\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Optional \"header\" can be defined to define a function prototype, include a\n header file or anything else that comes before main().\n Sets HAVE_function_name in context.havedict according to the result.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n\n # Remarks from autoconf:\n # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h>\n # which includes <sys/select.h> which contains a prototype for select.\n # Similarly for bzero.\n # - assert.h is included to define __stub macros and hopefully few\n # prototypes, which can conflict with char $1(); below.\n # - Override any gcc2 internal prototype to avoid an error.\n # - We use char for the function declaration because int might match the\n # return type of a gcc2 builtin and then its argument prototype would\n # still apply.\n # - The GNU C library defines this for functions which it implements to\n # always fail with ENOSYS. Some functions are actually named something\n # starting with __ and the normal name is an alias.\n\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\"\n#ifdef __cplusplus\nextern \"C\"\n#endif\nchar %s(void);\"\"\" % function_name\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for %s(): %s\\n\" % (function_name, msg))\n return msg\n\n text = \"\"\"\n%(include)s\n#include <assert.h>\n%(hdr)s\n\n#if _MSC_VER && !__INTEL_COMPILER\n #pragma function(%(name)s)\n#endif\n\nint main(void) {\n#if defined (__stub_%(name)s) || defined (__stub___%(name)s)\n #error \"%(name)s has a GNU stub, cannot check\"\n#else\n %(name)s();\n#endif\n\n return 0;\n}\n\"\"\" % { 'name': function_name,\n 'include': includetext,\n 'hdr': header }\n\n context.Display(\"Checking for %s function %s()... 
\" % (lang, function_name))\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + function_name, text,\n \"Define to 1 if the system has the function `%s'.\" %\\\n function_name)\n return ret\n\n\ndef CheckHeader(context, header_name, header=None, language=None,\n include_quotes=None):\n \"\"\"\n Configure check for a C or C++ header file \"header_name\".\n Optional \"header\" can be defined to do something before including the\n header file (unusual, supported for consistency).\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Sets HAVE_header_name in context.havedict according to the result.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS and $CPPFLAGS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n # Why compile the program instead of just running the preprocessor?\n # It is possible that the header file exists, but actually using it may\n # fail (e.g., because it depends on other header files). Thus this test is\n # more strict. It may require using the \"header\" argument.\n #\n # Use <> by default, because the check is normally used for system header\n # files. SCons passes '\"\"' to overrule this.\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"\\n' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for header file %s: %s\\n\"\n % (header_name, msg))\n return msg\n\n if not include_quotes:\n include_quotes = \"<>\"\n\n text = \"%s%s\\n#include %s%s%s\\n\\n\" % (includetext, header,\n include_quotes[0], header_name, include_quotes[1])\n\n context.Display(\"Checking for %s header file %s... 
\" % (lang, header_name))\n ret = context.CompileProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + header_name, text,\n \"Define to 1 if you have the <%s> header file.\" % header_name)\n return ret\n\n\ndef CheckType(context, type_name, fallback = None,\n header = None, language = None):\n \"\"\"\n Configure check for a C or C++ type \"type_name\".\n Optional \"header\" can be defined to include a header file.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Sets HAVE_type_name in context.havedict according to the result.\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for %s type: %s\\n\" % (type_name, msg))\n return msg\n\n # Remarks from autoconf about this test:\n # - Grepping for the type in include files is not reliable (grep isn't\n # portable anyway).\n # - Using \"TYPE my_var;\" doesn't work for const qualified types in C++.\n # Adding an initializer is not valid for some C++ classes.\n # - Using the type as parameter to a function either fails for K&$ C or for\n # C++.\n # - Using \"TYPE *my_var;\" is valid in C for some types that are not\n # declared (struct something).\n # - Using \"sizeof(TYPE)\" is valid when TYPE is actually a variable.\n # - Using the previous two together works reliably.\n text = \"\"\"\n%(include)s\n%(header)s\n\nint main(void) {\n if ((%(name)s *) 0)\n return 0;\n if (sizeof (%(name)s))\n return 0;\n}\n\"\"\" % { 'include': includetext,\n 'header': header,\n 'name': type_name }\n\n context.Display(\"Checking for %s type %s... \" % (lang, type_name))\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + type_name, text,\n \"Define to 1 if the system has the type `%s'.\" % type_name)\n if ret and fallback and context.headerfilename:\n f = open(context.headerfilename, \"a\")\n f.write(\"typedef %s %s;\\n\" % (fallback, type_name))\n f.close()\n\n return ret\n\ndef CheckTypeSize(context, type_name, header = None, language = None, expect = None):\n \"\"\"This check can be used to get the size of a given type, or to check whether\n the type is of expected size.\n\n Arguments:\n - type : str\n the type to check\n - includes : sequence\n list of headers to include in the test code before testing the type\n - language : str\n 'C' or 'C++'\n - expect : int\n if given, will test wether the type has the given number of bytes.\n If not given, will automatically find the size.\n\n Returns:\n status : int\n 0 if the check failed, or the found size of the type if the check succeeded.\"\"\"\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n\n if not header:\n header = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for %s type: %s\\n\" % (type_name, msg))\n return msg\n\n src = includetext + header\n if expect is not None:\n # Only check if the given size is the right one\n context.Display('Checking %s is %d bytes... 
' % (type_name, expect))\n\n # test code taken from autoconf: this is a pretty clever hack to find that\n # a type is of a given size using only compilation. This speeds things up\n # quite a bit compared to straightforward code using TryRun\n src = src + r\"\"\"\ntypedef %s scons_check_type;\n\nint main(void)\n{\n static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)];\n test_array[0] = 0;\n\n return 0;\n}\n\"\"\"\n\n st = context.CompileProg(src % (type_name, expect), suffix)\n if not st:\n context.Display(\"yes\\n\")\n _Have(context, \"SIZEOF_%s\" % type_name, expect,\n \"The size of `%s', as computed by sizeof.\" % type_name)\n return expect\n else:\n context.Display(\"no\\n\")\n _LogFailed(context, src, st)\n return 0\n else:\n # Only check if the given size is the right one\n context.Message('Checking size of %s ... ' % type_name)\n\n # We have to be careful with the program we wish to test here since\n # compilation will be attempted using the current environment's flags.\n # So make sure that the program will compile without any warning. For\n # example using: 'int main(int argc, char** argv)' will fail with the\n # '-Wall -Werror' flags since the variables argc and argv would not be\n # used in the program...\n #\n src = src + \"\"\"\n#include <stdlib.h>\n#include <stdio.h>\nint main(void) {\n printf(\"%d\", (int)sizeof(\"\"\" + type_name + \"\"\"));\n return 0;\n}\n \"\"\"\n st, out = context.RunProg(src, suffix)\n try:\n size = int(out)\n except ValueError:\n # If cannot convert output of test prog to an integer (the size),\n # something went wront, so just fail\n st = 1\n size = 0\n\n if not st:\n context.Display(\"yes\\n\")\n _Have(context, \"SIZEOF_%s\" % type_name, size,\n \"The size of `%s', as computed by sizeof.\" % type_name)\n return size\n else:\n context.Display(\"no\\n\")\n _LogFailed(context, src, st)\n return 0\n\n return 0\n\ndef CheckDeclaration(context, symbol, includes = None, language = None):\n \"\"\"Checks whether symbol is declared.\n\n Use the same test as autoconf, that is test whether the symbol is defined\n as a macro or can be used as an r-value.\n\n Arguments:\n symbol : str\n the symbol to check\n includes : str\n Optional \"header\" can be defined to include a header file.\n language : str\n only C and C++ supported.\n\n Returns:\n status : bool\n True if the check failed, False if succeeded.\"\"\"\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n\n if not includes:\n includes = \"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for declaration %s: %s\\n\" % (symbol, msg))\n return msg\n\n src = includetext + includes\n context.Display('Checking whether %s is declared... 
' % symbol)\n\n src = src + r\"\"\"\nint main(void)\n{\n#ifndef %s\n (void) %s;\n#endif\n ;\n return 0;\n}\n\"\"\" % (symbol, symbol)\n\n st = context.CompileProg(src, suffix)\n _YesNoResult(context, st, \"HAVE_DECL_\" + symbol, src,\n \"Set to 1 if %s is defined.\" % symbol)\n return st\n\n\ndef CheckMember(context, aggregate_member, header = None, language = None):\n \"\"\"\n Configure check for a C or C++ member \"aggregate_member\".\n Optional \"header\" can be defined to include a header file.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n\n Arguments:\n aggregate_member : str\n the member to check. For example, 'struct tm.tm_gmtoff'.\n includes : str\n Optional \"header\" can be defined to include a header file.\n language : str\n only C and C++ supported.\n\n Returns the status (0 or False = Passed, True/non-zero = Failed).\n \"\"\"\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for member %s: %s\\n\" % (aggregate_member, msg))\n return True\n context.Display(\"Checking for %s member %s... \" % (lang, aggregate_member))\n fields = aggregate_member.split('.')\n if len(fields) != 2:\n msg = \"shall contain just one dot, for example 'struct tm.tm_gmtoff'\"\n context.Display(\"Cannot check for member %s: %s\\n\" % (aggregate_member, msg))\n return True\n aggregate, member = fields[0], fields[1]\n\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = ''\n text = '''\n%(include)s\n%(header)s\n\nint main(void) {\n if (sizeof ((%(aggregate)s *) 0)->%(member)s)\n return 0;\n}''' % {'include': includetext,\n 'header': header,\n 'aggregate': aggregate,\n 'member': member}\n\n ret = context.BuildProg(text, suffix)\n _YesNoResult(context, ret, \"HAVE_\" + aggregate_member, text,\n \"Define to 1 if the system has the member `%s`.\" % aggregate_member)\n return ret\n\ndef CheckLib(context, libs, func_name = None, header = None,\n extra_libs = None, call = None, language = None, autoadd: int = 1,\n append: bool=True, unique: bool=False):\n \"\"\"\n Configure check for a C or C++ libraries \"libs\". Searches through\n the list of libraries, until one is found where the test succeeds.\n Tests if \"func_name\" or \"call\" exists in the library. Note: if it exists\n in another library the test succeeds anyway!\n Optional \"header\" can be defined to include a header file. If not given a\n default prototype for \"func_name\" is added.\n Optional \"extra_libs\" is a list of library names to be added after\n \"lib_name\" in the build command. To be used for libraries that \"lib_name\"\n depends on.\n Optional \"call\" replaces the call to \"func_name\" in the test code. 
It must\n consist of complete C statements, including a trailing \";\".\n Both \"func_name\" and \"call\" arguments are optional, and in that case, just\n linking against the libs is tested.\n \"language\" should be \"C\" or \"C++\" and is used to select the compiler.\n Default is \"C\".\n Note that this uses the current value of compiler and linker flags, make\n sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.\n Returns an empty string for success, an error message for failure.\n \"\"\"\n # Include \"confdefs.h\" first, so that the header can use HAVE_HEADER_H.\n if context.headerfilename:\n includetext = '#include \"%s\"' % context.headerfilename\n else:\n includetext = ''\n if not header:\n header = \"\"\n\n text = \"\"\"\n%s\n%s\"\"\" % (includetext, header)\n\n # Add a function declaration if needed.\n if func_name and func_name != \"main\":\n if not header:\n text = text + \"\"\"\n#ifdef __cplusplus\nextern \"C\"\n#endif\nchar %s();\n\"\"\" % func_name\n\n # The actual test code.\n if not call:\n call = \"%s();\" % func_name\n\n # if no function to test, leave main() blank\n text = text + \"\"\"\nint main(void) {\n %s\nreturn 0;\n}\n\"\"\" % (call or \"\")\n\n if call:\n i = call.find(\"\\n\")\n if i > 0:\n calltext = call[:i] + \"..\"\n elif call[-1] == ';':\n calltext = call[:-1]\n else:\n calltext = call\n\n for lib_name in libs:\n\n lang, suffix, msg = _lang2suffix(language)\n if msg:\n context.Display(\"Cannot check for library %s: %s\\n\" % (lib_name, msg))\n return msg\n\n # if a function was specified to run in main(), say it\n if call:\n context.Display(\"Checking for %s in %s library %s... \"\n % (calltext, lang, lib_name))\n # otherwise, just say the name of library and language\n else:\n context.Display(\"Checking for %s library %s... \"\n % (lang, lib_name))\n\n if lib_name:\n l = [ lib_name ]\n if extra_libs:\n l.extend(extra_libs)\n if append:\n oldLIBS = context.AppendLIBS(l, unique)\n else:\n oldLIBS = context.PrependLIBS(l, unique)\n sym = \"HAVE_LIB\" + lib_name\n else:\n oldLIBS = -1\n sym = None\n\n ret = context.BuildProg(text, suffix)\n\n _YesNoResult(context, ret, sym, text,\n \"Define to 1 if you have the `%s' library.\" % lib_name)\n if oldLIBS != -1 and (ret or not autoadd):\n context.SetLIBS(oldLIBS)\n\n if not ret:\n return ret\n\n return ret\n\ndef CheckProg(context, prog_name):\n \"\"\"\n Configure check for a specific program.\n\n Check whether program prog_name exists in path. If it is found,\n returns the path for it, otherwise returns None.\n \"\"\"\n context.Display(\"Checking whether %s program exists...\" % prog_name)\n path = context.env.WhereIs(prog_name)\n if path:\n context.Display(path + \"\\n\")\n else:\n context.Display(\"no\\n\")\n return path\n\n\n#\n# END OF PUBLIC FUNCTIONS\n#\n\ndef _YesNoResult(context, ret, key, text, comment = None) -> None:\n r\"\"\"\n Handle the result of a test with a \"yes\" or \"no\" result.\n\n :Parameters:\n - `ret` is the return value: empty if OK, error message when not.\n - `key` is the name of the symbol to be defined (HAVE_foo).\n - `text` is the source code of the program used for testing.\n - `comment` is the C comment to add above the line defining the symbol (the comment is automatically put inside a /\\* \\*/). 
If None, no comment is added.\n \"\"\"\n if key:\n _Have(context, key, not ret, comment)\n if ret:\n context.Display(\"no\\n\")\n _LogFailed(context, text, ret)\n else:\n context.Display(\"yes\\n\")\n\n\ndef _Have(context, key, have, comment = None) -> None:\n r\"\"\"\n Store result of a test in context.havedict and context.headerfilename.\n\n :Parameters:\n - `key` - is a \"HAVE_abc\" name. It is turned into all CAPITALS and non-alphanumerics are replaced by an underscore.\n - `have` - value as it should appear in the header file, include quotes when desired and escape special characters!\n - `comment` is the C comment to add above the line defining the symbol (the comment is automatically put inside a /\\* \\*/). If None, no comment is added.\n\n\n The value of \"have\" can be:\n - 1 - Feature is defined, add \"#define key\".\n - 0 - Feature is not defined, add \"/\\* #undef key \\*/\". Adding \"undef\" is what autoconf does. Not useful for the compiler, but it shows that the test was done.\n - number - Feature is defined to this number \"#define key have\". Doesn't work for 0 or 1, use a string then.\n - string - Feature is defined to this string \"#define key have\".\n\n\n \"\"\"\n key_up = key.upper()\n key_up = re.sub('[^A-Z0-9_]', '_', key_up)\n context.havedict[key_up] = have\n if have == 1:\n line = \"#define %s 1\\n\" % key_up\n elif have == 0:\n line = \"/* #undef %s */\\n\" % key_up\n elif isinstance(have, int):\n line = \"#define %s %d\\n\" % (key_up, have)\n else:\n line = \"#define %s %s\\n\" % (key_up, str(have))\n\n if comment is not None:\n lines = \"\\n/* %s */\\n\" % comment + line\n else:\n lines = \"\\n\" + line\n\n if context.headerfilename:\n f = open(context.headerfilename, \"a\")\n f.write(lines)\n f.close()\n elif hasattr(context,'config_h'):\n context.config_h = context.config_h + lines\n\n\ndef _LogFailed(context, text, msg) -> None:\n \"\"\"\n Write to the log about a failed program.\n Add line numbers, so that error messages can be understood.\n \"\"\"\n if LogInputFiles:\n context.Log(\"Failed program was:\\n\")\n lines = text.split('\\n')\n if len(lines) and lines[-1] == '':\n lines = lines[:-1] # remove trailing empty line\n n = 1\n for line in lines:\n context.Log(\"%d: %s\\n\" % (n, line))\n n = n + 1\n if LogErrorMessages:\n context.Log(\"Error message: %s\\n\" % msg)\n\n\ndef _lang2suffix(lang):\n \"\"\"\n Convert a language name to a suffix.\n When \"lang\" is empty or None C is assumed.\n Returns a tuple (lang, suffix, None) when it works.\n For an unrecognized language returns (None, None, msg).\n\n Where:\n - lang = the unified language name\n - suffix = the suffix, including the leading dot\n - msg = an error message\n \"\"\"\n if not lang or lang in [\"C\", \"c\"]:\n return (\"C\", \".c\", None)\n if lang in [\"c++\", \"C++\", \"cpp\", \"CXX\", \"cxx\"]:\n return (\"C++\", \".cpp\", None)\n\n return None, None, \"Unsupported language: %s\" % lang\n\n\n# vim: set sw=4 et sts=4 tw=79 fo+=l:\n\n# Local Variables:\n# tab-width:4\n# indent-tabs-mode:nil\n# End:\n# vim: set expandtab tabstop=4 shiftwidth=4:\n",
"path": "SCons/Conftest.py"
}
] | diff --git a/CHANGES.txt b/CHANGES.txt
index ee08ab7019..7cd71dda41 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -97,6 +97,10 @@ RELEASE VERSION/DATE TO BE FILLED IN LATER
- Cleaned up dblite module (checker warnings, etc.).
- Some cleanup in the FortranCommon tool.
+ From Jonathon Reinhart:
+ - Fix another instance of `int main()` in CheckLib() causing failures
+ when using -Wstrict-prototypes.
+
RELEASE 4.5.2 - Sun, 21 Mar 2023 14:08:29 -0700
diff --git a/SCons/Conftest.py b/SCons/Conftest.py
index 3541bce0ad..6af5e78893 100644
--- a/SCons/Conftest.py
+++ b/SCons/Conftest.py
@@ -676,8 +676,7 @@ def CheckLib(context, libs, func_name = None, header = None,
# if no function to test, leave main() blank
text = text + """
-int
-main() {
+int main(void) {
%s
return 0;
}
|
DDMAL__CantusDB-328 | Representation of Notation objects
When I recently tried to edit a source, I was presented with an error message, and found that I was missing several required fields, including this one: [screenshot omitted]
Notation objects are currently pretty inscrutable. They should be represented such that at least their `name` property is visible.
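One minimal way to make these objects readable in form dropdowns and validation messages is to give the model a `__str__` method. A sketch of that idea, building on the existing model definition (the exact display format is just a suggestion):

```python
from django.db import models
from main_app.models import BaseModel  # project-local base class, as in the current model


class Notation(BaseModel):
    name = models.CharField(max_length=63)

    def __str__(self):
        # e.g. "Square notation (7)" instead of the opaque default "Notation object (7)"
        return f"{self.name} ({self.id})"
```

With a `__str__` like this, any select widget or error message that renders a Notation instance shows its name rather than just its primary key.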
Larger question: why do we have notation objects at all? Currently, the notation model has only one property: `name`. Could this information in Source objects not be more simply represented by a CharField? Is using Notation objects simply the way things were done in OldCantus? Are we using them to ensure standardization among multiple Sources?
| [
{
"content": "from django.db import models\nfrom main_app.models import BaseModel\n\n\nclass Notation(BaseModel):\n name = models.CharField(max_length=63)\n",
"path": "django/cantusdb_project/main_app/models/notation.py"
}
] | [
{
"content": "from django.db import models\nfrom main_app.models import BaseModel\n\n\nclass Notation(BaseModel):\n name = models.CharField(max_length=63)\n def __str__(self):\n return f\"{self.name} ({self.id})\"",
"path": "django/cantusdb_project/main_app/models/notation.py"
}
] | diff --git a/django/cantusdb_project/main_app/models/notation.py b/django/cantusdb_project/main_app/models/notation.py
index a6a4469f0..52faa2b90 100644
--- a/django/cantusdb_project/main_app/models/notation.py
+++ b/django/cantusdb_project/main_app/models/notation.py
@@ -4,3 +4,5 @@
class Notation(BaseModel):
name = models.CharField(max_length=63)
+ def __str__(self):
+ return f"{self.name} ({self.id})"
\ No newline at end of file
|
encode__uvicorn-1279 | Include a environment variable to change arg commands
Hello,
I'm testing FastAPI (https://fastapi.tiangolo.com/), and there's a nice environment variable when running with gunicorn (https://docs.gunicorn.org/en/stable/settings.html#settings), `GUNICORN_CMD_ARGS`, which lets you pass command-line args through an environment variable.

This is very useful when using Docker Compose or k8s: without rebuilding the container I can change how it starts (more debug messages instead of only critical).

Would it be possible to include such an environment variable in uvicorn? e.g. `UVICORN_CMD_ARGS`

The `GUNICORN_CMD_ARGS` implementation can be seen here:
https://github.com/benoitc/gunicorn/blob/ee685e197b3f7cf899dc7d6e0688ff169e9d10df/gunicorn/app/base.py#L171
https://github.com/benoitc/gunicorn/blob/6aab4decde5735fc77daf4fecaf9ef3632189f62/gunicorn/config.py#L79
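As a rough sketch of what the proposed variable could do (note: `UVICORN_CMD_ARGS` is the name being proposed here, not an existing uvicorn feature; the helper below is purely illustrative, modelled on Gunicorn's approach of `shlex`-splitting the environment value and merging it with the real command line):

```python
import os
import shlex
import sys


def argv_with_env_defaults() -> list:
    """Hypothetical helper: prepend arguments taken from UVICORN_CMD_ARGS.

    Splitting the value like a shell command line mirrors what Gunicorn does
    for GUNICORN_CMD_ARGS; putting the environment arguments first means
    explicit CLI arguments can still override them.
    """
    env_args = shlex.split(os.environ.get("UVICORN_CMD_ARGS", ""))
    return env_args + sys.argv[1:]


if __name__ == "__main__":
    # e.g. UVICORN_CMD_ARGS="--log-level debug --workers 2" python demo.py main:app
    print(argv_with_env_defaults())
```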
thanks!
| [
{
"content": "import logging\nimport os\nimport platform\nimport ssl\nimport sys\nimport typing\n\nimport click\nfrom asgiref.typing import ASGIApplication\n\nimport uvicorn\nfrom uvicorn.config import (\n HTTP_PROTOCOLS,\n INTERFACES,\n LIFESPAN,\n LOG_LEVELS,\n LOGGING_CONFIG,\n LOOP_SETUPS,\n SSL_PROTOCOL_VERSION,\n WS_PROTOCOLS,\n Config,\n)\nfrom uvicorn.server import Server, ServerState # noqa: F401 # Used to be defined here.\nfrom uvicorn.supervisors import ChangeReload, Multiprocess\n\nLEVEL_CHOICES = click.Choice(list(LOG_LEVELS.keys()))\nHTTP_CHOICES = click.Choice(list(HTTP_PROTOCOLS.keys()))\nWS_CHOICES = click.Choice(list(WS_PROTOCOLS.keys()))\nLIFESPAN_CHOICES = click.Choice(list(LIFESPAN.keys()))\nLOOP_CHOICES = click.Choice([key for key in LOOP_SETUPS.keys() if key != \"none\"])\nINTERFACE_CHOICES = click.Choice(INTERFACES)\n\nlogger = logging.getLogger(\"uvicorn.error\")\n\n\ndef print_version(ctx: click.Context, param: click.Parameter, value: bool) -> None:\n if not value or ctx.resilient_parsing:\n return\n click.echo(\n \"Running uvicorn %s with %s %s on %s\"\n % (\n uvicorn.__version__,\n platform.python_implementation(),\n platform.python_version(),\n platform.system(),\n )\n )\n ctx.exit()\n\n\[email protected]()\[email protected](\"app\")\[email protected](\n \"--host\",\n type=str,\n default=\"127.0.0.1\",\n help=\"Bind socket to this host.\",\n show_default=True,\n)\[email protected](\n \"--port\",\n type=int,\n default=8000,\n help=\"Bind socket to this port.\",\n show_default=True,\n)\[email protected](\"--uds\", type=str, default=None, help=\"Bind to a UNIX domain socket.\")\[email protected](\n \"--fd\", type=int, default=None, help=\"Bind to socket from this file descriptor.\"\n)\[email protected](\n \"--debug\", is_flag=True, default=False, help=\"Enable debug mode.\", hidden=True\n)\[email protected](\"--reload\", is_flag=True, default=False, help=\"Enable auto-reload.\")\[email protected](\n \"--reload-dir\",\n \"reload_dirs\",\n multiple=True,\n help=\"Set reload directories explicitly, instead of using the current working\"\n \" directory.\",\n type=click.Path(exists=True),\n)\[email protected](\n \"--reload-include\",\n \"reload_includes\",\n multiple=True,\n help=\"Set glob patterns to include while watching for files. Includes '*.py' \"\n \"by default; these defaults can be overridden in `--reload-exclude`.\",\n)\[email protected](\n \"--reload-exclude\",\n \"reload_excludes\",\n multiple=True,\n help=\"Set glob patterns to exclude while watching for files. Includes \"\n \"'.*, .py[cod], .sw.*, ~*' by default; these defaults can be overridden \"\n \"in `--reload-include`.\",\n)\[email protected](\n \"--reload-delay\",\n type=float,\n default=0.25,\n show_default=True,\n help=\"Delay between previous and next check if application needs to be.\"\n \" Defaults to 0.25s.\",\n)\[email protected](\n \"--workers\",\n default=None,\n type=int,\n help=\"Number of worker processes. Defaults to the $WEB_CONCURRENCY environment\"\n \" variable if available, or 1. 
Not valid with --reload.\",\n)\[email protected](\n \"--loop\",\n type=LOOP_CHOICES,\n default=\"auto\",\n help=\"Event loop implementation.\",\n show_default=True,\n)\[email protected](\n \"--http\",\n type=HTTP_CHOICES,\n default=\"auto\",\n help=\"HTTP protocol implementation.\",\n show_default=True,\n)\[email protected](\n \"--ws\",\n type=WS_CHOICES,\n default=\"auto\",\n help=\"WebSocket protocol implementation.\",\n show_default=True,\n)\[email protected](\n \"--ws-max-size\",\n type=int,\n default=16777216,\n help=\"WebSocket max size message in bytes\",\n show_default=True,\n)\[email protected](\n \"--ws-ping-interval\",\n type=float,\n default=20.0,\n help=\"WebSocket ping interval\",\n show_default=True,\n)\[email protected](\n \"--ws-ping-timeout\",\n type=float,\n default=20.0,\n help=\"WebSocket ping timeout\",\n show_default=True,\n)\[email protected](\n \"--lifespan\",\n type=LIFESPAN_CHOICES,\n default=\"auto\",\n help=\"Lifespan implementation.\",\n show_default=True,\n)\[email protected](\n \"--interface\",\n type=INTERFACE_CHOICES,\n default=\"auto\",\n help=\"Select ASGI3, ASGI2, or WSGI as the application interface.\",\n show_default=True,\n)\[email protected](\n \"--env-file\",\n type=click.Path(exists=True),\n default=None,\n help=\"Environment configuration file.\",\n show_default=True,\n)\[email protected](\n \"--log-config\",\n type=click.Path(exists=True),\n default=None,\n help=\"Logging configuration file. Supported formats: .ini, .json, .yaml.\",\n show_default=True,\n)\[email protected](\n \"--log-level\",\n type=LEVEL_CHOICES,\n default=None,\n help=\"Log level. [default: info]\",\n show_default=True,\n)\[email protected](\n \"--access-log/--no-access-log\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable access log.\",\n)\[email protected](\n \"--use-colors/--no-use-colors\",\n is_flag=True,\n default=None,\n help=\"Enable/Disable colorized logging.\",\n)\[email protected](\n \"--proxy-headers/--no-proxy-headers\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable X-Forwarded-Proto, X-Forwarded-For, X-Forwarded-Port to \"\n \"populate remote address info.\",\n)\[email protected](\n \"--server-header/--no-server-header\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable default Server header.\",\n)\[email protected](\n \"--date-header/--no-date-header\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable default Date header.\",\n)\[email protected](\n \"--forwarded-allow-ips\",\n type=str,\n default=None,\n help=\"Comma seperated list of IPs to trust with proxy headers. 
Defaults to\"\n \" the $FORWARDED_ALLOW_IPS environment variable if available, or '127.0.0.1'.\",\n)\[email protected](\n \"--root-path\",\n type=str,\n default=\"\",\n help=\"Set the ASGI 'root_path' for applications submounted below a given URL path.\",\n)\[email protected](\n \"--limit-concurrency\",\n type=int,\n default=None,\n help=\"Maximum number of concurrent connections or tasks to allow, before issuing\"\n \" HTTP 503 responses.\",\n)\[email protected](\n \"--backlog\",\n type=int,\n default=2048,\n help=\"Maximum number of connections to hold in backlog\",\n)\[email protected](\n \"--limit-max-requests\",\n type=int,\n default=None,\n help=\"Maximum number of requests to service before terminating the process.\",\n)\[email protected](\n \"--timeout-keep-alive\",\n type=int,\n default=5,\n help=\"Close Keep-Alive connections if no new data is received within this timeout.\",\n show_default=True,\n)\[email protected](\n \"--ssl-keyfile\", type=str, default=None, help=\"SSL key file\", show_default=True\n)\[email protected](\n \"--ssl-certfile\",\n type=str,\n default=None,\n help=\"SSL certificate file\",\n show_default=True,\n)\[email protected](\n \"--ssl-keyfile-password\",\n type=str,\n default=None,\n help=\"SSL keyfile password\",\n show_default=True,\n)\[email protected](\n \"--ssl-version\",\n type=int,\n default=int(SSL_PROTOCOL_VERSION),\n help=\"SSL version to use (see stdlib ssl module's)\",\n show_default=True,\n)\[email protected](\n \"--ssl-cert-reqs\",\n type=int,\n default=int(ssl.CERT_NONE),\n help=\"Whether client certificate is required (see stdlib ssl module's)\",\n show_default=True,\n)\[email protected](\n \"--ssl-ca-certs\",\n type=str,\n default=None,\n help=\"CA certificates file\",\n show_default=True,\n)\[email protected](\n \"--ssl-ciphers\",\n type=str,\n default=\"TLSv1\",\n help=\"Ciphers to use (see stdlib ssl module's)\",\n show_default=True,\n)\[email protected](\n \"--header\",\n \"headers\",\n multiple=True,\n help=\"Specify custom default HTTP response headers as a Name:Value pair\",\n)\[email protected](\n \"--version\",\n is_flag=True,\n callback=print_version,\n expose_value=False,\n is_eager=True,\n help=\"Display the uvicorn version and exit.\",\n)\[email protected](\n \"--app-dir\",\n \"app_dir\",\n default=\".\",\n show_default=True,\n help=\"Look for APP in the specified directory, by adding this to the PYTHONPATH.\"\n \" Defaults to the current working directory.\",\n)\[email protected](\n \"--factory\",\n is_flag=True,\n default=False,\n help=\"Treat APP as an application factory, i.e. 
a () -> <ASGI app> callable.\",\n show_default=True,\n)\ndef main(\n app: str,\n host: str,\n port: int,\n uds: str,\n fd: int,\n loop: str,\n http: str,\n ws: str,\n ws_max_size: int,\n ws_ping_interval: float,\n ws_ping_timeout: float,\n lifespan: str,\n interface: str,\n debug: bool,\n reload: bool,\n reload_dirs: typing.List[str],\n reload_includes: typing.List[str],\n reload_excludes: typing.List[str],\n reload_delay: float,\n workers: int,\n env_file: str,\n log_config: str,\n log_level: str,\n access_log: bool,\n proxy_headers: bool,\n server_header: bool,\n date_header: bool,\n forwarded_allow_ips: str,\n root_path: str,\n limit_concurrency: int,\n backlog: int,\n limit_max_requests: int,\n timeout_keep_alive: int,\n ssl_keyfile: str,\n ssl_certfile: str,\n ssl_keyfile_password: str,\n ssl_version: int,\n ssl_cert_reqs: int,\n ssl_ca_certs: str,\n ssl_ciphers: str,\n headers: typing.List[str],\n use_colors: bool,\n app_dir: str,\n factory: bool,\n) -> None:\n sys.path.insert(0, app_dir)\n\n kwargs = {\n \"host\": host,\n \"port\": port,\n \"uds\": uds,\n \"fd\": fd,\n \"loop\": loop,\n \"http\": http,\n \"ws\": ws,\n \"ws_max_size\": ws_max_size,\n \"ws_ping_interval\": ws_ping_interval,\n \"ws_ping_timeout\": ws_ping_timeout,\n \"lifespan\": lifespan,\n \"env_file\": env_file,\n \"log_config\": LOGGING_CONFIG if log_config is None else log_config,\n \"log_level\": log_level,\n \"access_log\": access_log,\n \"interface\": interface,\n \"debug\": debug,\n \"reload\": reload,\n \"reload_dirs\": reload_dirs if reload_dirs else None,\n \"reload_includes\": reload_includes if reload_includes else None,\n \"reload_excludes\": reload_excludes if reload_excludes else None,\n \"reload_delay\": reload_delay,\n \"workers\": workers,\n \"proxy_headers\": proxy_headers,\n \"server_header\": server_header,\n \"date_header\": date_header,\n \"forwarded_allow_ips\": forwarded_allow_ips,\n \"root_path\": root_path,\n \"limit_concurrency\": limit_concurrency,\n \"backlog\": backlog,\n \"limit_max_requests\": limit_max_requests,\n \"timeout_keep_alive\": timeout_keep_alive,\n \"ssl_keyfile\": ssl_keyfile,\n \"ssl_certfile\": ssl_certfile,\n \"ssl_keyfile_password\": ssl_keyfile_password,\n \"ssl_version\": ssl_version,\n \"ssl_cert_reqs\": ssl_cert_reqs,\n \"ssl_ca_certs\": ssl_ca_certs,\n \"ssl_ciphers\": ssl_ciphers,\n \"headers\": [header.split(\":\", 1) for header in headers],\n \"use_colors\": use_colors,\n \"factory\": factory,\n }\n run(app, **kwargs)\n\n\ndef run(app: typing.Union[ASGIApplication, str], **kwargs: typing.Any) -> None:\n config = Config(app, **kwargs)\n server = Server(config=config)\n\n if (config.reload or config.workers > 1) and not isinstance(app, str):\n logger = logging.getLogger(\"uvicorn.error\")\n logger.warning(\n \"You must pass the application as an import string to enable 'reload' or \"\n \"'workers'.\"\n )\n sys.exit(1)\n\n if config.should_reload:\n sock = config.bind_socket()\n ChangeReload(config, target=server.run, sockets=[sock]).run()\n elif config.workers > 1:\n sock = config.bind_socket()\n Multiprocess(config, target=server.run, sockets=[sock]).run()\n else:\n server.run()\n if config.uds:\n os.remove(config.uds) # pragma: py-win32\n\n\nif __name__ == \"__main__\":\n main() # pragma: no cover\n",
"path": "uvicorn/main.py"
}
] | [
{
"content": "import logging\nimport os\nimport platform\nimport ssl\nimport sys\nimport typing\n\nimport click\nfrom asgiref.typing import ASGIApplication\n\nimport uvicorn\nfrom uvicorn.config import (\n HTTP_PROTOCOLS,\n INTERFACES,\n LIFESPAN,\n LOG_LEVELS,\n LOGGING_CONFIG,\n LOOP_SETUPS,\n SSL_PROTOCOL_VERSION,\n WS_PROTOCOLS,\n Config,\n)\nfrom uvicorn.server import Server, ServerState # noqa: F401 # Used to be defined here.\nfrom uvicorn.supervisors import ChangeReload, Multiprocess\n\nLEVEL_CHOICES = click.Choice(list(LOG_LEVELS.keys()))\nHTTP_CHOICES = click.Choice(list(HTTP_PROTOCOLS.keys()))\nWS_CHOICES = click.Choice(list(WS_PROTOCOLS.keys()))\nLIFESPAN_CHOICES = click.Choice(list(LIFESPAN.keys()))\nLOOP_CHOICES = click.Choice([key for key in LOOP_SETUPS.keys() if key != \"none\"])\nINTERFACE_CHOICES = click.Choice(INTERFACES)\n\nlogger = logging.getLogger(\"uvicorn.error\")\n\n\ndef print_version(ctx: click.Context, param: click.Parameter, value: bool) -> None:\n if not value or ctx.resilient_parsing:\n return\n click.echo(\n \"Running uvicorn %s with %s %s on %s\"\n % (\n uvicorn.__version__,\n platform.python_implementation(),\n platform.python_version(),\n platform.system(),\n )\n )\n ctx.exit()\n\n\[email protected](context_settings={\"auto_envvar_prefix\": \"UVICORN\"})\[email protected](\"app\")\[email protected](\n \"--host\",\n type=str,\n default=\"127.0.0.1\",\n help=\"Bind socket to this host.\",\n show_default=True,\n)\[email protected](\n \"--port\",\n type=int,\n default=8000,\n help=\"Bind socket to this port.\",\n show_default=True,\n)\[email protected](\"--uds\", type=str, default=None, help=\"Bind to a UNIX domain socket.\")\[email protected](\n \"--fd\", type=int, default=None, help=\"Bind to socket from this file descriptor.\"\n)\[email protected](\n \"--debug\", is_flag=True, default=False, help=\"Enable debug mode.\", hidden=True\n)\[email protected](\"--reload\", is_flag=True, default=False, help=\"Enable auto-reload.\")\[email protected](\n \"--reload-dir\",\n \"reload_dirs\",\n multiple=True,\n help=\"Set reload directories explicitly, instead of using the current working\"\n \" directory.\",\n type=click.Path(exists=True),\n)\[email protected](\n \"--reload-include\",\n \"reload_includes\",\n multiple=True,\n help=\"Set glob patterns to include while watching for files. Includes '*.py' \"\n \"by default; these defaults can be overridden in `--reload-exclude`.\",\n)\[email protected](\n \"--reload-exclude\",\n \"reload_excludes\",\n multiple=True,\n help=\"Set glob patterns to exclude while watching for files. Includes \"\n \"'.*, .py[cod], .sw.*, ~*' by default; these defaults can be overridden \"\n \"in `--reload-include`.\",\n)\[email protected](\n \"--reload-delay\",\n type=float,\n default=0.25,\n show_default=True,\n help=\"Delay between previous and next check if application needs to be.\"\n \" Defaults to 0.25s.\",\n)\[email protected](\n \"--workers\",\n default=None,\n type=int,\n help=\"Number of worker processes. Defaults to the $WEB_CONCURRENCY environment\"\n \" variable if available, or 1. 
Not valid with --reload.\",\n)\[email protected](\n \"--loop\",\n type=LOOP_CHOICES,\n default=\"auto\",\n help=\"Event loop implementation.\",\n show_default=True,\n)\[email protected](\n \"--http\",\n type=HTTP_CHOICES,\n default=\"auto\",\n help=\"HTTP protocol implementation.\",\n show_default=True,\n)\[email protected](\n \"--ws\",\n type=WS_CHOICES,\n default=\"auto\",\n help=\"WebSocket protocol implementation.\",\n show_default=True,\n)\[email protected](\n \"--ws-max-size\",\n type=int,\n default=16777216,\n help=\"WebSocket max size message in bytes\",\n show_default=True,\n)\[email protected](\n \"--ws-ping-interval\",\n type=float,\n default=20.0,\n help=\"WebSocket ping interval\",\n show_default=True,\n)\[email protected](\n \"--ws-ping-timeout\",\n type=float,\n default=20.0,\n help=\"WebSocket ping timeout\",\n show_default=True,\n)\[email protected](\n \"--lifespan\",\n type=LIFESPAN_CHOICES,\n default=\"auto\",\n help=\"Lifespan implementation.\",\n show_default=True,\n)\[email protected](\n \"--interface\",\n type=INTERFACE_CHOICES,\n default=\"auto\",\n help=\"Select ASGI3, ASGI2, or WSGI as the application interface.\",\n show_default=True,\n)\[email protected](\n \"--env-file\",\n type=click.Path(exists=True),\n default=None,\n help=\"Environment configuration file.\",\n show_default=True,\n)\[email protected](\n \"--log-config\",\n type=click.Path(exists=True),\n default=None,\n help=\"Logging configuration file. Supported formats: .ini, .json, .yaml.\",\n show_default=True,\n)\[email protected](\n \"--log-level\",\n type=LEVEL_CHOICES,\n default=None,\n help=\"Log level. [default: info]\",\n show_default=True,\n)\[email protected](\n \"--access-log/--no-access-log\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable access log.\",\n)\[email protected](\n \"--use-colors/--no-use-colors\",\n is_flag=True,\n default=None,\n help=\"Enable/Disable colorized logging.\",\n)\[email protected](\n \"--proxy-headers/--no-proxy-headers\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable X-Forwarded-Proto, X-Forwarded-For, X-Forwarded-Port to \"\n \"populate remote address info.\",\n)\[email protected](\n \"--server-header/--no-server-header\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable default Server header.\",\n)\[email protected](\n \"--date-header/--no-date-header\",\n is_flag=True,\n default=True,\n help=\"Enable/Disable default Date header.\",\n)\[email protected](\n \"--forwarded-allow-ips\",\n type=str,\n default=None,\n help=\"Comma seperated list of IPs to trust with proxy headers. 
Defaults to\"\n \" the $FORWARDED_ALLOW_IPS environment variable if available, or '127.0.0.1'.\",\n)\[email protected](\n \"--root-path\",\n type=str,\n default=\"\",\n help=\"Set the ASGI 'root_path' for applications submounted below a given URL path.\",\n)\[email protected](\n \"--limit-concurrency\",\n type=int,\n default=None,\n help=\"Maximum number of concurrent connections or tasks to allow, before issuing\"\n \" HTTP 503 responses.\",\n)\[email protected](\n \"--backlog\",\n type=int,\n default=2048,\n help=\"Maximum number of connections to hold in backlog\",\n)\[email protected](\n \"--limit-max-requests\",\n type=int,\n default=None,\n help=\"Maximum number of requests to service before terminating the process.\",\n)\[email protected](\n \"--timeout-keep-alive\",\n type=int,\n default=5,\n help=\"Close Keep-Alive connections if no new data is received within this timeout.\",\n show_default=True,\n)\[email protected](\n \"--ssl-keyfile\", type=str, default=None, help=\"SSL key file\", show_default=True\n)\[email protected](\n \"--ssl-certfile\",\n type=str,\n default=None,\n help=\"SSL certificate file\",\n show_default=True,\n)\[email protected](\n \"--ssl-keyfile-password\",\n type=str,\n default=None,\n help=\"SSL keyfile password\",\n show_default=True,\n)\[email protected](\n \"--ssl-version\",\n type=int,\n default=int(SSL_PROTOCOL_VERSION),\n help=\"SSL version to use (see stdlib ssl module's)\",\n show_default=True,\n)\[email protected](\n \"--ssl-cert-reqs\",\n type=int,\n default=int(ssl.CERT_NONE),\n help=\"Whether client certificate is required (see stdlib ssl module's)\",\n show_default=True,\n)\[email protected](\n \"--ssl-ca-certs\",\n type=str,\n default=None,\n help=\"CA certificates file\",\n show_default=True,\n)\[email protected](\n \"--ssl-ciphers\",\n type=str,\n default=\"TLSv1\",\n help=\"Ciphers to use (see stdlib ssl module's)\",\n show_default=True,\n)\[email protected](\n \"--header\",\n \"headers\",\n multiple=True,\n help=\"Specify custom default HTTP response headers as a Name:Value pair\",\n)\[email protected](\n \"--version\",\n is_flag=True,\n callback=print_version,\n expose_value=False,\n is_eager=True,\n help=\"Display the uvicorn version and exit.\",\n)\[email protected](\n \"--app-dir\",\n \"app_dir\",\n default=\".\",\n show_default=True,\n help=\"Look for APP in the specified directory, by adding this to the PYTHONPATH.\"\n \" Defaults to the current working directory.\",\n)\[email protected](\n \"--factory\",\n is_flag=True,\n default=False,\n help=\"Treat APP as an application factory, i.e. 
a () -> <ASGI app> callable.\",\n show_default=True,\n)\ndef main(\n app: str,\n host: str,\n port: int,\n uds: str,\n fd: int,\n loop: str,\n http: str,\n ws: str,\n ws_max_size: int,\n ws_ping_interval: float,\n ws_ping_timeout: float,\n lifespan: str,\n interface: str,\n debug: bool,\n reload: bool,\n reload_dirs: typing.List[str],\n reload_includes: typing.List[str],\n reload_excludes: typing.List[str],\n reload_delay: float,\n workers: int,\n env_file: str,\n log_config: str,\n log_level: str,\n access_log: bool,\n proxy_headers: bool,\n server_header: bool,\n date_header: bool,\n forwarded_allow_ips: str,\n root_path: str,\n limit_concurrency: int,\n backlog: int,\n limit_max_requests: int,\n timeout_keep_alive: int,\n ssl_keyfile: str,\n ssl_certfile: str,\n ssl_keyfile_password: str,\n ssl_version: int,\n ssl_cert_reqs: int,\n ssl_ca_certs: str,\n ssl_ciphers: str,\n headers: typing.List[str],\n use_colors: bool,\n app_dir: str,\n factory: bool,\n) -> None:\n sys.path.insert(0, app_dir)\n\n kwargs = {\n \"host\": host,\n \"port\": port,\n \"uds\": uds,\n \"fd\": fd,\n \"loop\": loop,\n \"http\": http,\n \"ws\": ws,\n \"ws_max_size\": ws_max_size,\n \"ws_ping_interval\": ws_ping_interval,\n \"ws_ping_timeout\": ws_ping_timeout,\n \"lifespan\": lifespan,\n \"env_file\": env_file,\n \"log_config\": LOGGING_CONFIG if log_config is None else log_config,\n \"log_level\": log_level,\n \"access_log\": access_log,\n \"interface\": interface,\n \"debug\": debug,\n \"reload\": reload,\n \"reload_dirs\": reload_dirs if reload_dirs else None,\n \"reload_includes\": reload_includes if reload_includes else None,\n \"reload_excludes\": reload_excludes if reload_excludes else None,\n \"reload_delay\": reload_delay,\n \"workers\": workers,\n \"proxy_headers\": proxy_headers,\n \"server_header\": server_header,\n \"date_header\": date_header,\n \"forwarded_allow_ips\": forwarded_allow_ips,\n \"root_path\": root_path,\n \"limit_concurrency\": limit_concurrency,\n \"backlog\": backlog,\n \"limit_max_requests\": limit_max_requests,\n \"timeout_keep_alive\": timeout_keep_alive,\n \"ssl_keyfile\": ssl_keyfile,\n \"ssl_certfile\": ssl_certfile,\n \"ssl_keyfile_password\": ssl_keyfile_password,\n \"ssl_version\": ssl_version,\n \"ssl_cert_reqs\": ssl_cert_reqs,\n \"ssl_ca_certs\": ssl_ca_certs,\n \"ssl_ciphers\": ssl_ciphers,\n \"headers\": [header.split(\":\", 1) for header in headers],\n \"use_colors\": use_colors,\n \"factory\": factory,\n }\n run(app, **kwargs)\n\n\ndef run(app: typing.Union[ASGIApplication, str], **kwargs: typing.Any) -> None:\n config = Config(app, **kwargs)\n server = Server(config=config)\n\n if (config.reload or config.workers > 1) and not isinstance(app, str):\n logger = logging.getLogger(\"uvicorn.error\")\n logger.warning(\n \"You must pass the application as an import string to enable 'reload' or \"\n \"'workers'.\"\n )\n sys.exit(1)\n\n if config.should_reload:\n sock = config.bind_socket()\n ChangeReload(config, target=server.run, sockets=[sock]).run()\n elif config.workers > 1:\n sock = config.bind_socket()\n Multiprocess(config, target=server.run, sockets=[sock]).run()\n else:\n server.run()\n if config.uds:\n os.remove(config.uds) # pragma: py-win32\n\n\nif __name__ == \"__main__\":\n main() # pragma: no cover\n",
"path": "uvicorn/main.py"
}
] | diff --git a/docs/settings.md b/docs/settings.md
index b9055a94f..3edf4b3c3 100644
--- a/docs/settings.md
+++ b/docs/settings.md
@@ -7,6 +7,13 @@ equivalent keyword arguments, eg. `uvicorn.run("example:app", port=5000, reload=
Please note that in this case, if you use `reload=True` or `workers=NUM`,
you should put `uvicorn.run` into `if __name__ == '__main__'` clause in the main module.
+You can also configure Uvicorn using environment variables with the prefix `UVICORN_`.
+For example, in case you want to run the app on port `5000`, just set the environment variable `UVICORN_PORT` to `5000`.
+
+!!! note
+ CLI options and the arguments for `uvicorn.run()` take precedence over environment variables.
+
+
## Application
* `APP` - The ASGI application to run, in the format `"<module>:<attribute>"`.
diff --git a/mkdocs.yml b/mkdocs.yml
index 040e8cd0e..ed833a10c 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -29,6 +29,7 @@ nav:
- Contributing: "contributing.md"
markdown_extensions:
+ - admonition
- codehilite:
css_class: highlight
- toc:
diff --git a/tests/test_cli.py b/tests/test_cli.py
index e372c7e78..0754598dc 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,4 +1,5 @@
import importlib
+import os
import platform
import sys
from pathlib import Path
@@ -17,6 +18,10 @@
main = importlib.import_module("uvicorn.main")
+class App:
+ pass
+
+
def test_cli_print_version() -> None:
runner = CliRunner()
@@ -131,5 +136,26 @@ def test_cli_reloader_incomplete_app_parameter(
) in captured.err
-class App:
- pass
+@pytest.fixture()
+def load_env_h11_protocol():
+ old_environ = dict(os.environ)
+ os.environ["UVICORN_HTTP"] = "h11"
+ yield
+ os.environ.clear()
+ os.environ.update(old_environ)
+
+
+def test_env_variables(load_env_h11_protocol: None):
+ runner = CliRunner(env=os.environ)
+ with mock.patch.object(main, "run") as mock_run:
+ runner.invoke(cli, ["tests.test_cli:App"])
+ _, kwargs = mock_run.call_args
+ assert kwargs["http"] == "h11"
+
+
+def test_mistmatch_env_variables(load_env_h11_protocol: None):
+ runner = CliRunner(env=os.environ)
+ with mock.patch.object(main, "run") as mock_run:
+ runner.invoke(cli, ["tests.test_cli:App", "--http=httptools"])
+ _, kwargs = mock_run.call_args
+ assert kwargs["http"] == "httptools"
diff --git a/uvicorn/main.py b/uvicorn/main.py
index c4f4a6c4c..896f96a67 100644
--- a/uvicorn/main.py
+++ b/uvicorn/main.py
@@ -48,7 +48,7 @@ def print_version(ctx: click.Context, param: click.Parameter, value: bool) -> No
ctx.exit()
-@click.command()
+@click.command(context_settings={"auto_envvar_prefix": "UVICORN"})
@click.argument("app")
@click.option(
"--host",
|
mlcommons__GaNDLF-744 | Make `black` version static
**Is your feature request related to a problem? Please describe.**
Different versions of black behave differently WRT linting, which creates issues, such as PRs having linting changes where they are not needed.
**Describe the solution you'd like**
Fix the version of `black`.
**Describe alternatives you've considered**
N.A.
**Additional context**
N.A.
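A minimal sketch of the requested change as it could appear in the `requirements` list of `setup.py` (the pinned version shown is the one the patch below adopts):

```python
# Sketch only: pin the formatter so every environment and CI run formats
# code identically; an unpinned "black" lets releases drift between PRs.
requirements = [
    "torch==1.13.1",
    "black==23.11.0",  # pinned instead of the floating "black"
    # ... remaining dependencies unchanged ...
]
```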
| [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error))\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [os.path.join(\"../\", item) for item in all_extra_files]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==1.13.1\",\n \"black\",\n \"numpy==1.25.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n \"pandas>=2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==0.8.1\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.2\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">=3.9, <3.11\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n 
\"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error))\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [os.path.join(\"../\", item) for item in all_extra_files]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==1.13.1\",\n \"black==23.11.0\",\n \"numpy==1.25.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n \"pandas>=2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==0.8.1\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.2\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">=3.9, <3.11\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n 
\"gandlf_configGenerator\",\n \"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
index b9144f9d7..80c8729f9 100644
--- a/.github/workflows/black.yml
+++ b/.github/workflows/black.yml
@@ -20,7 +20,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install black
+ python -m pip install black==23.11.0
- name: Run tests
run: |
diff --git a/setup.py b/setup.py
index 039fb0560..40c974b0b 100644
--- a/setup.py
+++ b/setup.py
@@ -76,7 +76,7 @@ def run(self):
requirements = [
"torch==1.13.1",
- "black",
+ "black==23.11.0",
"numpy==1.25.0",
"scipy",
"SimpleITK!=2.0.*",
|
liqd__a4-meinberlin-3701 | testing 4293: can't edit polls somebody else created even if I have the rights
**URL:** https://meinberlin-dev.liqd.net/dashboard/modules/umfrage-24-4/poll/
**user:** group member
**expected behaviour:** I can edit polls somebody else created if I have the right to do so
**behaviour:** cannot save, getting a red alert
**important screensize:**
**device & browser:**
**Comment/Question:** also true for new polls for which I have been given rights. For polls I started myself it is fine.

Screenshot?
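The patch below resolves this by swapping the permission predicate; a minimal sketch of the resulting rule (the added tests assert that group members then pass this check):

```python
# Sketch mirroring the fix: replace the initiator/moderator-only check with
# the broader project-admin predicate.
import rules

from adhocracy4.modules import predicates as module_predicates

rules.set_perm(
    'a4polls.change_poll',
    module_predicates.is_project_admin
)
```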
| [
{
"content": "import rules\n\nfrom adhocracy4.modules import predicates as module_predicates\n\nrules.set_perm(\n 'a4polls.change_poll',\n module_predicates.is_context_initiator |\n module_predicates.is_context_moderator\n)\n",
"path": "meinberlin/apps/polls/rules.py"
}
] | [
{
"content": "import rules\n\nfrom adhocracy4.modules import predicates as module_predicates\n\nrules.set_perm(\n 'a4polls.change_poll',\n module_predicates.is_project_admin\n)\n",
"path": "meinberlin/apps/polls/rules.py"
}
] | diff --git a/meinberlin/apps/polls/rules.py b/meinberlin/apps/polls/rules.py
index 3dd6d6d57c..069e13b191 100644
--- a/meinberlin/apps/polls/rules.py
+++ b/meinberlin/apps/polls/rules.py
@@ -4,6 +4,5 @@
rules.set_perm(
'a4polls.change_poll',
- module_predicates.is_context_initiator |
- module_predicates.is_context_moderator
+ module_predicates.is_project_admin
)
diff --git a/tests/polls/rules/test_rules_change_poll.py b/tests/polls/rules/test_rules_change_poll.py
index 66bad65e06..1b6a20526d 100644
--- a/tests/polls/rules/test_rules_change_poll.py
+++ b/tests/polls/rules/test_rules_change_poll.py
@@ -8,6 +8,7 @@
from adhocracy4.test.helpers import freeze_pre_phase
from adhocracy4.test.helpers import setup_phase
from adhocracy4.test.helpers import setup_users
+from meinberlin.test.helpers import setup_group_member
perm_name = 'a4polls.change_poll'
@@ -17,12 +18,16 @@ def test_perm_exists():
@pytest.mark.django_db
-def test_pre_phase(phase_factory, poll_factory, user):
+def test_pre_phase(phase_factory, poll_factory, user_factory,
+ group_factory):
phase, _, project, item = setup_phase(phase_factory, poll_factory,
phases.VotingPhase)
anonymous, moderator, initiator = setup_users(project)
-
creator = item.creator
+ user = user_factory()
+ group_member, _, project = setup_group_member(None, project,
+ group_factory,
+ user_factory)
assert project.is_public
with freeze_pre_phase(phase):
@@ -31,14 +36,20 @@ def test_pre_phase(phase_factory, poll_factory, user):
assert not rules.has_perm(perm_name, creator, item)
assert rules.has_perm(perm_name, moderator, item)
assert rules.has_perm(perm_name, initiator, item)
+ assert rules.has_perm(perm_name, group_member, item)
@pytest.mark.django_db
-def test_phase_active(phase_factory, poll_factory, user):
+def test_phase_active(phase_factory, poll_factory, user_factory,
+ group_factory):
phase, _, project, item = setup_phase(phase_factory, poll_factory,
phases.VotingPhase)
anonymous, moderator, initiator = setup_users(project)
creator = item.creator
+ user = user_factory()
+ group_member, _, project = setup_group_member(None, project,
+ group_factory,
+ user_factory)
assert project.is_public
with freeze_phase(phase):
@@ -47,18 +58,23 @@ def test_phase_active(phase_factory, poll_factory, user):
assert not rules.has_perm(perm_name, creator, item)
assert rules.has_perm(perm_name, moderator, item)
assert rules.has_perm(perm_name, initiator, item)
+ assert rules.has_perm(perm_name, group_member, item)
@pytest.mark.django_db
def test_phase_active_project_private(phase_factory, poll_factory,
- user, user2):
+ user_factory, group_factory):
phase, _, project, item = setup_phase(
phase_factory, poll_factory, phases.VotingPhase,
module__project__access=Access.PRIVATE)
anonymous, moderator, initiator = setup_users(project)
creator = item.creator
- participant = user2
+ participant = user_factory()
project.participants.add(participant)
+ user = user_factory()
+ group_member, _, project = setup_group_member(None, project,
+ group_factory,
+ user_factory)
assert project.access == Access.PRIVATE
with freeze_phase(phase):
@@ -68,18 +84,23 @@ def test_phase_active_project_private(phase_factory, poll_factory,
assert not rules.has_perm(perm_name, participant, item)
assert rules.has_perm(perm_name, moderator, item)
assert rules.has_perm(perm_name, initiator, item)
+ assert rules.has_perm(perm_name, group_member, item)
@pytest.mark.django_db
def test_phase_active_project_semipublic(phase_factory, poll_factory,
- user, user2):
+ user_factory, group_factory):
phase, _, project, item = setup_phase(
phase_factory, poll_factory, phases.VotingPhase,
module__project__access=Access.SEMIPUBLIC)
anonymous, moderator, initiator = setup_users(project)
creator = item.creator
- participant = user2
+ participant = user_factory()
project.participants.add(participant)
+ user = user_factory()
+ group_member, _, project = setup_group_member(None, project,
+ group_factory,
+ user_factory)
assert project.access == Access.SEMIPUBLIC
with freeze_phase(phase):
@@ -89,15 +110,21 @@ def test_phase_active_project_semipublic(phase_factory, poll_factory,
assert not rules.has_perm(perm_name, participant, item)
assert rules.has_perm(perm_name, moderator, item)
assert rules.has_perm(perm_name, initiator, item)
+ assert rules.has_perm(perm_name, group_member, item)
@pytest.mark.django_db
-def test_phase_active_project_draft(phase_factory, poll_factory, user):
+def test_phase_active_project_draft(phase_factory, poll_factory,
+ user_factory, group_factory):
phase, _, project, item = setup_phase(phase_factory, poll_factory,
phases.VotingPhase,
module__project__is_draft=True)
anonymous, moderator, initiator = setup_users(project)
creator = item.creator
+ user = user_factory()
+ group_member, _, project = setup_group_member(None, project,
+ group_factory,
+ user_factory)
assert project.is_draft
with freeze_phase(phase):
@@ -106,15 +133,21 @@ def test_phase_active_project_draft(phase_factory, poll_factory, user):
assert not rules.has_perm(perm_name, creator, item)
assert rules.has_perm(perm_name, moderator, item)
assert rules.has_perm(perm_name, initiator, item)
+ assert rules.has_perm(perm_name, group_member, item)
@pytest.mark.django_db
-def test_post_phase_project_archived(phase_factory, poll_factory, user):
+def test_post_phase_project_archived(phase_factory, poll_factory,
+ user_factory, group_factory):
phase, _, project, item = setup_phase(phase_factory, poll_factory,
phases.VotingPhase,
module__project__is_archived=True)
anonymous, moderator, initiator = setup_users(project)
creator = item.creator
+ user = user_factory()
+ group_member, _, project = setup_group_member(None, project,
+ group_factory,
+ user_factory)
assert project.is_archived
with freeze_post_phase(phase):
@@ -123,3 +156,4 @@ def test_post_phase_project_archived(phase_factory, poll_factory, user):
assert not rules.has_perm(perm_name, creator, item)
assert rules.has_perm(perm_name, moderator, item)
assert rules.has_perm(perm_name, initiator, item)
+ assert rules.has_perm(perm_name, group_member, item)
|
TheAlgorithms__Python-7054 | Add typing to maths/segmented_sieve.py
### Describe your change:
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [ ] Documentation change?
### Checklist:
* [ ] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [ ] This pull request is all my own work -- I have not plagiarized.
* [ ] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms have a URL in their comments that points to Wikipedia or another similar explanation.
* [ ] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
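For context, a minimal self-contained example of the style the checklist asks for: annotated parameters and return type plus a passing doctest (this helper is illustrative only; the patch below merely annotates the repository's existing `sieve`):

```python
# Sketch only: type-hinted signature plus a doctest, per the checklist above.
def primes_up_to(limit: int) -> list[int]:
    """Return all primes <= limit with a plain sieve.

    >>> primes_up_to(10)
    [2, 3, 5, 7]
    """
    is_prime = [True] * (limit + 1)
    primes: list[int] = []
    for number in range(2, limit + 1):
        if is_prime[number]:
            primes.append(number)
            for multiple in range(number * number, limit + 1, number):
                is_prime[multiple] = False
    return primes
```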
| [
{
"content": "\"\"\"Segmented Sieve.\"\"\"\r\n\r\nimport math\r\n\r\n\r\ndef sieve(n):\r\n \"\"\"Segmented Sieve.\"\"\"\r\n in_prime = []\r\n start = 2\r\n end = int(math.sqrt(n)) # Size of every segment\r\n temp = [True] * (end + 1)\r\n prime = []\r\n\r\n while start <= end:\r\n if temp[start] is True:\r\n in_prime.append(start)\r\n for i in range(start * start, end + 1, start):\r\n temp[i] = False\r\n start += 1\r\n prime += in_prime\r\n\r\n low = end + 1\r\n high = min(2 * end, n)\r\n\r\n while low <= n:\r\n temp = [True] * (high - low + 1)\r\n for each in in_prime:\r\n\r\n t = math.floor(low / each) * each\r\n if t < low:\r\n t += each\r\n\r\n for j in range(t, high + 1, each):\r\n temp[j - low] = False\r\n\r\n for j in range(len(temp)):\r\n if temp[j] is True:\r\n prime.append(j + low)\r\n\r\n low = high + 1\r\n high = min(high + end, n)\r\n\r\n return prime\r\n\r\n\r\nprint(sieve(10**6))\r\n",
"path": "maths/segmented_sieve.py"
}
] | [
{
"content": "\"\"\"Segmented Sieve.\"\"\"\r\n\r\nimport math\r\n\r\n\r\ndef sieve(n: int) -> list[int]:\r\n \"\"\"Segmented Sieve.\"\"\"\r\n in_prime = []\r\n start = 2\r\n end = int(math.sqrt(n)) # Size of every segment\r\n temp = [True] * (end + 1)\r\n prime = []\r\n\r\n while start <= end:\r\n if temp[start] is True:\r\n in_prime.append(start)\r\n for i in range(start * start, end + 1, start):\r\n temp[i] = False\r\n start += 1\r\n prime += in_prime\r\n\r\n low = end + 1\r\n high = min(2 * end, n)\r\n\r\n while low <= n:\r\n temp = [True] * (high - low + 1)\r\n for each in in_prime:\r\n\r\n t = math.floor(low / each) * each\r\n if t < low:\r\n t += each\r\n\r\n for j in range(t, high + 1, each):\r\n temp[j - low] = False\r\n\r\n for j in range(len(temp)):\r\n if temp[j] is True:\r\n prime.append(j + low)\r\n\r\n low = high + 1\r\n high = min(high + end, n)\r\n\r\n return prime\r\n\r\n\r\nprint(sieve(10**6))\r\n",
"path": "maths/segmented_sieve.py"
}
] | diff --git a/maths/segmented_sieve.py b/maths/segmented_sieve.py
index 0054b0595be5..35ed9702b3be 100644
--- a/maths/segmented_sieve.py
+++ b/maths/segmented_sieve.py
@@ -3,7 +3,7 @@
import math
-def sieve(n):
+def sieve(n: int) -> list[int]:
"""Segmented Sieve."""
in_prime = []
start = 2
|
pypa__setuptools-2427 | Sphinx setup should be stricter
I noticed that some of the docs pages are unreachable when navigating from the main RTD page. In particular, _I know_ that there's `history.rst` that is only accessible if one knows the URL upfront.
I tracked this to https://github.com/pypa/setuptools/pull/2097 which removes entries from the TOC but doesn't reintroduce them in other places.
Sphinx has a few toggles that make it nitpicky about warnings. I think this should be enabled in the CI to prevent such problems in the future. This should catch implicit orphan pages as well as dead references or typos.
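A minimal sketch of those toggles as they could appear in `docs/conf.py` (the patch below adds exactly these two settings); the flags in the trailing comment are the per-build equivalents:

```python
# Sketch only: make Sphinx fail loudly on broken references.
nitpicky = True        # every unresolved cross-reference becomes a warning
default_role = 'any'   # bare `name` references get resolved, hence also checked

# One-off equivalents on the command line:
#   sphinx-build -n -W docs build/html   (-n nit-picky mode, -W warnings as errors)
```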
| [
{
"content": "import subprocess\nimport sys\nimport os\n\n\n# hack to run the bootstrap script so that jaraco.packaging.sphinx\n# can invoke setup.py\n'READTHEDOCS' in os.environ and subprocess.check_call(\n [sys.executable, '-m', 'bootstrap'],\n cwd=os.path.join(os.path.dirname(__file__), os.path.pardir),\n)\n\n# -- Project information -----------------------------------------------------\n\ngithub_url = 'https://github.com'\ngithub_sponsors_url = f'{github_url}/sponsors'\n\n# -- General configuration --\n\nextensions = [\n 'sphinx.ext.extlinks', # allows to create custom roles easily\n 'jaraco.packaging.sphinx',\n 'rst.linker',\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# List of directories, relative to source directory, that shouldn't be searched\n# for source files.\nexclude_trees = []\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# -- Options for extlinks extension ---------------------------------------\nextlinks = {\n 'user': (f'{github_sponsors_url}/%s', '@'), # noqa: WPS323\n}\n\n# -- Options for HTML output --\n\n# The theme to use for HTML and HTML Help pages. Major themes that come with\n# Sphinx are currently 'default' and 'sphinxdoc'.\nhtml_theme = 'nature'\n\n# Add any paths that contain custom themes here, relative to this directory.\nhtml_theme_path = ['_theme']\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\nhtml_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\nhtml_sidebars = {\n 'index': [\n 'relations.html', 'sourcelink.html', 'indexsidebar.html',\n 'searchbox.html']}\n\n# If false, no module index is generated.\nhtml_use_modindex = False\n\n# If false, no index is generated.\nhtml_use_index = False\n\n# -- Options for LaTeX output --\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title, author,\n# documentclass [howto/manual]).\nlatex_documents = [(\n 'index', 'Setuptools.tex', 'Setuptools Documentation',\n 'The fellowship of the packaging', 'manual',\n)]\n\nlink_files = {\n '../CHANGES.rst': dict(\n using=dict(\n BB='https://bitbucket.org',\n GH='https://github.com',\n ),\n replace=[\n dict(\n pattern=r'(Issue )?#(?P<issue>\\d+)',\n url='{package_url}/issues/{issue}',\n ),\n dict(\n pattern=r'BB Pull Request ?#(?P<bb_pull_request>\\d+)',\n url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}',\n ),\n dict(\n pattern=r'Distribute #(?P<distribute>\\d+)',\n url='{BB}/tarek/distribute/issue/{distribute}',\n ),\n dict(\n pattern=r'Buildout #(?P<buildout>\\d+)',\n url='{GH}/buildout/buildout/issues/{buildout}',\n ),\n dict(\n pattern=r'Old Setuptools #(?P<old_setuptools>\\d+)',\n url='http://bugs.python.org/setuptools/issue{old_setuptools}',\n ),\n dict(\n pattern=r'Jython #(?P<jython>\\d+)',\n url='http://bugs.jython.org/issue{jython}',\n ),\n dict(\n pattern=r'(Python #|bpo-)(?P<python>\\d+)',\n url='http://bugs.python.org/issue{python}',\n ),\n dict(\n pattern=r'Interop #(?P<interop>\\d+)',\n url='{GH}/pypa/interoperability-peps/issues/{interop}',\n ),\n dict(\n pattern=r'Pip #(?P<pip>\\d+)',\n url='{GH}/pypa/pip/issues/{pip}',\n ),\n dict(\n pattern=r'Packaging #(?P<packaging>\\d+)',\n url='{GH}/pypa/packaging/issues/{packaging}',\n ),\n dict(\n pattern=r'[Pp]ackaging (?P<packaging_ver>\\d+(\\.\\d+)+)',\n url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',\n ),\n dict(\n pattern=r'PEP[- ](?P<pep_number>\\d+)',\n url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',\n ),\n dict(\n pattern=r'setuptools_svn #(?P<setuptools_svn>\\d+)',\n url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',\n ),\n dict(\n pattern=r'pypa/distutils#(?P<distutils>\\d+)',\n url='{GH}/pypa/distutils/issues/{distutils}',\n ),\n dict(\n pattern=r'^(?m)((?P<scm_version>v?\\d+(\\.\\d+){1,2}))\\n[-=]+\\n',\n with_scm='{text}\\n{rev[timestamp]:%d %b %Y}\\n',\n ),\n ],\n ),\n}\n",
"path": "docs/conf.py"
}
] | [
{
"content": "import subprocess\nimport sys\nimport os\n\n\n# hack to run the bootstrap script so that jaraco.packaging.sphinx\n# can invoke setup.py\n'READTHEDOCS' in os.environ and subprocess.check_call(\n [sys.executable, '-m', 'bootstrap'],\n cwd=os.path.join(os.path.dirname(__file__), os.path.pardir),\n)\n\n# -- Project information -----------------------------------------------------\n\ngithub_url = 'https://github.com'\ngithub_sponsors_url = f'{github_url}/sponsors'\n\n# -- General configuration --\n\nextensions = [\n 'sphinx.ext.extlinks', # allows to create custom roles easily\n 'jaraco.packaging.sphinx',\n 'rst.linker',\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# List of directories, relative to source directory, that shouldn't be searched\n# for source files.\nexclude_trees = []\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# -- Options for extlinks extension ---------------------------------------\nextlinks = {\n 'user': (f'{github_sponsors_url}/%s', '@'), # noqa: WPS323\n}\n\n# -- Options for HTML output --\n\n# The theme to use for HTML and HTML Help pages. Major themes that come with\n# Sphinx are currently 'default' and 'sphinxdoc'.\nhtml_theme = 'nature'\n\n# Add any paths that contain custom themes here, relative to this directory.\nhtml_theme_path = ['_theme']\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\nhtml_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\nhtml_sidebars = {\n 'index': [\n 'relations.html', 'sourcelink.html', 'indexsidebar.html',\n 'searchbox.html']}\n\n# If false, no module index is generated.\nhtml_use_modindex = False\n\n# If false, no index is generated.\nhtml_use_index = False\n\n# -- Options for LaTeX output --\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title, author,\n# documentclass [howto/manual]).\nlatex_documents = [(\n 'index', 'Setuptools.tex', 'Setuptools Documentation',\n 'The fellowship of the packaging', 'manual',\n)]\n\nlink_files = {\n '../CHANGES.rst': dict(\n using=dict(\n BB='https://bitbucket.org',\n GH='https://github.com',\n ),\n replace=[\n dict(\n pattern=r'(Issue )?#(?P<issue>\\d+)',\n url='{package_url}/issues/{issue}',\n ),\n dict(\n pattern=r'BB Pull Request ?#(?P<bb_pull_request>\\d+)',\n url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}',\n ),\n dict(\n pattern=r'Distribute #(?P<distribute>\\d+)',\n url='{BB}/tarek/distribute/issue/{distribute}',\n ),\n dict(\n pattern=r'Buildout #(?P<buildout>\\d+)',\n url='{GH}/buildout/buildout/issues/{buildout}',\n ),\n dict(\n pattern=r'Old Setuptools #(?P<old_setuptools>\\d+)',\n url='http://bugs.python.org/setuptools/issue{old_setuptools}',\n ),\n dict(\n pattern=r'Jython #(?P<jython>\\d+)',\n url='http://bugs.jython.org/issue{jython}',\n ),\n dict(\n pattern=r'(Python #|bpo-)(?P<python>\\d+)',\n url='http://bugs.python.org/issue{python}',\n ),\n dict(\n pattern=r'Interop #(?P<interop>\\d+)',\n url='{GH}/pypa/interoperability-peps/issues/{interop}',\n ),\n dict(\n pattern=r'Pip #(?P<pip>\\d+)',\n url='{GH}/pypa/pip/issues/{pip}',\n ),\n dict(\n pattern=r'Packaging #(?P<packaging>\\d+)',\n url='{GH}/pypa/packaging/issues/{packaging}',\n ),\n dict(\n pattern=r'[Pp]ackaging (?P<packaging_ver>\\d+(\\.\\d+)+)',\n url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',\n ),\n dict(\n pattern=r'PEP[- ](?P<pep_number>\\d+)',\n url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',\n ),\n dict(\n pattern=r'setuptools_svn #(?P<setuptools_svn>\\d+)',\n url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',\n ),\n dict(\n pattern=r'pypa/distutils#(?P<distutils>\\d+)',\n url='{GH}/pypa/distutils/issues/{distutils}',\n ),\n dict(\n pattern=r'^(?m)((?P<scm_version>v?\\d+(\\.\\d+){1,2}))\\n[-=]+\\n',\n with_scm='{text}\\n{rev[timestamp]:%d %b %Y}\\n',\n ),\n ],\n ),\n}\n\n\n# Be strict about any broken references:\nnitpicky = True\n\n\n# Ref: https://github.com/python-attrs/attrs/pull/571/files\\\n# #diff-85987f48f1258d9ee486e3191495582dR82\ndefault_role = 'any'\n",
"path": "docs/conf.py"
}
] | diff --git a/CHANGES.rst b/CHANGES.rst
index c35c4a8792..c96fb0bc9d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -22,9 +22,9 @@ v50.2.0
* #2355: When pip is imported as part of a build, leave distutils patched.
* #2380: There are some setuptools specific changes in the
- `setuptools.command.bdist_rpm` module that are no longer needed, because
- they are part of the `bdist_rpm` module in distutils in Python
- 3.5.0. Therefore, code was removed from `setuptools.command.bdist_rpm`.
+ ``setuptools.command.bdist_rpm`` module that are no longer needed, because
+ they are part of the ``bdist_rpm`` module in distutils in Python
+ 3.5.0. Therefore, code was removed from ``setuptools.command.bdist_rpm``.
v50.1.0
@@ -48,7 +48,7 @@ v50.0.2
v50.0.1
-------
-* #2357: Restored Python 3.5 support in distutils.util for missing `subprocess._optim_args_from_interpreter_flags`.
+* #2357: Restored Python 3.5 support in distutils.util for missing ``subprocess._optim_args_from_interpreter_flags``.
* #2358: Restored AIX support on Python 3.8 and earlier.
* #2361: Add Python 3.10 support to _distutils_hack. Get the 'Loader' abstract class
from importlib.abc rather than importlib.util.abc (alias removed in Python
@@ -495,7 +495,7 @@ v40.7.1
v40.7.0
-------
-* #1551: File inputs for the `license` field in `setup.cfg` files now explicitly raise an error.
+* #1551: File inputs for the ``license`` field in ``setup.cfg`` files now explicitly raise an error.
* #1180: Add support for non-ASCII in setup.cfg (#1062). Add support for native strings on some parameters (#1136).
* #1499: ``setuptools.package_index`` no longer relies on the deprecated ``urllib.parse.splituser`` per Python #27485.
* #1544: Added tests for PackageIndex.download (for git URLs).
@@ -545,7 +545,7 @@ v40.5.0
* #1335: In ``pkg_resources.normalize_path``, fix issue on Cygwin when cwd contains symlinks.
* #1502: Deprecated support for downloads from Subversion in package_index/easy_install.
-* #1517: Dropped use of six.u in favor of `u""` literals.
+* #1517: Dropped use of six.u in favor of ``u""`` literals.
* #1520: Added support for ``data_files`` in ``setup.cfg``.
* #1525: Fixed rendering of the deprecation warning in easy_install doc.
@@ -594,7 +594,7 @@ v40.2.0
v40.1.1
--------
-* #1465: Fix regression with `egg_info` command when tagging is used.
+* #1465: Fix regression with ``egg_info`` command when tagging is used.
v40.1.0
@@ -631,8 +631,8 @@ v39.2.0
a text file.
* #1360: Fixed issue with a mismatch between the name of the package and the
name of the .dist-info file in wheel files
-* #1364: Add `__dir__()` implementation to `pkg_resources.Distribution()` that
- includes the attributes in the `_provider` instance variable.
+* #1364: Add ``__dir__()`` implementation to ``pkg_resources.Distribution()`` that
+ includes the attributes in the ``_provider`` instance variable.
* #1365: Take the package_dir option into account when loading the version from
a module attribute.
* #1353: Added coverage badge to README.
@@ -742,7 +742,7 @@ v38.2.5
v38.2.4
-------
-* #1220: Fix `data_files` handling when installing from wheel.
+* #1220: Fix ``data_files`` handling when installing from wheel.
v38.2.3
-------
@@ -1506,7 +1506,7 @@ v25.4.0
v25.3.0
-------
-* #739 Fix unquoted libpaths by fixing compatibility between `numpy.distutils` and `distutils._msvccompiler` for numpy < 1.11.2 (Fix issue #728, error also fixed in Numpy).
+* #739 Fix unquoted libpaths by fixing compatibility between ``numpy.distutils`` and ``distutils._msvccompiler`` for numpy < 1.11.2 (Fix issue #728, error also fixed in Numpy).
* #731: Bump certifi.
@@ -1523,13 +1523,13 @@ v25.2.0
v25.1.6
-------
-* #725: revert `library_dir_option` patch (Error is related to `numpy.distutils` and make errors on non Numpy users).
+* #725: revert ``library_dir_option`` patch (Error is related to ``numpy.distutils`` and make errors on non Numpy users).
v25.1.5
-------
* #720
-* #723: Improve patch for `library_dir_option`.
+* #723: Improve patch for ``library_dir_option``.
v25.1.4
-------
@@ -1537,7 +1537,7 @@ v25.1.4
* #717
* #713
* #707: Fix Python 2 compatibility for MSVC by catching errors properly.
-* #715: Fix unquoted libpaths by patching `library_dir_option`.
+* #715: Fix unquoted libpaths by patching ``library_dir_option``.
v25.1.3
-------
@@ -3065,10 +3065,10 @@ not all users will find 1.0 a drop-in replacement for 0.9.
* Issue #50: Normalized API of environment marker support. Specifically,
removed line number and filename from SyntaxErrors when returned from
- `pkg_resources.invalid_marker`. Any clients depending on the specific
+ ``pkg_resources.invalid_marker``. Any clients depending on the specific
string representation of exceptions returned by that function may need to
be updated to account for this change.
-* Issue #50: SyntaxErrors generated by `pkg_resources.invalid_marker` are
+* Issue #50: SyntaxErrors generated by ``pkg_resources.invalid_marker`` are
normalized for cross-implementation consistency.
* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting``
options to easy_install. These options have been deprecated since 0.6a11.
@@ -3076,13 +3076,13 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.9.8
-----
-* Issue #53: Fix NameErrors in `_vcs_split_rev_from_url`.
+* Issue #53: Fix NameErrors in ``_vcs_split_rev_from_url``.
0.9.7
-----
* Issue #49: Correct AttributeError on PyPy where a hashlib.HASH object does
- not have a `.name` attribute.
+ not have a ``.name`` attribute.
* Issue #34: Documentation now refers to bootstrap script in code repository
referenced by bookmark.
* Add underscore-separated keys to environment markers (markerlib).
@@ -3090,7 +3090,7 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.9.6
-----
-* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a `.name`
+* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a ``.name``
attribute.
0.9.5
@@ -3124,7 +3124,7 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.9
---
-* `package_index` now validates hashes other than MD5 in download links.
+* ``package_index`` now validates hashes other than MD5 in download links.
0.8
---
@@ -3171,7 +3171,7 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.7.2
-----
-* Issue #14: Use markerlib when the `parser` module is not available.
+* Issue #14: Use markerlib when the ``parser`` module is not available.
* Issue #10: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI.
0.7.1
@@ -3255,7 +3255,7 @@ Added several features that were slated for setuptools 0.6c12:
------
* Distribute #27: Use public api for loading resources from zip files rather than
- the private method `_zip_directory_cache`.
+ the private method ``_zip_directory_cache``.
* Added a new function ``easy_install.get_win_launcher`` which may be used by
third-party libraries such as buildout to get a suitable script launcher.
@@ -3321,7 +3321,7 @@ how it parses version numbers.
* Fix 2 errors with Jython 2.5.
* Fix 1 failure with Jython 2.5 and 2.7.
* Disable workaround for Jython scripts on Linux systems.
-* Distribute #336: `setup.py` no longer masks failure exit code when tests fail.
+* Distribute #336: ``setup.py`` no longer masks failure exit code when tests fail.
* Fix issue in pkg_resources where try/except around a platform-dependent
import would trigger hook load failures on Mercurial. See pull request 32
for details.
@@ -3332,7 +3332,7 @@ how it parses version numbers.
* Fix test suite with Python 2.6.
* Fix some DeprecationWarnings and ResourceWarnings.
-* Distribute #335: Backed out `setup_requires` superceding installed requirements
+* Distribute #335: Backed out ``setup_requires`` superceding installed requirements
until regression can be addressed.
0.6.31
@@ -3342,7 +3342,7 @@ how it parses version numbers.
* Distribute #329: Properly close files created by tests for compatibility with
Jython.
* Work around Jython #1980 and Jython #1981.
-* Distribute #334: Provide workaround for packages that reference `sys.__stdout__`
+* Distribute #334: Provide workaround for packages that reference ``sys.__stdout__``
such as numpy does. This change should address
`virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
as the system encoding is UTF-8 or the IO encoding is specified in the
@@ -3351,7 +3351,7 @@ how it parses version numbers.
PYTHONIOENCODING=utf8 pip install numpy
* Fix for encoding issue when installing from Windows executable on Python 3.
-* Distribute #323: Allow `setup_requires` requirements to supercede installed
+* Distribute #323: Allow ``setup_requires`` requirements to supercede installed
requirements. Added some new keyword arguments to existing pkg_resources
methods. Also had to updated how __path__ is handled for namespace packages
to ensure that when a new egg distribution containing a namespace package is
@@ -3371,16 +3371,16 @@ how it parses version numbers.
* BB Pull Request #14: Honor file permissions in zip files.
* Distribute #327: Merged pull request #24 to fix a dependency problem with pip.
* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
-* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
+* If Sphinx is installed, the ``upload_docs`` command now runs ``build_sphinx``
to produce uploadable documentation.
-* Distribute #326: `upload_docs` provided mangled auth credentials under Python 3.
+* Distribute #326: ``upload_docs`` provided mangled auth credentials under Python 3.
* Distribute #320: Fix check for "createable" in distribute_setup.py.
* Distribute #305: Remove a warning that was triggered during normal operations.
* Distribute #311: Print metadata in UTF-8 independent of platform.
* Distribute #303: Read manifest file with UTF-8 encoding under Python 3.
* Distribute #301: Allow to run tests of namespace packages when using 2to3.
* Distribute #304: Prevent import loop in site.py under Python 3.3.
-* Distribute #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+* Distribute #283: Reenable scanning of ``*.pyc`` / ``*.pyo`` files on Python 3.3.
* Distribute #299: The develop command didn't work on Python 3, when using 2to3,
as the egg link would go to the Python 2 source. Linking to the 2to3'd code
in build/lib makes it work, although you will have to rebuild the module
@@ -3390,10 +3390,10 @@ how it parses version numbers.
* Distribute #313: Support for sdist subcommands (Python 2.7)
* Distribute #314: test_local_index() would fail an OS X.
* Distribute #310: Non-ascii characters in a namespace __init__.py causes errors.
-* Distribute #218: Improved documentation on behavior of `package_data` and
- `include_package_data`. Files indicated by `package_data` are now included
+* Distribute #218: Improved documentation on behavior of ``package_data`` and
+ ``include_package_data``. Files indicated by ``package_data`` are now included
in the manifest.
-* `distribute_setup.py` now allows a `--download-base` argument for retrieving
+* ``distribute_setup.py`` now allows a ``--download-base`` argument for retrieving
distribute from a specified location.
0.6.28
@@ -3402,7 +3402,7 @@ how it parses version numbers.
* Distribute #294: setup.py can now be invoked from any directory.
* Scripts are now installed honoring the umask.
* Added support for .dist-info directories.
-* Distribute #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+* Distribute #283: Fix and disable scanning of ``*.pyc`` / ``*.pyo`` files on
Python 3.3.
0.6.27
@@ -3636,7 +3636,7 @@ how it parses version numbers.
0.6.4
-----
-* Added the generation of `distribute_setup_3k.py` during the release.
+* Added the generation of ``distribute_setup_3k.py`` during the release.
This closes Distribute #52.
* Added an upload_docs command to easily upload project documentation to
diff --git a/changelog.d/2427.doc.rst b/changelog.d/2427.doc.rst
new file mode 100644
index 0000000000..bec964ffc4
--- /dev/null
+++ b/changelog.d/2427.doc.rst
@@ -0,0 +1,2 @@
+Started enforcing strict syntax and reference validation
+in the Sphinx docs -- by :user:`webknjaz`
diff --git a/docs/build_meta.rst b/docs/build_meta.rst
index fcc2b7fee6..c36e2bab38 100644
--- a/docs/build_meta.rst
+++ b/docs/build_meta.rst
@@ -56,7 +56,8 @@ setuptools, the content would be::
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
-Use ``setuptools``' `declarative config`_ to specify the package information::
+Use ``setuptools``' :ref:`declarative config <declarative config>` to
+specify the package information::
[metadata]
name = meowpkg
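
For readers new to the ``build_meta`` backend named above, here is a minimal, purely illustrative sketch of driving its PEP 517 hooks by hand from a project root containing such a ``pyproject.toml``; a build front-end (pip, ``build``) normally does this for you, and the printed names are assumptions::

    import os
    import setuptools.build_meta as backend

    os.makedirs("dist", exist_ok=True)
    extra = backend.get_requires_for_build_wheel()    # e.g. ['wheel'] on older releases
    wheel_name = backend.build_wheel("dist")          # e.g. 'meowpkg-0.1-py3-none-any.whl'
    sdist_name = backend.build_sdist("dist")          # e.g. 'meowpkg-0.1.tar.gz'
    print(extra, wheel_name, sdist_name)
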
diff --git a/docs/conf.py b/docs/conf.py
index d5111391d7..982f5e6212 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -146,3 +146,12 @@
],
),
}
+
+
+# Be strict about any broken references:
+nitpicky = True
+
+
+# Ref: https://github.com/python-attrs/attrs/pull/571/files\
+# #diff-85987f48f1258d9ee486e3191495582dR82
+default_role = 'any'
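
Should the stricter checking above ever need targeted exceptions, one possible (hypothetical) shape for the same settings in ``conf.py`` is::

    # Fail the docs build on any broken cross-reference.
    nitpicky = True

    # Hypothetical escape hatch for targets Sphinx genuinely cannot resolve.
    nitpick_ignore = [
        ("py:class", "some.external.Type"),
    ]

    # Bare `text` roles resolve as if written :any:`text`.
    default_role = "any"
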
diff --git a/docs/deprecated/index.rst b/docs/deprecated/index.rst
index a655b21930..ca80767a77 100644
--- a/docs/deprecated/index.rst
+++ b/docs/deprecated/index.rst
@@ -17,3 +17,4 @@ objectives.
python_eggs
easy_install
distutils-legacy
+ functionalities
diff --git a/docs/development.rst b/docs/development.rst
index 28e653fea9..7ee52361ec 100644
--- a/docs/development.rst
+++ b/docs/development.rst
@@ -31,5 +31,4 @@ setuptools changes. You have been warned.
:maxdepth: 1
developer-guide
- formats
releases
diff --git a/docs/index.rst b/docs/index.rst
index 5a46052632..961ec394c3 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -10,5 +10,11 @@ Documentation content:
:maxdepth: 1
User guide <userguide/index>
+ build_meta
+ pkg_resources
+ references/keywords
+ roadmap
+ setuptools
Development guide <development>
Backward compatibility & deprecated practice <deprecated/index>
+ Changelog <history>
diff --git a/docs/pkg_resources.rst b/docs/pkg_resources.rst
index 7d0d8da92a..364e218328 100644
--- a/docs/pkg_resources.rst
+++ b/docs/pkg_resources.rst
@@ -149,7 +149,7 @@ more information on this.) Also, you must add a ``declare_namespace()`` call
in the package's ``__init__.py`` file(s):
``declare_namespace(name)``
- Declare that the dotted package name `name` is a "namespace package" whose
+ Declare that the dotted package name ``name`` is a "namespace package" whose
contained packages and modules may be spread across multiple distributions.
The named package's ``__path__`` will be extended to include the
corresponding package in all distributions on ``sys.path`` that contain a
@@ -163,7 +163,7 @@ Applications that manipulate namespace packages or directly alter ``sys.path``
at runtime may also need to use this API function:
``fixup_namespace_packages(path_item)``
- Declare that `path_item` is a newly added item on ``sys.path`` that may
+ Declare that ``path_item`` is a newly added item on ``sys.path`` that may
need to be used to update existing namespace packages. Ordinarily, this is
called for you when an egg is automatically added to ``sys.path``, but if
your application modifies ``sys.path`` to include locations that may
@@ -197,7 +197,7 @@ not provide any way to detect arbitrary changes to a list object like
``working_set`` based on changes to ``sys.path``.
``WorkingSet(entries=None)``
- Create a ``WorkingSet`` from an iterable of path entries. If `entries`
+ Create a ``WorkingSet`` from an iterable of path entries. If ``entries``
is not supplied, it defaults to the value of ``sys.path`` at the time
the constructor is called.
@@ -229,9 +229,9 @@ abbreviation for ``pkg_resources.working_set.require()``:
``require(*requirements)``
- Ensure that distributions matching `requirements` are activated
+ Ensure that distributions matching ``requirements`` are activated
- `requirements` must be a string or a (possibly-nested) sequence
+ ``requirements`` must be a string or a (possibly-nested) sequence
thereof, specifying the distributions and versions required. The
return value is a sequence of the distributions that needed to be
activated to fulfill the requirements; all relevant distributions are
@@ -259,8 +259,8 @@ abbreviation for ``pkg_resources.working_set.require()``:
``obtain()`` method of ``Environment`` objects.
``run_script(requires, script_name)``
- Locate distribution specified by `requires` and run its `script_name`
- script. `requires` must be a string containing a requirement specifier.
+ Locate distribution specified by ``requires`` and run its ``script_name``
+ script. ``requires`` must be a string containing a requirement specifier.
(See `Requirements Parsing`_ below for the syntax.)
The script, if found, will be executed in *the caller's globals*. That's
@@ -274,11 +274,11 @@ abbreviation for ``pkg_resources.working_set.require()``:
object's `Metadata API`_ instead.
``iter_entry_points(group, name=None)``
- Yield entry point objects from `group` matching `name`
+ Yield entry point objects from ``group`` matching ``name``
- If `name` is None, yields all entry points in `group` from all
+ If ``name`` is None, yields all entry points in ``group`` from all
distributions in the working set, otherwise only ones matching both
- `group` and `name` are yielded. Entry points are yielded from the active
+ ``group`` and ``name`` are yielded. Entry points are yielded from the active
distributions in the order that the distributions appear in the working
set. (For the global ``working_set``, this should be the same as the order
that they are listed in ``sys.path``.) Note that within the entry points
@@ -301,14 +301,14 @@ instance:
called by the ``WorkingSet()`` constructor during initialization.
This method uses ``find_distributions(entry,True)`` to find distributions
- corresponding to the path entry, and then ``add()`` them. `entry` is
+ corresponding to the path entry, and then ``add()`` them. ``entry`` is
always appended to the ``entries`` attribute, even if it is already
present, however. (This is because ``sys.path`` can contain the same value
more than once, and the ``entries`` attribute should be able to reflect
this.)
``__contains__(dist)``
- True if `dist` is active in this ``WorkingSet``. Note that only one
+ True if ``dist`` is active in this ``WorkingSet``. Note that only one
distribution for a given project can be active in a given ``WorkingSet``.
``__iter__()``
@@ -317,34 +317,34 @@ instance:
added to the working set.
``find(req)``
- Find a distribution matching `req` (a ``Requirement`` instance).
+ Find a distribution matching ``req`` (a ``Requirement`` instance).
If there is an active distribution for the requested project, this
returns it, as long as it meets the version requirement specified by
- `req`. But, if there is an active distribution for the project and it
- does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+ ``req``. But, if there is an active distribution for the project and it
+ does *not* meet the ``req`` requirement, ``VersionConflict`` is raised.
If there is no active distribution for the requested project, ``None``
is returned.
``resolve(requirements, env=None, installer=None)``
- List all distributions needed to (recursively) meet `requirements`
+ List all distributions needed to (recursively) meet ``requirements``
- `requirements` must be a sequence of ``Requirement`` objects. `env`,
+ ``requirements`` must be a sequence of ``Requirement`` objects. ``env``,
if supplied, should be an ``Environment`` instance. If
not supplied, an ``Environment`` is created from the working set's
- ``entries``. `installer`, if supplied, will be invoked with each
+ ``entries``. ``installer``, if supplied, will be invoked with each
requirement that cannot be met by an already-installed distribution; it
should return a ``Distribution`` or ``None``. (See the ``obtain()`` method
- of `Environment Objects`_, below, for more information on the `installer`
+ of `Environment Objects`_, below, for more information on the ``installer``
argument.)
``add(dist, entry=None)``
- Add `dist` to working set, associated with `entry`
+ Add ``dist`` to working set, associated with ``entry``
- If `entry` is unspecified, it defaults to ``dist.location``. On exit from
- this routine, `entry` is added to the end of the working set's ``.entries``
+ If ``entry`` is unspecified, it defaults to ``dist.location``. On exit from
+ this routine, ``entry`` is added to the end of the working set's ``.entries``
(if it wasn't already present).
- `dist` is only added to the working set if it's for a project that
+ ``dist`` is only added to the working set if it's for a project that
doesn't already have a distribution active in the set. If it's
successfully added, any callbacks registered with the ``subscribe()``
method will be called. (See `Receiving Change Notifications`_, below.)
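
A hedged usage sketch of the ``WorkingSet`` behaviour described in this hunk (the looked-up project is only an example)::

    from pkg_resources import Requirement, WorkingSet

    ws = WorkingSet()                                # entries default to sys.path
    dist = ws.find(Requirement.parse("setuptools"))  # active Distribution, or None
    print(dist)
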
@@ -401,7 +401,7 @@ environment for the newest version of each project that can be safely loaded
without conflicts or missing requirements.
``find_plugins(plugin_env, full_env=None, fallback=True)``
- Scan `plugin_env` and identify which distributions could be added to this
+ Scan ``plugin_env`` and identify which distributions could be added to this
working set without version conflicts or missing requirements.
Example usage::
@@ -412,19 +412,19 @@ without conflicts or missing requirements.
map(working_set.add, distributions) # add plugins+libs to sys.path
print "Couldn't load", errors # display errors
- The `plugin_env` should be an ``Environment`` instance that contains only
+ The ``plugin_env`` should be an ``Environment`` instance that contains only
distributions that are in the project's "plugin directory" or directories.
- The `full_env`, if supplied, should be an ``Environment`` instance that
+ The ``full_env``, if supplied, should be an ``Environment`` instance that
contains all currently-available distributions.
- If `full_env` is not supplied, one is created automatically from the
+ If ``full_env`` is not supplied, one is created automatically from the
``WorkingSet`` this method is called on, which will typically mean that
every directory on ``sys.path`` will be scanned for distributions.
- This method returns a 2-tuple: (`distributions`, `error_info`), where
- `distributions` is a list of the distributions found in `plugin_env` that
+ This method returns a 2-tuple: (``distributions``, ``error_info``), where
+ ``distributions`` is a list of the distributions found in ``plugin_env`` that
were loadable, along with any other distributions that are needed to resolve
- their dependencies. `error_info` is a dictionary mapping unloadable plugin
+ their dependencies. ``error_info`` is a dictionary mapping unloadable plugin
distributions to an exception instance describing the error that occurred.
Usually this will be a ``DistributionNotFound`` or ``VersionConflict``
instance.
@@ -436,7 +436,7 @@ without conflicts or missing requirements.
metadata tracking and hooks to be activated.
The resolution algorithm used by ``find_plugins()`` is as follows. First,
- the project names of the distributions present in `plugin_env` are sorted.
+ the project names of the distributions present in ``plugin_env`` are sorted.
Then, each project's eggs are tried in descending version order (i.e.,
newest version first).
@@ -446,7 +446,7 @@ without conflicts or missing requirements.
the next project name, and no older eggs for that project are tried.
If the resolution attempt fails, however, the error is added to the error
- dictionary. If the `fallback` flag is true, the next older version of the
+ dictionary. If the ``fallback`` flag is true, the next older version of the
plugin is tried, until a working version is found. If false, the resolution
process continues with the next plugin project name.
@@ -455,7 +455,7 @@ without conflicts or missing requirements.
may not be able to safely downgrade a version of a package. Others may want
to ensure that a new plugin configuration is either 100% good or else
revert to a known-good configuration. (That is, they may wish to revert to
- a known configuration if the `error_info` return value is non-empty.)
+ a known configuration if the ``error_info`` return value is non-empty.)
Note that this algorithm gives precedence to satisfying the dependencies of
alphabetically prior project names in case of version conflicts. If two
@@ -473,22 +473,22 @@ that are present and potentially importable on the current platform.
distributions during dependency resolution.
``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)``
- Create an environment snapshot by scanning `search_path` for distributions
- compatible with `platform` and `python`. `search_path` should be a
+ Create an environment snapshot by scanning ``search_path`` for distributions
+ compatible with ``platform`` and ``python``. ``search_path`` should be a
sequence of strings such as might be used on ``sys.path``. If a
- `search_path` isn't supplied, ``sys.path`` is used.
+ ``search_path`` isn't supplied, ``sys.path`` is used.
- `platform` is an optional string specifying the name of the platform
+ ``platform`` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
- unspecified, it defaults to the current platform. `python` is an
+ unspecified, it defaults to the current platform. ``python`` is an
optional string naming the desired version of Python (e.g. ``'2.4'``);
it defaults to the currently-running version.
- You may explicitly set `platform` (and/or `python`) to ``None`` if you
+ You may explicitly set ``platform`` (and/or ``python``) to ``None`` if you
wish to include *all* distributions, not just those compatible with the
running platform or Python version.
- Note that `search_path` is scanned immediately for distributions, and the
+ Note that ``search_path`` is scanned immediately for distributions, and the
resulting ``Environment`` is a snapshot of the found distributions. It
is not automatically updated if the system's state changes due to e.g.
installation or removal of distributions.
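
As a rough illustration of the snapshot behaviour just described (not part of the documented text)::

    from pkg_resources import Environment

    env = Environment()              # scans sys.path for distributions right now
    for dist in env["setuptools"]:   # distributions found for that project name
        print(dist.project_name, dist.version)
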
@@ -504,15 +504,15 @@ distributions during dependency resolution.
The yielded names are always in lower case.
``add(dist)``
- Add `dist` to the environment if it matches the platform and python version
+ Add ``dist`` to the environment if it matches the platform and python version
specified at creation time, and only if the distribution hasn't already
been added. (i.e., adding the same distribution more than once is a no-op.)
``remove(dist)``
- Remove `dist` from the environment.
+ Remove ``dist`` from the environment.
``can_add(dist)``
- Is distribution `dist` acceptable for this environment? If it's not
+ Is distribution ``dist`` acceptable for this environment? If it's not
compatible with the ``platform`` and ``python`` version values specified
when the environment was created, a false value is returned.
@@ -534,34 +534,34 @@ distributions during dependency resolution.
are silently ignored.
``best_match(req, working_set, installer=None)``
- Find distribution best matching `req` and usable on `working_set`
+ Find distribution best matching ``req`` and usable on ``working_set``
- This calls the ``find(req)`` method of the `working_set` to see if a
+ This calls the ``find(req)`` method of the ``working_set`` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
- active in the specified `working_set`.) If a suitable distribution isn't
+ active in the specified ``working_set``.) If a suitable distribution isn't
active, this method returns the newest distribution in the environment
- that meets the ``Requirement`` in `req`. If no suitable distribution is
- found, and `installer` is supplied, then the result of calling
+ that meets the ``Requirement`` in ``req``. If no suitable distribution is
+ found, and ``installer`` is supplied, then the result of calling
the environment's ``obtain(req, installer)`` method will be returned.
``obtain(requirement, installer=None)``
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
- ``installer(requirement)``, unless `installer` is None, in which case
+ ``installer(requirement)``, unless ``installer`` is None, in which case
None is returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
- to the `installer` argument.
+ to the ``installer`` argument.
``scan(search_path=None)``
- Scan `search_path` for distributions usable on `platform`
+ Scan ``search_path`` for distributions usable on ``platform``
- Any distributions found are added to the environment. `search_path` should
+ Any distributions found are added to the environment. ``search_path`` should
be a sequence of strings such as might be used on ``sys.path``. If not
supplied, ``sys.path`` is used. Only distributions conforming to
the platform/python version defined at initialization are added. This
method is a shortcut for using the ``find_distributions()`` function to
- find the distributions from each item in `search_path`, and then calling
+ find the distributions from each item in ``search_path``, and then calling
``add()`` to add each one to the environment.
@@ -627,10 +627,10 @@ Requirements Parsing
--------------------------------------
``__contains__(dist_or_version)``
- Return true if `dist_or_version` fits the criteria for this requirement.
- If `dist_or_version` is a ``Distribution`` object, its project name must
+ Return true if ``dist_or_version`` fits the criteria for this requirement.
+ If ``dist_or_version`` is a ``Distribution`` object, its project name must
match the requirement's project name, and its version must meet the
- requirement's version criteria. If `dist_or_version` is a string, it is
+ requirement's version criteria. If ``dist_or_version`` is a string, it is
parsed using the ``parse_version()`` utility function. Otherwise, it is
assumed to be an already-parsed version.
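
A small, purely illustrative example of the containment check described above (the project name and versions are made up)::

    from pkg_resources import Requirement

    req = Requirement.parse("examplepkg>=1.0,<2.0")
    print("1.4" in req)   # True: the version string meets the criteria
    print("2.1" in req)   # False
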
@@ -668,8 +668,8 @@ Requirements Parsing
``specs``
A list of ``(op,version)`` tuples, sorted in ascending parsed-version
- order. The `op` in each tuple is a comparison operator, represented as
- a string. The `version` is the (unparsed) version number.
+ order. The ``op`` in each tuple is a comparison operator, represented as
+ a string. The ``version`` is the (unparsed) version number.
``marker``
An instance of ``packaging.markers.Marker`` that allows evaluation
@@ -721,14 +721,14 @@ in sys.path order, etc.
Convenience API
---------------
-In the following functions, the `dist` argument can be a ``Distribution``
+In the following functions, the ``dist`` argument can be a ``Distribution``
instance, a ``Requirement`` instance, or a string specifying a requirement
(i.e. project name, version, etc.). If the argument is a string or
``Requirement``, the specified distribution is located (and added to sys.path
if not already present). An error will be raised if a matching distribution is
not available.
-The `group` argument should be a string containing a dotted identifier,
+The ``group`` argument should be a string containing a dotted identifier,
identifying an entry point group. If you are defining an entry point group,
you should include some portion of your package's name in the group name so as
to avoid collision with other packages' entry point groups.
@@ -738,25 +738,25 @@ to avoid collision with other packages' entry point groups.
``ImportError``.
``get_entry_info(dist, group, name)``
- Return an ``EntryPoint`` object for the given `group` and `name` from
+ Return an ``EntryPoint`` object for the given ``group`` and ``name`` from
the specified distribution. Returns ``None`` if the distribution has not
advertised a matching entry point.
``get_entry_map(dist, group=None)``
- Return the distribution's entry point map for `group`, or the full entry
+ Return the distribution's entry point map for ``group``, or the full entry
map for the distribution. This function always returns a dictionary,
- even if the distribution advertises no entry points. If `group` is given,
+ even if the distribution advertises no entry points. If ``group`` is given,
the dictionary maps entry point names to the corresponding ``EntryPoint``
- object. If `group` is None, the dictionary maps group names to
+ object. If ``group`` is None, the dictionary maps group names to
dictionaries that then map entry point names to the corresponding
``EntryPoint`` instance in that group.
``iter_entry_points(group, name=None)``
- Yield entry point objects from `group` matching `name`.
+ Yield entry point objects from ``group`` matching ``name``.
- If `name` is None, yields all entry points in `group` from all
+ If ``name`` is None, yields all entry points in ``group`` from all
distributions in the working set on sys.path, otherwise only ones matching
- both `group` and `name` are yielded. Entry points are yielded from
+ both ``group`` and ``name`` are yielded. Entry points are yielded from
the active distributions in the order that the distributions appear on
sys.path. (Within entry points for a particular distribution, however,
there is no particular ordering.)
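
For the convenience function documented here, a minimal sketch; the ``myapp.plugins`` group and the plugins themselves are hypothetical::

    import pkg_resources

    for entry_point in pkg_resources.iter_entry_points(group="myapp.plugins"):
        plugin = entry_point.load()   # imports the module and resolves the attrs
        print(entry_point.name, plugin)
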
@@ -769,26 +769,26 @@ Creating and Parsing
--------------------
``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)``
- Create an ``EntryPoint`` instance. `name` is the entry point name. The
- `module_name` is the (dotted) name of the module containing the advertised
- object. `attrs` is an optional tuple of names to look up from the
- module to obtain the advertised object. For example, an `attrs` of
- ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the
+ Create an ``EntryPoint`` instance. ``name`` is the entry point name. The
+ ``module_name`` is the (dotted) name of the module containing the advertised
+ object. ``attrs`` is an optional tuple of names to look up from the
+ module to obtain the advertised object. For example, an ``attrs`` of
+ ``("foo","bar")`` and a ``module_name`` of ``"baz"`` would mean that the
advertised object could be obtained by the following code::
import baz
advertised_object = baz.foo.bar
- The `extras` are an optional tuple of "extra feature" names that the
+ The ``extras`` are an optional tuple of "extra feature" names that the
distribution needs in order to provide this entry point. When the
- entry point is loaded, these extra features are looked up in the `dist`
+ entry point is loaded, these extra features are looked up in the ``dist``
argument to find out what other distributions may need to be activated
- on sys.path; see the ``load()`` method for more details. The `extras`
- argument is only meaningful if `dist` is specified. `dist` must be
+ on sys.path; see the ``load()`` method for more details. The ``extras``
+ argument is only meaningful if ``dist`` is specified. ``dist`` must be
a ``Distribution`` instance.
``EntryPoint.parse(src, dist=None)`` (classmethod)
- Parse a single entry point from string `src`
+ Parse a single entry point from string ``src``
Entry point syntax follows the form::
@@ -796,27 +796,27 @@ Creating and Parsing
The entry name and module name are required, but the ``:attrs`` and
``[extras]`` parts are optional, as is the whitespace shown between
- some of the items. The `dist` argument is passed through to the
+ some of the items. The ``dist`` argument is passed through to the
``EntryPoint()`` constructor, along with the other values parsed from
- `src`.
+ ``src``.
``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod)
- Parse `lines` (a string or sequence of lines) to create a dictionary
+ Parse ``lines`` (a string or sequence of lines) to create a dictionary
mapping entry point names to ``EntryPoint`` objects. ``ValueError`` is
- raised if entry point names are duplicated, if `group` is not a valid
+ raised if entry point names are duplicated, if ``group`` is not a valid
entry point group name, or if there are any syntax errors. (Note: the
- `group` parameter is used only for validation and to create more
- informative error messages.) If `dist` is provided, it will be used to
+ ``group`` parameter is used only for validation and to create more
+ informative error messages.) If ``dist`` is provided, it will be used to
set the ``dist`` attribute of the created ``EntryPoint`` objects.
``EntryPoint.parse_map(data, dist=None)`` (classmethod)
- Parse `data` into a dictionary mapping group names to dictionaries mapping
- entry point names to ``EntryPoint`` objects. If `data` is a dictionary,
+ Parse ``data`` into a dictionary mapping group names to dictionaries mapping
+ entry point names to ``EntryPoint`` objects. If ``data`` is a dictionary,
then the keys are used as group names and the values are passed to
- ``parse_group()`` as the `lines` argument. If `data` is a string or
+ ``parse_group()`` as the ``lines`` argument. If ``data`` is a string or
sequence of lines, it is first split into .ini-style sections (using
the ``split_sections()`` utility function) and the section names are used
- as group names. In either case, the `dist` argument is passed through to
+ as group names. In either case, the ``dist`` argument is passed through to
``parse_group()`` so that the entry points will be linked to the specified
distribution.
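
The entry point syntax above can be exercised directly; every name in this sketch is invented::

    from pkg_resources import EntryPoint

    ep = EntryPoint.parse("hello = meowpkg.cli:main [cli]")
    print(ep.name)         # 'hello'
    print(ep.module_name)  # 'meowpkg.cli'
    print(ep.attrs)        # ('main',)
    print(ep.extras)       # ('cli',)
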
@@ -837,9 +837,9 @@ addition, the following methods are provided:
Ensure that any "extras" needed by the entry point are available on
sys.path. ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``,
but no ``dist``, or if the named extras are not defined by the
- distribution. If `env` is supplied, it must be an ``Environment``, and it
+ distribution. If ``env`` is supplied, it must be an ``Environment``, and it
will be used to search for needed distributions if they are not already
- present on sys.path. If `installer` is supplied, it must be a callable
+ present on sys.path. If ``installer`` is supplied, it must be a callable
taking a ``Requirement`` instance and returning a matching importable
``Distribution`` instance or None.
@@ -872,16 +872,16 @@ available distributions, respectively.) You can also obtain ``Distribution``
objects from one of these high-level APIs:
``find_distributions(path_item, only=False)``
- Yield distributions accessible via `path_item`. If `only` is true, yield
- only distributions whose ``location`` is equal to `path_item`. In other
- words, if `only` is true, this yields any distributions that would be
- importable if `path_item` were on ``sys.path``. If `only` is false, this
- also yields distributions that are "in" or "under" `path_item`, but would
+ Yield distributions accessible via ``path_item``. If ``only`` is true, yield
+ only distributions whose ``location`` is equal to ``path_item``. In other
+ words, if ``only`` is true, this yields any distributions that would be
+ importable if ``path_item`` were on ``sys.path``. If ``only`` is false, this
+ also yields distributions that are "in" or "under" ``path_item``, but would
not be importable unless their locations were also added to ``sys.path``.
``get_distribution(dist_spec)``
Return a ``Distribution`` object for a given ``Requirement`` or string.
- If `dist_spec` is already a ``Distribution`` instance, it is returned.
+ If ``dist_spec`` is already a ``Distribution`` instance, it is returned.
If it is a ``Requirement`` object or a string that can be parsed into one,
it is used to locate and activate a matching distribution, which is then
returned.
@@ -890,18 +890,18 @@ However, if you're creating specialized tools for working with distributions,
or creating a new distribution format, you may also need to create
``Distribution`` objects directly, using one of the three constructors below.
-These constructors all take an optional `metadata` argument, which is used to
-access any resources or metadata associated with the distribution. `metadata`
+These constructors all take an optional ``metadata`` argument, which is used to
+access any resources or metadata associated with the distribution. ``metadata``
must be an object that implements the ``IResourceProvider`` interface, or None.
If it is None, an ``EmptyProvider`` is used instead. ``Distribution`` objects
implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by
-delegating them to the `metadata` object.
+delegating them to the ``metadata`` object.
``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod)
- Create a distribution for `location`, which must be a string such as a
+ Create a distribution for ``location``, which must be a string such as a
URL, filename, or other string that might be used on ``sys.path``.
- `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
- If `basename` ends with ``.egg``, then the project's name, version, python
+ ``basename`` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
+ If ``basename`` ends with ``.egg``, then the project's name, version, python
version and platform are extracted from the filename and used to set those
properties of the created distribution. Any additional keyword arguments
are forwarded to the ``Distribution()`` constructor.
@@ -917,8 +917,8 @@ delegating them to the `metadata` object.
``Distribution(location,metadata,project_name,version,py_version,platform,precedence)``
Create a distribution by setting its properties. All arguments are
- optional and default to None, except for `py_version` (which defaults to
- the current Python version) and `precedence` (which defaults to
+ optional and default to None, except for ``py_version`` (which defaults to
+ the current Python version) and ``precedence`` (which defaults to
``EGG_DIST``; for more details see ``precedence`` under `Distribution
Attributes`_ below). Note that it's usually easier to use the
``from_filename()`` or ``from_location()`` constructors than to specify
@@ -938,7 +938,7 @@ project_name
A string, naming the project that this distribution is for. Project names
are defined by a project's setup script, and they are used to identify
projects on PyPI. When a ``Distribution`` is constructed, the
- `project_name` argument is passed through the ``safe_name()`` utility
+ ``project_name`` argument is passed through the ``safe_name()`` utility
function to filter out any unacceptable characters.
key
@@ -952,9 +952,9 @@ extras
version
A string denoting what release of the project this distribution contains.
- When a ``Distribution`` is constructed, the `version` argument is passed
+ When a ``Distribution`` is constructed, the ``version`` argument is passed
through the ``safe_version()`` utility function to filter out any
- unacceptable characters. If no `version` is specified at construction
+ unacceptable characters. If no ``version`` is specified at construction
time, then attempting to access this attribute later will cause the
``Distribution`` to try to discover its version by reading its ``PKG-INFO``
metadata file. If ``PKG-INFO`` is unavailable or can't be parsed,
@@ -967,7 +967,7 @@ parsed_version
distributions by version. (See the `Parsing Utilities`_ section below for
more information on the ``parse_version()`` function.) Note that accessing
``parsed_version`` may result in a ``ValueError`` if the ``Distribution``
- was constructed without a `version` and without `metadata` capable of
+ was constructed without a ``version`` and without ``metadata`` capable of
supplying the missing version info.
py_version
@@ -998,9 +998,9 @@ precedence
------------------------
``activate(path=None)``
- Ensure distribution is importable on `path`. If `path` is None,
+ Ensure distribution is importable on ``path``. If ``path`` is None,
``sys.path`` is used instead. This ensures that the distribution's
- ``location`` is in the `path` list, and it also performs any necessary
+ ``location`` is in the ``path`` list, and it also performs any necessary
namespace package fixups or declarations. (That is, if the distribution
contains namespace packages, this method ensures that they are declared,
and that the distribution's contents for those namespace packages are
@@ -1020,7 +1020,7 @@ precedence
``requires(extras=())``
List the ``Requirement`` objects that specify this distribution's
- dependencies. If `extras` is specified, it should be a sequence of names
+ dependencies. If ``extras`` is specified, it should be a sequence of names
of "extras" defined by the distribution, and the list returned will then
include any dependencies needed to support the named "extras".
@@ -1047,11 +1047,11 @@ by the distribution. See the section above on `Entry Points`_ for more
detailed information about these operations:
``get_entry_info(group, name)``
- Return the ``EntryPoint`` object for `group` and `name`, or None if no
+ Return the ``EntryPoint`` object for ``group`` and ``name``, or None if no
such point is advertised by this distribution.
``get_entry_map(group=None)``
- Return the entry point map for `group`. If `group` is None, return
+ Return the entry point map for ``group``. If ``group`` is None, return
a dictionary mapping group names to entry point maps for all groups.
(An entry point map is a dictionary of entry point names to ``EntryPoint``
objects.)
@@ -1079,8 +1079,8 @@ documented in later sections):
* ``resource_isdir(resource_name)``
* ``resource_listdir(resource_name)``
-If the distribution was created with a `metadata` argument, these resource and
-metadata access methods are all delegated to that `metadata` provider.
+If the distribution was created with a ``metadata`` argument, these resource and
+metadata access methods are all delegated to that ``metadata`` provider.
Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution
will appear to have no resources or metadata. This delegation approach is used
so that supporting custom importers or new distribution formats can be done
@@ -1112,11 +1112,11 @@ Thus, you can use the APIs below without needing an explicit
Basic Resource Access
---------------------
-In the following methods, the `package_or_requirement` argument may be either
+In the following methods, the ``package_or_requirement`` argument may be either
a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
If it is a package or module name, the named module or package must be
importable (i.e., be in a distribution or directory on ``sys.path``), and the
-`resource_name` argument is interpreted relative to the named package. (Note
+``resource_name`` argument is interpreted relative to the named package. (Note
that if a module name is used, then the resource name is relative to the
package immediately containing the named module. Also, you should not use use
a namespace package name, because a namespace package can be spread across
@@ -1127,7 +1127,7 @@ If it is a ``Requirement``, then the requirement is automatically resolved
(searching the current ``Environment`` if necessary) and a matching
distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
already present. (Unless the ``Requirement`` can't be satisfied, in which
-case an exception is raised.) The `resource_name` argument is then interpreted
+case an exception is raised.) The ``resource_name`` argument is then interpreted
relative to the root of the identified distribution; i.e. its first path
segment will be treated as a peer of the top-level modules or packages in the
distribution.
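
An illustrative sketch of the two argument forms described above, assuming a hypothetical ``meowpkg`` that ships a ``data/defaults.cfg`` resource::

    import pkg_resources

    # By package name: the resource path is relative to the package.
    raw = pkg_resources.resource_string("meowpkg", "data/defaults.cfg")

    # A real filesystem path, extracted from a zipped egg if necessary.
    path = pkg_resources.resource_filename("meowpkg", "data/defaults.cfg")
    print(len(raw), path)
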
@@ -1229,12 +1229,12 @@ no need to use these methods. Unlike the other methods listed above, they are
you must therefore have an explicit ``ResourceManager`` instance to use them.
``get_cache_path(archive_name, names=())``
- Return absolute location in cache for `archive_name` and `names`
+ Return absolute location in cache for ``archive_name`` and ``names``
The parent directory of the resulting path will be created if it does
- not already exist. `archive_name` should be the base filename of the
+ not already exist. ``archive_name`` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
- including its ".egg" extension. `names`, if provided, should be a
+ including its ".egg" extension. ``names``, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
@@ -1250,12 +1250,12 @@ you must therefore have an explicit ``ResourceManager`` instance to use them.
wrap or handle extraction errors themselves.
``postprocess(tempname, filename)``
- Perform any platform-specific postprocessing of `tempname`.
+ Perform any platform-specific postprocessing of ``tempname``.
Resource providers should call this method ONLY after successfully
extracting a compressed resource. They must NOT call it on resources
that are already in the filesystem.
- `tempname` is the current (temporary) name of the file, and `filename`
+ ``tempname`` is the current (temporary) name of the file, and ``filename``
is the name it will be renamed to by the caller after this routine
returns.
@@ -1323,7 +1323,7 @@ implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are:
``run_script(script_name, namespace)``
Execute the named script in the supplied namespace dictionary. Raises
``ResolutionError`` if there is no script by that name in the ``scripts``
- metadata directory. `namespace` should be a Python dictionary, usually
+ metadata directory. ``namespace`` should be a Python dictionary, usually
a module dictionary if the script is being run as a module.
@@ -1380,11 +1380,11 @@ with other (PEP 302-compatible) importers or module loaders, you may need to
register various handlers and support functions using these APIs:
``register_finder(importer_type, distribution_finder)``
- Register `distribution_finder` to find distributions in ``sys.path`` items.
- `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path``
- item handler), and `distribution_finder` is a callable that, when passed a
- path item, the importer instance, and an `only` flag, yields
- ``Distribution`` instances found under that path item. (The `only` flag,
+ Register ``distribution_finder`` to find distributions in ``sys.path`` items.
+ ``importer_type`` is the type or class of a PEP 302 "Importer" (``sys.path``
+ item handler), and ``distribution_finder`` is a callable that, when passed a
+ path item, the importer instance, and an ``only`` flag, yields
+ ``Distribution`` instances found under that path item. (The ``only`` flag,
if true, means the finder should yield only ``Distribution`` objects whose
``location`` is equal to the path item provided.)
@@ -1392,16 +1392,16 @@ register various handlers and support functions using these APIs:
example finder function.
``register_loader_type(loader_type, provider_factory)``
- Register `provider_factory` to make ``IResourceProvider`` objects for
- `loader_type`. `loader_type` is the type or class of a PEP 302
- ``module.__loader__``, and `provider_factory` is a function that, when
+ Register ``provider_factory`` to make ``IResourceProvider`` objects for
+ ``loader_type``. ``loader_type`` is the type or class of a PEP 302
+ ``module.__loader__``, and ``provider_factory`` is a function that, when
passed a module object, returns an `IResourceProvider`_ for that module,
allowing it to be used with the `ResourceManager API`_.
``register_namespace_handler(importer_type, namespace_handler)``
- Register `namespace_handler` to declare namespace packages for the given
- `importer_type`. `importer_type` is the type or class of a PEP 302
- "importer" (sys.path item handler), and `namespace_handler` is a callable
+ Register ``namespace_handler`` to declare namespace packages for the given
+ ``importer_type``. ``importer_type`` is the type or class of a PEP 302
+ "importer" (sys.path item handler), and ``namespace_handler`` is a callable
with a signature like this::
def namespace_handler(importer, path_entry, moduleName, module):
@@ -1421,23 +1421,23 @@ IResourceProvider
-----------------
``IResourceProvider`` is an abstract class that documents what methods are
-required of objects returned by a `provider_factory` registered with
+required of objects returned by a ``provider_factory`` registered with
``register_loader_type()``. ``IResourceProvider`` is a subclass of
``IMetadataProvider``, so objects that implement this interface must also
implement all of the `IMetadataProvider Methods`_ as well as the methods
-shown here. The `manager` argument to the methods below must be an object
+shown here. The ``manager`` argument to the methods below must be an object
that supports the full `ResourceManager API`_ documented above.
``get_resource_filename(manager, resource_name)``
- Return a true filesystem path for `resource_name`, coordinating the
- extraction with `manager`, if the resource must be unpacked to the
+ Return a true filesystem path for ``resource_name``, coordinating the
+ extraction with ``manager``, if the resource must be unpacked to the
filesystem.
``get_resource_stream(manager, resource_name)``
- Return a readable file-like object for `resource_name`.
+ Return a readable file-like object for ``resource_name``.
``get_resource_string(manager, resource_name)``
- Return a string containing the contents of `resource_name`.
+ Return a string containing the contents of ``resource_name``.
``has_resource(resource_name)``
Does the package contain the named resource?
@@ -1501,15 +1501,15 @@ where appropriate. Their inheritance tree looks like this::
``PathMetadata(path, egg_info)``
Create an ``IResourceProvider`` for a filesystem-based distribution, where
- `path` is the filesystem location of the importable modules, and `egg_info`
+ ``path`` is the filesystem location of the importable modules, and ``egg_info``
is the filesystem location of the distribution's metadata directory.
- `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an
- "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for
+ ``egg_info`` should usually be the ``EGG-INFO`` subdirectory of ``path`` for an
+ "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of ``path`` for
a "development egg". However, other uses are possible for custom purposes.
``EggMetadata(zipimporter)``
Create an ``IResourceProvider`` for a zipfile-based distribution. The
- `zipimporter` should be a ``zipimport.zipimporter`` instance, and may
+ ``zipimporter`` should be a ``zipimport.zipimporter`` instance, and may
represent a "basket" (a zipfile containing multiple ".egg" subdirectories)
a specific egg *within* a basket, or a zipfile egg (where the zipfile
itself is a ".egg"). It can also be a combination, such as a zipfile egg
@@ -1547,12 +1547,12 @@ Parsing Utilities
``yield_lines(strs)``
Yield non-empty/non-comment lines from a string/unicode or a possibly-
- nested sequence thereof. If `strs` is an instance of ``basestring``, it
+ nested sequence thereof. If ``strs`` is an instance of ``basestring``, it
is split into lines, and each non-blank, non-comment line is yielded after
stripping leading and trailing whitespace. (Lines whose first non-blank
character is ``#`` are considered comment lines.)
- If `strs` is not an instance of ``basestring``, it is iterated over, and
+ If ``strs`` is not an instance of ``basestring``, it is iterated over, and
each item is passed recursively to ``yield_lines()``, so that an arbitrarily
nested sequence of strings, or sequences of sequences of strings can be
flattened out to the lines contained therein. So for example, passing
@@ -1636,15 +1636,15 @@ Platform Utilities
``compatible_platforms()`` function.
``compatible_platforms(provided, required)``
- Return true if a distribution built on the `provided` platform may be used
- on the `required` platform. If either platform value is ``None``, it is
+ Return true if a distribution built on the ``provided`` platform may be used
+ on the ``required`` platform. If either platform value is ``None``, it is
considered a wildcard, and the platforms are therefore compatible.
Likewise, if the platform strings are equal, they're also considered
compatible, and ``True`` is returned. Currently, the only non-equal
platform strings that are considered compatible are macOS platform
strings with the same hardware type (e.g. ``ppc``) and major version
- (e.g. ``10``) with the `provided` platform's minor version being less than
- or equal to the `required` platform's minor version.
+ (e.g. ``10``) with the ``provided`` platform's minor version being less than
+ or equal to the ``required`` platform's minor version.
``get_default_cache()``
Determine the default cache location for extracting resources from zipped
@@ -1666,14 +1666,14 @@ File/Path Utilities
-------------------
``ensure_directory(path)``
- Ensure that the parent directory (``os.path.dirname``) of `path` actually
+ Ensure that the parent directory (``os.path.dirname``) of ``path`` actually
exists, using ``os.makedirs()`` if necessary.
``normalize_path(path)``
- Return a "normalized" version of `path`, such that two paths represent
+ Return a "normalized" version of ``path``, such that two paths represent
the same filesystem location if they have equal ``normalized_path()``
values. Specifically, this is a shortcut for calling ``os.path.realpath``
- and ``os.path.normcase`` on `path`. Unfortunately, on certain platforms
+ and ``os.path.normcase`` on ``path``. Unfortunately, on certain platforms
(notably Cygwin and macOS) the ``normcase`` function does not accurately
reflect the platform's case-sensitivity, so there is always the possibility
of two apparently-different paths being equal on such platforms.
diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst
index 563561908c..03ce9fa23a 100644
--- a/docs/references/keywords.rst
+++ b/docs/references/keywords.rst
@@ -1,3 +1,7 @@
+========
+Keywords
+========
+
``name``
A string specifying the name of the package.
@@ -189,7 +193,7 @@
discovery of services or plugins provided by a project. See :ref:`Dynamic
Discovery of Services and Plugins` for details and examples of the format
of this argument. In addition, this keyword is used to support
- :ref:`Automatic Script Creation`.
+ :ref:`Automatic Script Creation <entry_points>`.
``extras_require``
A dictionary mapping names of "extras" (optional features of your project)
@@ -282,7 +286,7 @@
this argument. The named class must be instantiable with no arguments, and
its instances must support the ``loadTestsFromNames()`` method as defined
in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will
- pass only one test "name" in the `names` argument: the value supplied for
+ pass only one test "name" in the ``names`` argument: the value supplied for
the ``test_suite`` argument. The loader you specify may interpret this
string in any way it likes, as there are no restrictions on what may be
contained in a ``test_suite`` string.
@@ -317,15 +321,15 @@
``use_2to3``
Convert the source code from Python 2 to Python 3 with 2to3 during the
- build process. See :doc:`python3` for more details.
+ build process. See :doc:`../deprecated/python3` for more details.
``convert_2to3_doctests``
List of doctest source files that need to be converted with 2to3.
- See :doc:`python3` for more details.
+ See :doc:`../deprecated/python3` for more details.
``use_2to3_fixers``
A list of modules to search for additional fixers to be used during
- the 2to3 conversion. See :doc:`python3` for more details.
+ the 2to3 conversion. See :doc:`../deprecated/python3` for more details.
``use_2to3_exclude_fixers``
List of fixer names to be skipped.
diff --git a/docs/userguide/commands.rst b/docs/userguide/commands.rst
index c64f62bfdd..e632e550b3 100644
--- a/docs/userguide/commands.rst
+++ b/docs/userguide/commands.rst
@@ -275,7 +275,7 @@ is used when you are building source distributions.)
In addition to writing the core egg metadata defined by ``setuptools`` and
required by ``pkg_resources``, this command can be extended to write other
metadata files as well, by defining entry points in the ``egg_info.writers``
-group. See the section on `Adding new EGG-INFO Files`_ below for more details.
+group. See the section on :ref:`Adding new EGG-INFO Files` below for more details.
Note that using additional metadata writers may require you to include a
``setup_requires`` argument to ``setup()`` in order to ensure that the desired
writers are available on ``sys.path``.
@@ -315,7 +315,7 @@ added in the following order:
(Note: Because these options modify the version number used for source and
binary distributions of your project, you should first make sure that you know
how the resulting version numbers will be interpreted by automated tools
-like pip. See the section above on `Specifying Your Project's Version`_ for an
+like pip. See the section above on :ref:`Specifying Your Project's Version` for an
explanation of pre- and post-release tags, as well as tips on how to choose and
verify a versioning scheme for your project.)
diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 315ec7245d..69cf36e699 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -20,8 +20,8 @@ e.g.::
This tells setuptools to install any data files it finds in your packages.
The data files must be specified via the distutils' ``MANIFEST.in`` file.
(They can also be tracked by a revision control system, using an appropriate
-plugin. See the section below on `Adding Support for Revision Control
-Systems`_ for information on how to write such plugins.)
+plugin. See the section below on :ref:`Adding Support for Revision
+Control Systems` for information on how to write such plugins.)
If you want finer-grained control over what files are included (for example,
if you have documentation files in your package directories and want to exclude
@@ -144,6 +144,9 @@ if they track intermediate revisions of your project using Subversion; be sure
to let them know when you make changes that remove files from inclusion so they
can run ``setup.py clean --all``.
+
+.. _Accessing Data Files at Runtime:
+
Accessing Data Files at Runtime
-------------------------------
@@ -171,4 +174,4 @@ fall back to the platform-specific location for installing data files, there is
no supported facility to reliably retrieve these resources.
Instead, the PyPA recommends that any data files you wish to be accessible at
-run time be included in the package.
\ No newline at end of file
+run time be included in the package.
diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst
index 51c897c409..bc66869b6e 100644
--- a/docs/userguide/declarative_config.rst
+++ b/docs/userguide/declarative_config.rst
@@ -1,3 +1,5 @@
+.. _declarative config:
+
-----------------------------------------
Configuring setup() using setup.cfg files
-----------------------------------------
@@ -199,7 +201,7 @@ obsoletes list-comma
string in such a file, so validation is stricter in this case.
Notes:
-1. The `version` file attribute has only been supported since 39.2.0.
+1. The ``version`` file attribute has only been supported since 39.2.0.
Options
-------
@@ -235,12 +237,12 @@ data_files dict 40.6.0
**packages** - The ``find:`` and ``find_namespace:`` directive can be further configured
in a dedicated subsection ``options.packages.find``. This subsection
- accepts the same keys as the `setuptools.find_packages` and the
- `setuptools.find_namespace_packages` function:
+ accepts the same keys as the ``setuptools.find_packages`` and the
+ ``setuptools.find_namespace_packages`` function:
``where``, ``include``, and ``exclude``.
**find_namespace directive** - The ``find_namespace:`` directive is supported since Python >=3.3.
Notes:
-1. In the `package_data` section, a key named with a single asterisk (`*`)
-refers to all packages, in lieu of the empty string used in `setup.py`.
+1. In the ``package_data`` section, a key named with a single asterisk (``*``)
+refers to all packages, in lieu of the empty string used in ``setup.py``.
diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst
index a26ab6c3b0..354a9f8c36 100644
--- a/docs/userguide/dependency_management.rst
+++ b/docs/userguide/dependency_management.rst
@@ -25,7 +25,7 @@ you also need the ``wheel`` package as well since it is recommended that you
upload a ``.whl`` file to PyPI alongside your ``.tar.gz`` file. Unlike the
other two types of dependency keyword, this one is specified in your
``pyproject.toml`` file (if you have forgot what this is, go to
-:ref:`quickstart` or (WIP)):
+:doc:`quickstart` or (WIP)):
.. code-block:: ini
@@ -36,10 +36,11 @@ other two types of dependency keyword, this one is specified in your
.. note::
This used to be accomplished with the ``setup_requires`` keyword but is
now considered deprecated in favor of the PEP 517 style described above.
- To peek into how this legacy keyword is used, consult our :ref:`guide on
- deprecated practice (WIP)`
+ To peek into how this legacy keyword is used, consult our :doc:`guide on
+ deprecated practice (WIP) <../deprecated/index>`
+.. _Declaring Dependencies:
Declaring required dependency
=============================
@@ -266,7 +267,7 @@ the two dependencies ``PDF`` maps to.
The second use case is that other package can use this "extra" for their
own dependencies. For example, if "Project-B" needs "project A" with PDF support
-installed, it might declare the dependency like this::
+installed, it might declare the dependency like this:
.. code-block:: ini
@@ -309,4 +310,4 @@ In some cases, you might need to specify the minimum required python version.
This is handled with the ``python_requires`` keyword supplied to ``setup.cfg``
or ``setup.py``.
-Example WIP
\ No newline at end of file
+Example WIP
diff --git a/docs/userguide/development_mode.rst b/docs/userguide/development_mode.rst
index 9d4e758155..bce724a79f 100644
--- a/docs/userguide/development_mode.rst
+++ b/docs/userguide/development_mode.rst
@@ -49,7 +49,7 @@ source from a staging area using ``setup.py develop --uninstall``, specifying
the desired staging area if it's not the default.
There are several options to control the precise behavior of the ``develop``
-command; see the section on the `develop`_ command below for more details.
+command; see the section on the :ref:`develop <develop>` command below for more details.
Note that you can also apply setuptools commands to non-setuptools projects,
using commands like this::
@@ -57,4 +57,4 @@ using commands like this::
python -c "import setuptools; with open('setup.py') as f: exec(compile(f.read(), 'setup.py', 'exec'))" develop
That is, you can simply list the normal setup commands and options following
-the quoted part.
\ No newline at end of file
+the quoted part.
diff --git a/docs/userguide/distribution.rst b/docs/userguide/distribution.rst
index 77ea2660e2..377f7bb4f1 100644
--- a/docs/userguide/distribution.rst
+++ b/docs/userguide/distribution.rst
@@ -23,17 +23,17 @@ egg distributions by adding one or more of the following to the project's
You can add these tags by adding ``egg_info`` and the desired options to
the command line ahead of the ``sdist`` or ``bdist`` commands that you want
to generate a daily build or snapshot for. See the section below on the
-`egg_info`_ command for more details.
+:ref:`egg_info <egg_info>` command for more details.
(Also, before you release your project, be sure to see the section above on
-`Specifying Your Project's Version`_ for more information about how pre- and
+:ref:`Specifying Your Project's Version` for more information about how pre- and
post-release tags affect how version numbers are interpreted. This is
important in order to make sure that dependency processing tools will know
which versions of your project are newer than others.)
Finally, if you are creating builds frequently, and either building them in a
downloadable location or are copying them to a distribution server, you should
-probably also check out the `rotate`_ command, which lets you automatically
+probably also check out the :ref:`rotate <rotate>` command, which lets you automatically
delete all but the N most-recently-modified distributions matching a glob
pattern. So, you can use a command line like::
@@ -46,7 +46,7 @@ that were built most recently.
If you have to manage automated builds for multiple packages, each with
different tagging and rotation policies, you may also want to check out the
-`alias`_ command, which would let each package define an alias like ``daily``
+:ref:`alias <alias>` command, which would let each package define an alias like ``daily``
that would perform the necessary tag, build, and rotate commands. Then, a
simpler script or cron job could just run ``setup.py daily`` in each project
directory. (And, you could also define sitewide or per-user default versions
@@ -61,7 +61,7 @@ selection with pluggable endpoints for looking up files to include. If you are
using a revision control system, and your source distributions only need to
include files that you're tracking in revision control, use a corresponding
plugin instead of writing a ``MANIFEST.in`` file. See the section below on
-`Adding Support for Revision Control Systems`_ for information on plugins.
+:ref:`Adding Support for Revision Control Systems` for information on plugins.
If you need to include automatically generated files, or files that are kept in
an unsupported revision control system, you'll need to create a ``MANIFEST.in``
@@ -114,7 +114,8 @@ You can then use it like this::
setup.py release sdist bdist_egg
Or of course you can create more elaborate aliases that do all of the above.
-See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
+See the sections below on the :ref:`egg_info <egg_info>` and
+:ref:`alias <alias>` commands for more ideas.
Distributing Extensions compiled with Cython
--------------------------------------------
@@ -154,6 +155,9 @@ control system will be able to build it even if they don't have Cython
installed, and that your source releases will be similarly usable with or
without Cython.
+
+.. _Specifying Your Project's Version:
+
Specifying Your Project's Version
---------------------------------
@@ -237,4 +241,4 @@ have setuptools automatically tag your in-development releases with various
pre- or post-release tags. See the following sections for more details:
* `Tagging and "Daily Build" or "Snapshot" Releases`_
-* The `egg_info`_ command
\ No newline at end of file
+* The :ref:`egg_info <egg_info>` command
diff --git a/docs/userguide/entry_point.rst b/docs/userguide/entry_point.rst
index 7f5165a876..d1127ae4fa 100644
--- a/docs/userguide/entry_point.rst
+++ b/docs/userguide/entry_point.rst
@@ -33,6 +33,8 @@ with ``__init__.py`` as:
and ``__main__.py`` providing a hook:
+.. code-block:: python
+
from . import hello_world
if __name__ == '__main__':
hello_world()
@@ -49,7 +51,7 @@ user-friendly name for installers of the package to execute. Installers
like pip will create wrapper scripts to execute a function. In the
above example, to create a command ``hello-world`` that invokes
``timmins.hello_world``, add a console script entry point to
-``setup.cfg``::
+``setup.cfg``:
.. code-block:: ini
@@ -74,6 +76,8 @@ In addition to ``console_scripts``, Setuptools supports ``gui_scripts``, which
will launch a GUI application without running in a terminal window.
+.. _dynamic discovery of services and plugins:
+
Advertising Behavior
====================
@@ -138,9 +142,9 @@ Some entry points may require additional dependencies to properly function.
For such an entry point, declare in square brakets any number of dependency
``extras`` following the entry point definition. Such entry points will only
be viable if their extras were declared and installed. See the
-:ref:`guide on dependencies management <dependency_management>` for
+:doc:`guide on dependencies management <dependency_management>` for
more information on defining extra requirements. Consider from the
-above example::
+above example:
.. code-block:: ini
diff --git a/docs/userguide/extension.rst b/docs/userguide/extension.rst
index 1e4846fc92..4de24ec9d3 100644
--- a/docs/userguide/extension.rst
+++ b/docs/userguide/extension.rst
@@ -1,3 +1,5 @@
+.. _Creating ``distutils`` Extensions:
+
Creating ``distutils`` Extensions
=================================
@@ -9,8 +11,8 @@ the extension just refer to it in their ``setup_requires`` argument.
With ``setuptools``, your distutils extension projects can hook in new
commands and ``setup()`` arguments just by defining "entry points". These
are mappings from command or argument names to a specification of where to
-import a handler from. (See the section on `Dynamic Discovery of Services and
-Plugins`_ above for some more background on entry points.)
+import a handler from. (See the section on :ref:`Dynamic Discovery of
+Services and Plugins` above for some more background on entry points.)
Adding Commands
@@ -120,6 +122,8 @@ plugin is encouraged to load the configuration/settings for their behavior
independently.
+.. _Adding new EGG-INFO Files:
+
Adding new EGG-INFO Files
-------------------------
@@ -173,6 +177,9 @@ the ``cmd`` object's ``write_file()``, ``delete_file()``, and
``write_or_delete_file()`` methods exclusively for your file operations. See
those methods' docstrings for more details.
+
+.. _Adding Support for Revision Control Systems:
+
Adding Support for Revision Control Systems
-------------------------------------------------
@@ -232,4 +239,4 @@ A few important points for writing revision control file finders:
* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
with the absence of needed programs (i.e., ones belonging to the revision
control system itself. It *may*, however, use ``distutils.log.warn()`` to
- inform the user of the missing program(s).
\ No newline at end of file
+ inform the user of the missing program(s).
diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index abee331a2f..57b059e50b 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -24,3 +24,5 @@ ordinary Python packages based on the ``distutils``.
declarative_config
keywords
commands
+ functionalities_rewrite
+ miscellaneous
diff --git a/docs/userguide/keywords.rst b/docs/userguide/keywords.rst
index e2852b3410..268e4f4238 100644
--- a/docs/userguide/keywords.rst
+++ b/docs/userguide/keywords.rst
@@ -8,19 +8,19 @@ unless you need the associated ``setuptools`` feature.
``include_package_data``
If set to ``True``, this tells ``setuptools`` to automatically include any
data files it finds inside your package directories that are specified by
- your ``MANIFEST.in`` file. For more information, see the section below on
- `Including Data Files`_.
+ your ``MANIFEST.in`` file. For more information, see the section on
+ :ref:`Including Data Files`.
``exclude_package_data``
A dictionary mapping package names to lists of glob patterns that should
be *excluded* from your package directories. You can use this to trim back
any excess files included by ``include_package_data``. For a complete
- description and examples, see the section below on `Including Data Files`_.
+ description and examples, see the section on :ref:`Including Data Files`.
``package_data``
A dictionary mapping package names to lists of glob patterns. For a
- complete description and examples, see the section below on `Including
- Data Files`_. You do not need to use this option if you are using
+ complete description and examples, see the section on :ref:`Including
+ Data Files`. You do not need to use this option if you are using
``include_package_data``, unless you need to add e.g. files that are
generated by your setup script and build process. (And are therefore not
in source control or are files that you don't want to include in your
@@ -34,22 +34,22 @@ unless you need the associated ``setuptools`` feature.
``install_requires``
A string or list of strings specifying what other distributions need to
- be installed when this one is. See the section below on `Declaring
- Dependencies`_ for details and examples of the format of this argument.
+ be installed when this one is. See the section on :ref:`Declaring
+ Dependencies` for details and examples of the format of this argument.
``entry_points``
A dictionary mapping entry point group names to strings or lists of strings
defining the entry points. Entry points are used to support dynamic
- discovery of services or plugins provided by a project. See `Dynamic
- Discovery of Services and Plugins`_ for details and examples of the format
- of this argument. In addition, this keyword is used to support `Automatic
- Script Creation`_.
+ discovery of services or plugins provided by a project. See :ref:`Dynamic
+ Discovery of Services and Plugins` for details and examples of the format
+ of this argument. In addition, this keyword is used to support
+ :ref:`Automatic Script Creation <entry_points>`.
``extras_require``
A dictionary mapping names of "extras" (optional features of your project)
to strings or lists of strings specifying what other distributions must be
- installed to support those features. See the section below on `Declaring
- Dependencies`_ for details and examples of the format of this argument.
+ installed to support those features. See the section on :ref:`Declaring
+ Dependencies` for details and examples of the format of this argument.
``python_requires``
A string corresponding to a version specifier (as defined in PEP 440) for
@@ -87,7 +87,7 @@ unless you need the associated ``setuptools`` feature.
as you declare them in each project that contains any subpackages of the
namespace package, and as long as the namespace package's ``__init__.py``
does not contain any code other than a namespace declaration. See the
- section below on `Namespace Packages`_ for more information.
+ section below on :ref:`Namespace Packages` for more information.
``test_suite``
A string naming a ``unittest.TestCase`` subclass (or a package or module
@@ -98,9 +98,9 @@ unless you need the associated ``setuptools`` feature.
added to the tests to be run. If the named suite is a package, any
submodules and subpackages are recursively added to the overall test suite.
- Specifying this argument enables use of the `test`_ command to run the
+ Specifying this argument enables use of the :ref:`test <test>` command to run the
specified test suite, e.g. via ``setup.py test``. See the section on the
- `test`_ command below for more details.
+ :ref:`test <test>` command below for more details.
New in 41.5.0: Deprecated the test command.
@@ -124,7 +124,7 @@ unless you need the associated ``setuptools`` feature.
this argument. The named class must be instantiable with no arguments, and
its instances must support the ``loadTestsFromNames()`` method as defined
in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will
- pass only one test "name" in the `names` argument: the value supplied for
+ pass only one test "name" in the ``names`` argument: the value supplied for
the ``test_suite`` argument. The loader you specify may interpret this
string in any way it likes, as there are no restrictions on what may be
contained in a ``test_suite`` string.
@@ -155,21 +155,21 @@ unless you need the associated ``setuptools`` feature.
extensions that access other files in the project (such as data files or
shared libraries), you probably do NOT need this argument and shouldn't
mess with it. For more details on how this argument works, see the section
- below on `Automatic Resource Extraction`_.
+ below on :ref:`Automatic Resource Extraction`.
``use_2to3``
Convert the source code from Python 2 to Python 3 with 2to3 during the
- build process. See :doc:`python3` for more details.
+ build process. See :doc:`../deprecated/python3` for more details.
``convert_2to3_doctests``
List of doctest source files that need to be converted with 2to3.
- See :doc:`python3` for more details.
+ See :doc:`../deprecated/python3` for more details.
``use_2to3_fixers``
A list of modules to search for additional fixers to be used during
- the 2to3 conversion. See :doc:`python3` for more details.
+ the 2to3 conversion. See :doc:`../deprecated/python3` for more details.
``project_urls``
An arbitrary map of URL names to hyperlinks, allowing more extensible
documentation of where various resources can be found than the simple
- ``url`` and ``download_url`` options provide.
\ No newline at end of file
+ ``url`` and ``download_url`` options provide.
diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index 65e075cddc..3df327d795 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -1,3 +1,5 @@
+.. _Automatic Resource Extraction:
+
Automatic Resource Extraction
-----------------------------
@@ -46,8 +48,8 @@ directory. However, since it can be tedious to create such files by hand, you
may want to create a distutils extension that will create the necessary files
from arguments to ``setup()``, in much the same way that ``setuptools`` does
for many of the ``setup()`` arguments it adds. See the section below on
-`Creating distutils Extensions`_ for more details, especially the subsection on
-`Adding new EGG-INFO Files`_.
+:ref:`Creating ``distutils\`\` Extensions` for more details, especially the
+subsection on :ref:`Adding new EGG-INFO Files`.
Setting the ``zip_safe`` flag
-----------------------------
@@ -75,7 +77,7 @@ no ``__file__`` or ``__path__`` introspection or source code manipulation, then
there is an extremely solid chance the project will work when installed as a
zipfile. (And if the project uses ``pkg_resources`` for all its data file
access, then C extensions and other data files shouldn't be a problem at all.
-See the `Accessing Data Files at Runtime`_ section above for more information.)
+See the :ref:`Accessing Data Files at Runtime` section above for more information.)
However, if ``bdist_egg`` can't be *sure* that your package will work, but
you've checked over all the warnings it issued, and you are either satisfied it
diff --git a/docs/userguide/package_discovery.rst b/docs/userguide/package_discovery.rst
index 0e0d27c5b2..3915408d67 100644
--- a/docs/userguide/package_discovery.rst
+++ b/docs/userguide/package_discovery.rst
@@ -6,13 +6,13 @@ Package Discovery and Namespace Package
.. note::
a full specification for the keyword supplied to ``setup.cfg`` or
- ``setup.py`` can be found at :ref:`keywords reference <keywords_ref>`
+ ``setup.py`` can be found at :doc:`keywords reference <keywords>`
.. note::
the examples provided here are only to demonstrate the functionality
introduced. More metadata and options arguments need to be supplied
if you want to replicate them on your system. If you are completely
- new to setuptools, the :ref:`quickstart section <quickstart>` is a good
+ new to setuptools, the :doc:`quickstart section <quickstart>` is a good
place to start.
``Setuptools`` provide powerful tools to handle package discovery, including
@@ -97,6 +97,8 @@ in ``src`` that starts with the name ``pkg`` and not ``additional``:
)
+.. _Namespace Packages:
+
Using ``find_namespace:`` or ``find_namespace_packages``
========================================================
``setuptools`` provides the ``find_namespace:`` (``find_namespace_packages``)
diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
index 5282975102..24ea3e4b52 100644
--- a/docs/userguide/quickstart.rst
+++ b/docs/userguide/quickstart.rst
@@ -21,8 +21,7 @@ the backend (build system) it wants to use. The distribution can then
be generated with whatever tools that provides a ``build sdist``-alike
functionality. While this may appear cumbersome, given the added pieces,
it in fact tremendously enhances the portability of your package. The
-change is driven under `PEP 517 <https://www.python.org/dev/peps/pep-0517/#
-build-requirements>``. To learn more about Python packaging in general,
+change is driven under :pep:`517 <517#build-requirements>`. To learn more about Python packaging in general,
navigate to the `bottom <Resources on python packaging>`_ of this page.
@@ -82,8 +81,8 @@ Automatic package discovery
For simple projects, it's usually easy enough to manually add packages to
the ``packages`` keyword in ``setup.cfg``. However, for very large projects
, it can be a big burden to keep the package list updated. ``setuptools``
-therefore provides two convenient tools to ease the burden: ``find: `` and
-``find_namespace: ``. To use it in your project:
+therefore provides two convenient tools to ease the burden: :literal:`find:\ ` and
+:literal:`find_namespace:\ `. To use it in your project:
.. code-block:: ini
@@ -122,7 +121,7 @@ keyword in your ``setup.cfg``:
When this project is installed, a ``main`` script will be installed and will
invoke the ``some_func`` in the ``__init__.py`` file when called by the user.
For detailed usage, including managing the additional or optional dependencies,
-go to :ref:`entry_point`.
+go to :doc:`entry_point`.
Dependency management
@@ -147,9 +146,11 @@ additional keywords such as ``setup_requires`` that allows you to install
dependencies before running the script, and ``extras_requires`` that take
care of those needed by automatically generated scripts. It also provides
mechanisms to handle dependencies that are not in PyPI. For more advanced use,
-see :ref:`dependency_management`
+see :doc:`dependency_management`
+.. _Including Data Files:
+
Including Data Files
====================
The distutils have traditionally allowed installation of "data files", which
@@ -164,7 +165,7 @@ can simply use the ``include_package_data`` keyword:
This tells setuptools to install any data files it finds in your packages.
The data files must be specified via the distutils' ``MANIFEST.in`` file.
-For more details, see :ref:`datafiles`
+For more details, see :doc:`datafiles`
Development mode
diff --git a/tox.ini b/tox.ini
index 535b67d3b9..828d2c02e3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -49,7 +49,16 @@ extras =
testing
changedir = docs
commands =
- python -m sphinx . {toxinidir}/build/html
+ {envpython} -m sphinx \
+ -j auto \
+ -b html \
+ --color \
+ -a \
+ -n \
+ -W \
+ -d "{temp_dir}/.doctrees" \
+ . \
+ "{toxinidir}/build/html"
[testenv:finalize]
skip_install = True
|
cloud-custodian__cloud-custodian-1510 | Feature request - tenancy
It would be nice to be able to look for resources with `dedicated|default|host` tenancy (see the policy sketch after the list below):
* [ec2](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/dedicated-instance.html)
* [rds](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_VPC.WorkingWithRDSInstanceinaVPC.html) *this is done at the vpc level*
* [redshift](http://docs.aws.amazon.com/redshift/latest/mgmt/managing-clusters-vpc.html) *this is done at the vpc level*
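
A minimal sketch of what such a check could look like today for EC2, assuming the `Placement.Tenancy` attribute returned by `DescribeInstances` is matched with Custodian's generic `value` filter; the policy name and the `tag` action are illustrative only, not part of this request:

```yaml
policies:
  - name: ec2-dedicated-tenancy        # hypothetical policy name
    resource: ec2
    filters:
      # Placement.Tenancy comes back from DescribeInstances and is one of
      # "default", "dedicated", or "host"; matching it with the generic
      # value filter is an assumption here, not a dedicated tenancy filter.
      - type: value
        key: Placement.Tenancy
        value: dedicated
    actions:
      - type: tag                      # illustrative follow-up action
        key: tenancy
        value: dedicated
```

For RDS and Redshift, tenancy is a property of the VPC rather than of the instance or cluster, so a comparable policy would have to resolve the resource's `VpcId` against the VPC's `InstanceTenancy` attribute; that lookup is not sketched here.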
| [
{
"content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport itertools\nimport operator\nimport random\nimport re\n\nimport six\nfrom botocore.exceptions import ClientError\nfrom dateutil.parser import parse\nfrom concurrent.futures import as_completed\n\nfrom c7n.actions import (\n ActionRegistry, BaseAction, ModifyVpcSecurityGroupsAction\n)\nfrom c7n.filters import (\n FilterRegistry, AgeFilter, ValueFilter, Filter, OPERATORS, DefaultVpcBase\n)\nfrom c7n.filters.offhours import OffHour, OnHour\nfrom c7n.filters.health import HealthEventFilter\nimport c7n.filters.vpc as net_filters\n\nfrom c7n.manager import resources\nfrom c7n.query import QueryResourceManager\n\nfrom c7n import utils\nfrom c7n.utils import type_schema\n\n\nfilters = FilterRegistry('ec2.filters')\nactions = ActionRegistry('ec2.actions')\n\nfilters.register('health-event', HealthEventFilter)\n\n\[email protected]('ec2')\nclass EC2(QueryResourceManager):\n\n class resource_type(object):\n service = 'ec2'\n type = 'instance'\n enum_spec = ('describe_instances', 'Reservations[].Instances[]', None)\n detail_spec = None\n id = 'InstanceId'\n filter_name = 'InstanceIds'\n filter_type = 'list'\n name = 'PublicDnsName'\n date = 'LaunchTime'\n dimension = 'InstanceId'\n config_type = \"AWS::EC2::Instance\"\n shape = \"Instance\"\n\n default_report_fields = (\n 'CustodianDate',\n 'InstanceId',\n 'tag:Name',\n 'InstanceType',\n 'LaunchTime',\n 'VpcId',\n 'PrivateIpAddress',\n )\n\n filter_registry = filters\n action_registry = actions\n\n # if we have to do a fallback scenario where tags don't come in describe\n permissions = ('ec2:DescribeTags',)\n\n def __init__(self, ctx, data):\n super(EC2, self).__init__(ctx, data)\n self.queries = QueryFilter.parse(self.data.get('query', []))\n\n def resources(self, query=None):\n q = self.resource_query()\n if q is not None:\n query = query or {}\n query['Filters'] = q\n return super(EC2, self).resources(query=query)\n\n def resource_query(self):\n qf = []\n qf_names = set()\n # allow same name to be specified multiple times and append the queries\n # under the same name\n for q in self.queries:\n qd = q.query()\n if qd['Name'] in qf_names:\n for qf in qf:\n if qd['Name'] == qf['Name']:\n qf['Values'].extend(qd['Values'])\n else:\n qf_names.add(qd['Name'])\n qf.append(qd)\n return qf\n\n def augment(self, resources):\n \"\"\"EC2 API and AWOL Tags\n\n While ec2 api generally returns tags when doing describe_x on for\n various resources, it may also silently fail to do so unless a tag\n is used as a filter.\n\n See footnote on http://goo.gl/YozD9Q for official documentation.\n\n Apriori we may be using custodian to ensure tags (including\n name), so there isn't a good default to ensure that we will\n always get tags from describe_x calls.\n \"\"\"\n\n # First if we're in event based lambda go ahead and skip this,\n # tags can't be trusted in ec2 instances 
immediately post creation.\n if not resources or self.data.get('mode', {}).get('type', '') in (\n 'cloudtrail', 'ec2-instance-state'):\n return resources\n\n # AWOL detector, so we don't make extraneous api calls.\n resource_count = len(resources)\n search_count = min(int(resource_count % 0.05) + 1, 5)\n if search_count > resource_count:\n search_count = resource_count\n found = False\n for r in random.sample(resources, search_count):\n if 'Tags' in r:\n found = True\n break\n\n if found:\n return resources\n\n # Okay go and do the tag lookup\n client = utils.local_session(self.session_factory).client('ec2')\n tag_set = self.retry(\n client.describe_tags,\n Filters=[{'Name': 'resource-type',\n 'Values': ['instance']}])['Tags']\n resource_tags = {}\n for t in tag_set:\n t.pop('ResourceType')\n rid = t.pop('ResourceId')\n resource_tags.setdefault(rid, []).append(t)\n\n m = self.get_model()\n for r in resources:\n r['Tags'] = resource_tags.get(r[m.id], ())\n return resources\n\n\[email protected]('security-group')\nclass SecurityGroupFilter(net_filters.SecurityGroupFilter):\n\n RelatedIdsExpression = \"SecurityGroups[].GroupId\"\n\n\[email protected]('subnet')\nclass SubnetFilter(net_filters.SubnetFilter):\n\n RelatedIdsExpression = \"SubnetId\"\n\n\nfilters.register('network-location', net_filters.NetworkLocation)\n\n\[email protected]('state-age')\nclass StateTransitionAge(AgeFilter):\n \"\"\"Age an instance has been in the given state.\n\n .. code-block: yaml\n\n policies:\n - name: ec2-state-running-7-days\n resource: ec2\n filters:\n - type: state-age\n op: ge\n days: 7\n \"\"\"\n RE_PARSE_AGE = re.compile(\"\\(.*?\\)\")\n\n # this filter doesn't use date_attribute, but needs to define it\n # to pass AgeFilter's validate method\n date_attribute = \"dummy\"\n\n schema = type_schema(\n 'state-age',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'})\n\n def get_resource_date(self, i):\n v = i.get('StateTransitionReason')\n if not v:\n return None\n dates = self.RE_PARSE_AGE.findall(v)\n if dates:\n return parse(dates[0][1:-1])\n return None\n\n\nclass StateTransitionFilter(object):\n \"\"\"Filter instances by state.\n\n Try to simplify construction for policy authors by automatically\n filtering elements (filters or actions) to the instances states\n they are valid for.\n\n For more details see http://goo.gl/TZH9Q5\n \"\"\"\n valid_origin_states = ()\n\n def filter_instance_state(self, instances, states=None):\n states = states or self.valid_origin_states\n orig_length = len(instances)\n results = [i for i in instances\n if i['State']['Name'] in states]\n self.log.info(\"%s %d of %d instances\" % (\n self.__class__.__name__, len(results), orig_length))\n return results\n\n\[email protected]('ebs')\nclass AttachedVolume(ValueFilter):\n \"\"\"EC2 instances with EBS backed volume\n\n Filters EC2 instances with EBS backed storage devices (non ephemeral)\n\n :Example:\n\n .. 
code-block:: yaml\n\n policies:\n - name: ec2-encrypted-ebs-volumes\n resource: ec2\n filters:\n - type: ebs\n key: encrypted\n value: true\n \"\"\"\n\n schema = type_schema(\n 'ebs', rinherit=ValueFilter.schema,\n **{'operator': {'enum': ['and', 'or']},\n 'skip-devices': {'type': 'array', 'items': {'type': 'string'}}})\n\n def get_permissions(self):\n return self.manager.get_resource_manager('ebs').get_permissions()\n\n def process(self, resources, event=None):\n self.volume_map = self.get_volume_mapping(resources)\n self.skip = self.data.get('skip-devices', [])\n self.operator = self.data.get(\n 'operator', 'or') == 'or' and any or all\n return list(filter(self, resources))\n\n def get_volume_mapping(self, resources):\n volume_map = {}\n manager = self.manager.get_resource_manager('ebs')\n for instance_set in utils.chunks(resources, 200):\n volume_ids = []\n for i in instance_set:\n for bd in i.get('BlockDeviceMappings', ()):\n if 'Ebs' not in bd:\n continue\n volume_ids.append(bd['Ebs']['VolumeId'])\n for v in manager.get_resources(volume_ids):\n if not v['Attachments']:\n continue\n volume_map.setdefault(\n v['Attachments'][0]['InstanceId'], []).append(v)\n return volume_map\n\n def __call__(self, i):\n volumes = self.volume_map.get(i['InstanceId'])\n if not volumes:\n return False\n if self.skip:\n for v in list(volumes):\n for a in v.get('Attachments', []):\n if a['Device'] in self.skip:\n volumes.remove(v)\n return self.operator(map(self.match, volumes))\n\n\nclass InstanceImageBase(object):\n\n def prefetch_instance_images(self, instances):\n image_ids = [i['ImageId'] for i in instances if 'c7n:instance-image' not in i]\n self.image_map = self.get_local_image_mapping(image_ids)\n\n def get_base_image_mapping(self):\n return {i['ImageId']: i for i in\n self.manager.get_resource_manager('ami').resources()}\n\n def get_instance_image(self, instance):\n image = instance.get('c7n:instance-image', None)\n if not image:\n image = instance['c7n:instance-image'] = self.image_map.get(instance['ImageId'], None)\n return image\n\n def get_local_image_mapping(self, image_ids):\n base_image_map = self.get_base_image_mapping()\n resources = {i: base_image_map[i] for i in image_ids if i in base_image_map}\n missing = list(set(image_ids) - set(resources.keys()))\n if missing:\n loaded = self.manager.get_resource_manager('ami').get_resources(missing, False)\n resources.update({image['ImageId']: image for image in loaded})\n return resources\n\n\[email protected]('image-age')\nclass ImageAge(AgeFilter, InstanceImageBase):\n \"\"\"EC2 AMI age filter\n\n Filters EC2 instances based on the age of their AMI image (in days)\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-ancient-ami\n resource: ec2\n filters:\n - type: image-age\n op: ge\n days: 90\n \"\"\"\n\n date_attribute = \"CreationDate\"\n\n schema = type_schema(\n 'image-age',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'})\n\n def get_permissions(self):\n return self.manager.get_resource_manager('ami').get_permissions()\n\n def process(self, resources, event=None):\n self.prefetch_instance_images(resources)\n return super(ImageAge, self).process(resources, event)\n\n def get_resource_date(self, i):\n image = self.get_instance_image(i)\n if image:\n return parse(image['CreationDate'])\n else:\n return parse(\"2000-01-01T01:01:01.000Z\")\n\n\[email protected]('image')\nclass InstanceImage(ValueFilter, InstanceImageBase):\n\n schema = type_schema('image', rinherit=ValueFilter.schema)\n\n def get_permissions(self):\n return self.manager.get_resource_manager('ami').get_permissions()\n\n def process(self, resources, event=None):\n self.prefetch_instance_images(resources)\n return super(InstanceImage, self).process(resources, event)\n\n def __call__(self, i):\n image = self.get_instance_image(i)\n # Finally, if we have no image...\n if not image:\n self.log.warning(\n \"Could not locate image for instance:%s ami:%s\" % (\n i['InstanceId'], i[\"ImageId\"]))\n # Match instead on empty skeleton?\n return False\n return self.match(image)\n\n\[email protected]('offhour')\nclass InstanceOffHour(OffHour, StateTransitionFilter):\n \"\"\"Custodian OffHour filter\n\n Filters running EC2 instances with the intent to stop at a given hour of\n the day.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: onhour-evening-stop\n resource: ec2\n filters:\n - type: offhour\n tag: custodian_downtime\n default_tz: et\n offhour: 20\n actions:\n - stop\n \"\"\"\n\n valid_origin_states = ('running',)\n\n def process(self, resources, event=None):\n return super(InstanceOffHour, self).process(\n self.filter_instance_state(resources))\n\n\[email protected]('onhour')\nclass InstanceOnHour(OnHour, StateTransitionFilter):\n \"\"\"Custodian OnHour filter\n\n Filters stopped EC2 instances with the intent to start at a given hour of\n the day.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: onhour-morning-start\n resource: ec2\n filters:\n - type: onhour\n tag: custodian_downtime\n default_tz: et\n onhour: 6\n actions:\n - start\n \"\"\"\n\n valid_origin_states = ('stopped',)\n\n def process(self, resources, event=None):\n return super(InstanceOnHour, self).process(\n self.filter_instance_state(resources))\n\n\[email protected]('ephemeral')\nclass EphemeralInstanceFilter(Filter):\n \"\"\"EC2 instances with ephemeral storage\n\n Filters EC2 instances that have ephemeral storage (an instance-store backed\n root device)\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-ephemeral-instances\n resource: ec2\n filters:\n - type: ephemeral\n\n http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html\n \"\"\"\n\n schema = type_schema('ephemeral')\n\n def __call__(self, i):\n return self.is_ephemeral(i)\n\n @staticmethod\n def is_ephemeral(i):\n for bd in i.get('BlockDeviceMappings', []):\n if bd['DeviceName'] in ('/dev/sda1', '/dev/xvda'):\n if 'Ebs' in bd:\n return False\n return True\n return True\n\n\[email protected]('instance-uptime')\nclass UpTimeFilter(AgeFilter):\n\n date_attribute = \"LaunchTime\"\n\n schema = type_schema(\n 'instance-uptime',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'})\n\n\[email protected]('instance-age')\nclass InstanceAgeFilter(AgeFilter):\n \"\"\"Filters instances based on their age (in days)\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-30-days-plus\n resource: ec2\n filters:\n - type: instance-age\n op: ge\n days: 30\n \"\"\"\n\n date_attribute = \"LaunchTime\"\n ebs_key_func = operator.itemgetter('AttachTime')\n\n schema = type_schema(\n 'instance-age',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'},\n hours={'type': 'number'},\n minutes={'type': 'number'})\n\n def get_resource_date(self, i):\n # LaunchTime is basically how long has the instance\n # been on, use the oldest ebs vol attach time\n ebs_vols = [\n block['Ebs'] for block in i['BlockDeviceMappings']\n if 'Ebs' in block]\n if not ebs_vols:\n # Fall back to using age attribute (ephemeral instances)\n return super(InstanceAgeFilter, self).get_resource_date(i)\n # Lexographical sort on date\n ebs_vols = sorted(ebs_vols, key=self.ebs_key_func)\n return ebs_vols[0]['AttachTime']\n\n\[email protected]('default-vpc')\nclass DefaultVpc(DefaultVpcBase):\n \"\"\" Matches if an ec2 database is in the default vpc\n \"\"\"\n\n schema = type_schema('default-vpc')\n\n def __call__(self, ec2):\n return ec2.get('VpcId') and self.match(ec2.get('VpcId')) or False\n\n\[email protected]('singleton')\nclass SingletonFilter(Filter, StateTransitionFilter):\n \"\"\"EC2 instances without autoscaling or a recover alarm\n\n Filters EC2 instances that are not members of an autoscaling group\n and do not have Cloudwatch recover alarms.\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-recover-instances\n resource: ec2\n filters:\n - singleton\n actions:\n - type: tag\n key: problem\n value: instance is not resilient\n\n https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-recover.html\n \"\"\"\n\n schema = type_schema('singleton')\n\n permissions = ('cloudwatch:DescribeAlarmsForMetric',)\n\n valid_origin_states = ('running', 'stopped', 'pending', 'stopping')\n\n in_asg = ValueFilter({\n 'key': 'tag:aws:autoscaling:groupName',\n 'value': 'not-null'}).validate()\n\n def process(self, instances, event=None):\n return super(SingletonFilter, self).process(\n self.filter_instance_state(instances))\n\n def __call__(self, i):\n if self.in_asg(i):\n return False\n else:\n return not self.has_recover_alarm(i)\n\n def has_recover_alarm(self, i):\n client = utils.local_session(self.manager.session_factory).client('cloudwatch')\n alarms = client.describe_alarms_for_metric(\n MetricName='StatusCheckFailed_System',\n Namespace='AWS/EC2',\n Dimensions=[\n {\n 'Name': 'InstanceId',\n 'Value': i['InstanceId']\n }\n ]\n )\n\n for i in alarms['MetricAlarms']:\n for a in i['AlarmActions']:\n if (\n a.startswith('arn:aws:automate:') and\n a.endswith(':ec2:recover')\n ):\n return True\n\n return False\n\n\[email protected]('start')\nclass Start(BaseAction, StateTransitionFilter):\n \"\"\"Starts a previously stopped EC2 instance.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-start-stopped-instances\n resource: ec2\n query:\n - instance-state-name: stopped\n actions:\n - start\n\n http://docs.aws.amazon.com/cli/latest/reference/ec2/start-instances.html\n \"\"\"\n\n valid_origin_states = ('stopped',)\n schema = type_schema('start')\n permissions = ('ec2:StartInstances',)\n batch_size = 10\n exception = None\n\n def _filter_ec2_with_volumes(self, instances):\n return [i for i in instances if len(i['BlockDeviceMappings']) > 0]\n\n def process(self, instances):\n instances = self._filter_ec2_with_volumes(\n self.filter_instance_state(instances))\n if not len(instances):\n return\n\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n\n # Play nice around aws having insufficient capacity...\n for itype, t_instances in utils.group_by(\n instances, 'InstanceType').items():\n for izone, z_instances in utils.group_by(\n t_instances, 'AvailabilityZone').items():\n for batch in utils.chunks(z_instances, self.batch_size):\n self.process_instance_set(client, batch, itype, izone)\n\n # Raise an exception after all batches process\n if self.exception:\n if self.exception.response['Error']['Code'] not in ('InsufficientInstanceCapacity'):\n self.log.exception(\"Error while starting instances error %s\", self.exception)\n raise self.exception\n\n def process_instance_set(self, client, instances, itype, izone):\n # Setup retry with insufficient capacity as well\n retry = utils.get_retry((\n 'InsufficientInstanceCapacity',\n 'RequestLimitExceeded', 'Client.RequestLimitExceeded'),\n max_attempts=5)\n instance_ids = [i['InstanceId'] for i in instances]\n try:\n retry(client.start_instances, InstanceIds=instance_ids)\n except ClientError as e:\n # Saving exception\n self.exception = e\n self.log.exception(\n (\"Could not start instances:%d type:%s\"\n \" zone:%s instances:%s error:%s\"),\n len(instances), itype, izone,\n \", \".join(instance_ids), e)\n return\n\n\[email protected]('resize')\nclass Resize(BaseAction, StateTransitionFilter):\n \"\"\"Change an instance's size.\n\n An instance can only be resized when 
its stopped, this action\n can optionally restart an instance if needed to effect the instance\n type change. Instances are always left in the run state they were\n found in.\n\n There are a few caveats to be aware of, instance resizing\n needs to maintain compatibility for architecture, virtualization type\n hvm/pv, and ebs optimization at minimum.\n\n http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-resize.html\n \"\"\"\n\n schema = type_schema(\n 'resize',\n **{'restart': {'type': 'boolean'},\n 'type-map': {'type': 'object'},\n 'default': {'type': 'string'}})\n\n valid_origin_states = ('running', 'stopped')\n\n def get_permissions(self):\n perms = ('ec2:DescribeInstances', 'ec2:ModifyInstanceAttribute')\n if self.data.get('restart', False):\n perms += ('ec2:StopInstances', 'ec2:StartInstances')\n return perms\n\n def process(self, resources):\n stopped_instances = self.filter_instance_state(\n resources, ('stopped',))\n running_instances = self.filter_instance_state(\n resources, ('running',))\n\n if self.data.get('restart') and running_instances:\n Stop({'terminate-ephemeral': False},\n self.manager).process(running_instances)\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n waiter = client.get_waiter('instance_stopped')\n try:\n waiter.wait(\n InstanceIds=[r['InstanceId'] for r in running_instances])\n except ClientError as e:\n self.log.exception(\n \"Exception stopping instances for resize:\\n %s\" % e)\n\n for instance_set in utils.chunks(itertools.chain(\n stopped_instances, running_instances), 20):\n self.process_resource_set(instance_set)\n\n if self.data.get('restart') and running_instances:\n client.start_instances(\n InstanceIds=[i['InstanceId'] for i in running_instances])\n return list(itertools.chain(stopped_instances, running_instances))\n\n def process_resource_set(self, instance_set):\n type_map = self.data.get('type-map')\n default_type = self.data.get('default')\n\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n\n for i in instance_set:\n self.log.debug(\n \"resizing %s %s\" % (i['InstanceId'], i['InstanceType']))\n new_type = type_map.get(i['InstanceType'], default_type)\n if new_type == i['InstanceType']:\n continue\n try:\n client.modify_instance_attribute(\n InstanceId=i['InstanceId'],\n InstanceType={'Value': new_type})\n except ClientError as e:\n self.log.exception(\n \"Exception resizing instance:%s new:%s old:%s \\n %s\" % (\n i['InstanceId'], new_type, i['InstanceType'], e))\n\n\[email protected]('stop')\nclass Stop(BaseAction, StateTransitionFilter):\n \"\"\"Stops a running EC2 instances\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-stop-running-instances\n resource: ec2\n query:\n - instance-state-name: running\n actions:\n - stop\n \"\"\"\n valid_origin_states = ('running',)\n\n schema = type_schema('stop', **{'terminate-ephemeral': {'type': 'boolean'}})\n\n def get_permissions(self):\n perms = ('ec2:StopInstances',)\n if self.data.get('terminate-ephemeral', False):\n perms += ('ec2:TerminateInstances',)\n return perms\n\n def split_on_storage(self, instances):\n ephemeral = []\n persistent = []\n for i in instances:\n if EphemeralInstanceFilter.is_ephemeral(i):\n ephemeral.append(i)\n else:\n persistent.append(i)\n return ephemeral, persistent\n\n def process(self, instances):\n instances = self.filter_instance_state(instances)\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n # Ephemeral instance can't be stopped.\n ephemeral, persistent = self.split_on_storage(instances)\n if self.data.get('terminate-ephemeral', False) and ephemeral:\n self._run_instances_op(\n client.terminate_instances,\n [i['InstanceId'] for i in ephemeral])\n if persistent:\n self._run_instances_op(\n client.stop_instances,\n [i['InstanceId'] for i in persistent])\n return instances\n\n def _run_instances_op(self, op, instance_ids):\n while True:\n try:\n return self.manager.retry(op, InstanceIds=instance_ids)\n except ClientError as e:\n if e.response['Error']['Code'] == 'IncorrectInstanceState':\n msg = e.response['Error']['Message']\n e_instance_id = msg[msg.find(\"'\") + 1:msg.rfind(\"'\")]\n instance_ids.remove(e_instance_id)\n if not instance_ids:\n return\n continue\n raise\n\n\[email protected]('terminate')\nclass Terminate(BaseAction, StateTransitionFilter):\n \"\"\" Terminate a set of instances.\n\n While ec2 offers a bulk delete api, any given instance can be configured\n with api deletion termination protection, so we can't use the bulk call\n reliabily, we need to process the instances individually. Additionally\n If we're configured with 'force' then we'll turn off instance termination\n protection.\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-process-termination\n resource: ec2\n filters:\n - type: marked-for-op\n op: terminate\n actions:\n - terminate\n \"\"\"\n\n valid_origin_states = ('running', 'stopped', 'pending', 'stopping')\n\n schema = type_schema('terminate', force={'type': 'boolean'})\n\n def get_permissions(self):\n permissions = (\"ec2:TerminateInstances\",)\n if self.data.get('force'):\n permissions += ('ec2:ModifyInstanceAttribute',)\n return permissions\n\n def process(self, instances):\n instances = self.filter_instance_state(instances)\n if not len(instances):\n return\n if self.data.get('force'):\n self.log.info(\"Disabling termination protection on instances\")\n self.disable_deletion_protection(instances)\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n # limit batch sizes to avoid api limits\n for batch in utils.chunks(instances, 100):\n self.manager.retry(\n client.terminate_instances,\n InstanceIds=[i['InstanceId'] for i in instances])\n\n def disable_deletion_protection(self, instances):\n\n @utils.worker\n def process_instance(i):\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n try:\n self.manager.retry(\n client.modify_instance_attribute,\n InstanceId=i['InstanceId'],\n Attribute='disableApiTermination',\n Value='false')\n except ClientError as e:\n if e.response['Error']['Code'] == 'IncorrectInstanceState':\n return\n raise\n\n with self.executor_factory(max_workers=2) as w:\n list(w.map(process_instance, instances))\n\n\[email protected]('snapshot')\nclass Snapshot(BaseAction):\n \"\"\"Snapshots volumes attached to an EC2 instance\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-snapshots\n resource: ec2\n actions:\n - type: snapshot\n copy-tags:\n - Name\n \"\"\"\n\n schema = type_schema(\n 'snapshot',\n **{'copy-tags': {'type': 'array', 'items': {'type': 'string'}}})\n permissions = ('ec2:CreateSnapshot', 'ec2:CreateTags',)\n\n def process(self, resources):\n for resource in resources:\n with self.executor_factory(max_workers=2) as w:\n futures = []\n futures.append(w.submit(self.process_volume_set, resource))\n for f in as_completed(futures):\n if f.exception():\n self.log.error(\n \"Exception creating snapshot set \\n %s\" % (\n f.exception()))\n\n @utils.worker\n def process_volume_set(self, resource):\n c = utils.local_session(self.manager.session_factory).client('ec2')\n for block_device in resource['BlockDeviceMappings']:\n if 'Ebs' not in block_device:\n continue\n volume_id = block_device['Ebs']['VolumeId']\n description = \"Automated,Backup,%s,%s\" % (\n resource['InstanceId'],\n volume_id)\n try:\n response = c.create_snapshot(\n DryRun=self.manager.config.dryrun,\n VolumeId=volume_id,\n Description=description)\n except ClientError as e:\n if e.response['Error']['Code'] == 'IncorrectState':\n self.log.warning(\n \"action:%s volume:%s is incorrect state\" % (\n self.__class__.__name__.lower(),\n volume_id))\n continue\n raise\n\n tags = [\n {'Key': 'Name', 'Value': volume_id},\n {'Key': 'InstanceId', 'Value': resource['InstanceId']},\n {'Key': 'DeviceName', 'Value': block_device['DeviceName']},\n {'Key': 'custodian_snapshot', 'Value': ''}\n ]\n\n copy_keys = self.data.get('copy-tags', [])\n copy_tags = []\n if copy_keys:\n for t in resource.get('Tags', []):\n if t['Key'] in copy_keys:\n copy_tags.append(t)\n\n if len(copy_tags) + len(tags) > 40:\n self.log.warning(\n \"action:%s volume:%s too many tags to copy\" % (\n self.__class__.__name__.lower(),\n 
volume_id))\n copy_tags = []\n\n tags.extend(copy_tags)\n c.create_tags(\n DryRun=self.manager.config.dryrun,\n Resources=[\n response['SnapshotId']],\n Tags=tags)\n\n\[email protected]('modify-security-groups')\nclass EC2ModifyVpcSecurityGroups(ModifyVpcSecurityGroupsAction):\n \"\"\"Modify security groups on an instance.\"\"\"\n\n permissions = (\"ec2:ModifyNetworkInterfaceAttribute\",)\n\n def process(self, instances):\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n\n # handle multiple ENIs\n interfaces = []\n for i in instances:\n for eni in i['NetworkInterfaces']:\n if i.get('c7n:matched-security-groups'):\n eni['c7n:matched-security-groups'] = i[\n 'c7n:matched-security-groups']\n interfaces.append(eni)\n\n groups = super(EC2ModifyVpcSecurityGroups, self).get_groups(interfaces)\n\n for idx, i in enumerate(interfaces):\n client.modify_network_interface_attribute(\n NetworkInterfaceId=i['NetworkInterfaceId'],\n Groups=groups[idx])\n\n\[email protected]('autorecover-alarm')\nclass AutorecoverAlarm(BaseAction, StateTransitionFilter):\n \"\"\"Adds a cloudwatch metric alarm to recover an EC2 instance.\n\n This action takes effect on instances that are NOT part\n of an ASG.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-autorecover-alarm\n resource: ec2\n filters:\n - singleton\n actions:\n - autorecover-alarm\n\n https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-recover.html\n \"\"\"\n\n schema = type_schema('autorecover-alarm')\n permissions = ('ec2:DescribeInstanceStatus',\n 'ec2:RecoverInstances',\n 'ec2:DescribeInstanceRecoveryAttribute')\n\n valid_origin_states = ('running', 'stopped', 'pending', 'stopping')\n filter_asg_membership = ValueFilter({\n 'key': 'tag:aws:autoscaling:groupName',\n 'value': 'empty'}).validate()\n\n def process(self, instances):\n instances = self.filter_asg_membership.process(\n self.filter_instance_state(instances))\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('cloudwatch')\n for i in instances:\n client.put_metric_alarm(\n AlarmName='recover-{}'.format(i['InstanceId']),\n AlarmDescription='Auto Recover {}'.format(i['InstanceId']),\n ActionsEnabled=True,\n AlarmActions=[\n 'arn:aws:automate:{}:ec2:recover'.format(\n i['Placement']['AvailabilityZone'][:-1])\n ],\n MetricName='StatusCheckFailed_System',\n Namespace='AWS/EC2',\n Statistic='Minimum',\n Dimensions=[\n {\n 'Name': 'InstanceId',\n 'Value': i['InstanceId']\n }\n ],\n Period=60,\n EvaluationPeriods=2,\n Threshold=0,\n ComparisonOperator='GreaterThanThreshold'\n )\n\n\[email protected]('set-instance-profile')\nclass SetInstanceProfile(BaseAction, StateTransitionFilter):\n \"\"\"Sets (or removes) the instance profile for a running EC2 instance.\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: set-default-instance-profile\n resource: ec2\n query:\n - IamInstanceProfile: absent\n actions:\n - type: set-instance-profile\n name: default\n\n https://docs.aws.amazon.com/cli/latest/reference/ec2/associate-iam-instance-profile.html\n https://docs.aws.amazon.com/cli/latest/reference/ec2/disassociate-iam-instance-profile.html\n \"\"\"\n\n schema = type_schema(\n 'set-instance-profile',\n **{'name': {'type': 'string'}})\n\n permissions = (\n 'ec2:AssociateIamInstanceProfile',\n 'ec2:DisassociateIamInstanceProfile',\n 'iam:PassRole')\n\n valid_origin_states = ('running', 'pending')\n\n def process(self, instances):\n instances = self.filter_instance_state(instances)\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n profile_name = self.data.get('name', '')\n\n for i in instances:\n if profile_name:\n client.associate_iam_instance_profile(\n IamInstanceProfile={'Name': self.data.get('name', '')},\n InstanceId=i['InstanceId'])\n else:\n response = client.describe_iam_instance_profile_associations(\n Filters=[\n {\n 'Name': 'instance-id',\n 'Values': [i['InstanceId']],\n },\n {\n 'Name': 'state',\n 'Values': ['associating', 'associated']\n }\n ]\n )\n for a in response['IamInstanceProfileAssociations']:\n client.disassociate_iam_instance_profile(\n AssociationId=a['AssociationId']\n )\n\n return instances\n\n\n# Valid EC2 Query Filters\n# http://docs.aws.amazon.com/AWSEC2/latest/CommandLineReference/ApiReference-cmd-DescribeInstances.html\nEC2_VALID_FILTERS = {\n 'architecture': ('i386', 'x86_64'),\n 'availability-zone': str,\n 'iam-instance-profile.arn': str,\n 'image-id': str,\n 'instance-id': str,\n 'instance-lifecycle': ('spot',),\n 'instance-state-name': (\n 'pending',\n 'terminated',\n 'running',\n 'shutting-down',\n 'stopping',\n 'stopped'),\n 'instance.group-id': str,\n 'instance.group-name': str,\n 'tag-key': str,\n 'tag-value': str,\n 'tag:': str,\n 'vpc-id': str}\n\n\nclass QueryFilter(object):\n\n @classmethod\n def parse(cls, data):\n results = []\n for d in data:\n if not isinstance(d, dict):\n raise ValueError(\n \"EC2 Query Filter Invalid structure %s\" % d)\n results.append(cls(d).validate())\n return results\n\n def __init__(self, data):\n self.data = data\n self.key = None\n self.value = None\n\n def validate(self):\n if not len(list(self.data.keys())) == 1:\n raise ValueError(\n \"EC2 Query Filter Invalid %s\" % self.data)\n self.key = list(self.data.keys())[0]\n self.value = list(self.data.values())[0]\n\n if self.key not in EC2_VALID_FILTERS and not self.key.startswith(\n 'tag:'):\n raise ValueError(\n \"EC2 Query Filter invalid filter name %s\" % (self.data))\n\n if self.value is None:\n raise ValueError(\n \"EC2 Query Filters must have a value, use tag-key\"\n \" w/ tag name as value for tag present checks\"\n \" %s\" % self.data)\n return self\n\n def query(self):\n value = self.value\n if isinstance(self.value, six.string_types):\n value = [self.value]\n\n return {'Name': self.key, 'Values': value}\n",
"path": "c7n/resources/ec2.py"
}
] | [
{
"content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport itertools\nimport operator\nimport random\nimport re\n\nimport six\nfrom botocore.exceptions import ClientError\nfrom dateutil.parser import parse\nfrom concurrent.futures import as_completed\n\nfrom c7n.actions import (\n ActionRegistry, BaseAction, ModifyVpcSecurityGroupsAction\n)\nfrom c7n.filters import (\n FilterRegistry, AgeFilter, ValueFilter, Filter, OPERATORS, DefaultVpcBase\n)\nfrom c7n.filters.offhours import OffHour, OnHour\nfrom c7n.filters.health import HealthEventFilter\nimport c7n.filters.vpc as net_filters\n\nfrom c7n.manager import resources\nfrom c7n.query import QueryResourceManager\n\nfrom c7n import utils\nfrom c7n.utils import type_schema\n\n\nfilters = FilterRegistry('ec2.filters')\nactions = ActionRegistry('ec2.actions')\n\nfilters.register('health-event', HealthEventFilter)\n\n\[email protected]('ec2')\nclass EC2(QueryResourceManager):\n\n class resource_type(object):\n service = 'ec2'\n type = 'instance'\n enum_spec = ('describe_instances', 'Reservations[].Instances[]', None)\n detail_spec = None\n id = 'InstanceId'\n filter_name = 'InstanceIds'\n filter_type = 'list'\n name = 'PublicDnsName'\n date = 'LaunchTime'\n dimension = 'InstanceId'\n config_type = \"AWS::EC2::Instance\"\n shape = \"Instance\"\n\n default_report_fields = (\n 'CustodianDate',\n 'InstanceId',\n 'tag:Name',\n 'InstanceType',\n 'LaunchTime',\n 'VpcId',\n 'PrivateIpAddress',\n )\n\n filter_registry = filters\n action_registry = actions\n\n # if we have to do a fallback scenario where tags don't come in describe\n permissions = ('ec2:DescribeTags',)\n\n def __init__(self, ctx, data):\n super(EC2, self).__init__(ctx, data)\n self.queries = QueryFilter.parse(self.data.get('query', []))\n\n def resources(self, query=None):\n q = self.resource_query()\n if q is not None:\n query = query or {}\n query['Filters'] = q\n return super(EC2, self).resources(query=query)\n\n def resource_query(self):\n qf = []\n qf_names = set()\n # allow same name to be specified multiple times and append the queries\n # under the same name\n for q in self.queries:\n qd = q.query()\n if qd['Name'] in qf_names:\n for qf in qf:\n if qd['Name'] == qf['Name']:\n qf['Values'].extend(qd['Values'])\n else:\n qf_names.add(qd['Name'])\n qf.append(qd)\n return qf\n\n def augment(self, resources):\n \"\"\"EC2 API and AWOL Tags\n\n While ec2 api generally returns tags when doing describe_x on for\n various resources, it may also silently fail to do so unless a tag\n is used as a filter.\n\n See footnote on http://goo.gl/YozD9Q for official documentation.\n\n Apriori we may be using custodian to ensure tags (including\n name), so there isn't a good default to ensure that we will\n always get tags from describe_x calls.\n \"\"\"\n\n # First if we're in event based lambda go ahead and skip this,\n # tags can't be trusted in ec2 instances 
immediately post creation.\n if not resources or self.data.get('mode', {}).get('type', '') in (\n 'cloudtrail', 'ec2-instance-state'):\n return resources\n\n # AWOL detector, so we don't make extraneous api calls.\n resource_count = len(resources)\n search_count = min(int(resource_count % 0.05) + 1, 5)\n if search_count > resource_count:\n search_count = resource_count\n found = False\n for r in random.sample(resources, search_count):\n if 'Tags' in r:\n found = True\n break\n\n if found:\n return resources\n\n # Okay go and do the tag lookup\n client = utils.local_session(self.session_factory).client('ec2')\n tag_set = self.retry(\n client.describe_tags,\n Filters=[{'Name': 'resource-type',\n 'Values': ['instance']}])['Tags']\n resource_tags = {}\n for t in tag_set:\n t.pop('ResourceType')\n rid = t.pop('ResourceId')\n resource_tags.setdefault(rid, []).append(t)\n\n m = self.get_model()\n for r in resources:\n r['Tags'] = resource_tags.get(r[m.id], ())\n return resources\n\n\[email protected]('security-group')\nclass SecurityGroupFilter(net_filters.SecurityGroupFilter):\n\n RelatedIdsExpression = \"SecurityGroups[].GroupId\"\n\n\[email protected]('subnet')\nclass SubnetFilter(net_filters.SubnetFilter):\n\n RelatedIdsExpression = \"SubnetId\"\n\n\nfilters.register('network-location', net_filters.NetworkLocation)\n\n\[email protected]('state-age')\nclass StateTransitionAge(AgeFilter):\n \"\"\"Age an instance has been in the given state.\n\n .. code-block: yaml\n\n policies:\n - name: ec2-state-running-7-days\n resource: ec2\n filters:\n - type: state-age\n op: ge\n days: 7\n \"\"\"\n RE_PARSE_AGE = re.compile(\"\\(.*?\\)\")\n\n # this filter doesn't use date_attribute, but needs to define it\n # to pass AgeFilter's validate method\n date_attribute = \"dummy\"\n\n schema = type_schema(\n 'state-age',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'})\n\n def get_resource_date(self, i):\n v = i.get('StateTransitionReason')\n if not v:\n return None\n dates = self.RE_PARSE_AGE.findall(v)\n if dates:\n return parse(dates[0][1:-1])\n return None\n\n\nclass StateTransitionFilter(object):\n \"\"\"Filter instances by state.\n\n Try to simplify construction for policy authors by automatically\n filtering elements (filters or actions) to the instances states\n they are valid for.\n\n For more details see http://goo.gl/TZH9Q5\n \"\"\"\n valid_origin_states = ()\n\n def filter_instance_state(self, instances, states=None):\n states = states or self.valid_origin_states\n orig_length = len(instances)\n results = [i for i in instances\n if i['State']['Name'] in states]\n self.log.info(\"%s %d of %d instances\" % (\n self.__class__.__name__, len(results), orig_length))\n return results\n\n\[email protected]('ebs')\nclass AttachedVolume(ValueFilter):\n \"\"\"EC2 instances with EBS backed volume\n\n Filters EC2 instances with EBS backed storage devices (non ephemeral)\n\n :Example:\n\n .. 
code-block:: yaml\n\n policies:\n - name: ec2-encrypted-ebs-volumes\n resource: ec2\n filters:\n - type: ebs\n key: encrypted\n value: true\n \"\"\"\n\n schema = type_schema(\n 'ebs', rinherit=ValueFilter.schema,\n **{'operator': {'enum': ['and', 'or']},\n 'skip-devices': {'type': 'array', 'items': {'type': 'string'}}})\n\n def get_permissions(self):\n return self.manager.get_resource_manager('ebs').get_permissions()\n\n def process(self, resources, event=None):\n self.volume_map = self.get_volume_mapping(resources)\n self.skip = self.data.get('skip-devices', [])\n self.operator = self.data.get(\n 'operator', 'or') == 'or' and any or all\n return list(filter(self, resources))\n\n def get_volume_mapping(self, resources):\n volume_map = {}\n manager = self.manager.get_resource_manager('ebs')\n for instance_set in utils.chunks(resources, 200):\n volume_ids = []\n for i in instance_set:\n for bd in i.get('BlockDeviceMappings', ()):\n if 'Ebs' not in bd:\n continue\n volume_ids.append(bd['Ebs']['VolumeId'])\n for v in manager.get_resources(volume_ids):\n if not v['Attachments']:\n continue\n volume_map.setdefault(\n v['Attachments'][0]['InstanceId'], []).append(v)\n return volume_map\n\n def __call__(self, i):\n volumes = self.volume_map.get(i['InstanceId'])\n if not volumes:\n return False\n if self.skip:\n for v in list(volumes):\n for a in v.get('Attachments', []):\n if a['Device'] in self.skip:\n volumes.remove(v)\n return self.operator(map(self.match, volumes))\n\n\nclass InstanceImageBase(object):\n\n def prefetch_instance_images(self, instances):\n image_ids = [i['ImageId'] for i in instances if 'c7n:instance-image' not in i]\n self.image_map = self.get_local_image_mapping(image_ids)\n\n def get_base_image_mapping(self):\n return {i['ImageId']: i for i in\n self.manager.get_resource_manager('ami').resources()}\n\n def get_instance_image(self, instance):\n image = instance.get('c7n:instance-image', None)\n if not image:\n image = instance['c7n:instance-image'] = self.image_map.get(instance['ImageId'], None)\n return image\n\n def get_local_image_mapping(self, image_ids):\n base_image_map = self.get_base_image_mapping()\n resources = {i: base_image_map[i] for i in image_ids if i in base_image_map}\n missing = list(set(image_ids) - set(resources.keys()))\n if missing:\n loaded = self.manager.get_resource_manager('ami').get_resources(missing, False)\n resources.update({image['ImageId']: image for image in loaded})\n return resources\n\n\[email protected]('image-age')\nclass ImageAge(AgeFilter, InstanceImageBase):\n \"\"\"EC2 AMI age filter\n\n Filters EC2 instances based on the age of their AMI image (in days)\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-ancient-ami\n resource: ec2\n filters:\n - type: image-age\n op: ge\n days: 90\n \"\"\"\n\n date_attribute = \"CreationDate\"\n\n schema = type_schema(\n 'image-age',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'})\n\n def get_permissions(self):\n return self.manager.get_resource_manager('ami').get_permissions()\n\n def process(self, resources, event=None):\n self.prefetch_instance_images(resources)\n return super(ImageAge, self).process(resources, event)\n\n def get_resource_date(self, i):\n image = self.get_instance_image(i)\n if image:\n return parse(image['CreationDate'])\n else:\n return parse(\"2000-01-01T01:01:01.000Z\")\n\n\[email protected]('image')\nclass InstanceImage(ValueFilter, InstanceImageBase):\n\n schema = type_schema('image', rinherit=ValueFilter.schema)\n\n def get_permissions(self):\n return self.manager.get_resource_manager('ami').get_permissions()\n\n def process(self, resources, event=None):\n self.prefetch_instance_images(resources)\n return super(InstanceImage, self).process(resources, event)\n\n def __call__(self, i):\n image = self.get_instance_image(i)\n # Finally, if we have no image...\n if not image:\n self.log.warning(\n \"Could not locate image for instance:%s ami:%s\" % (\n i['InstanceId'], i[\"ImageId\"]))\n # Match instead on empty skeleton?\n return False\n return self.match(image)\n\n\[email protected]('offhour')\nclass InstanceOffHour(OffHour, StateTransitionFilter):\n \"\"\"Custodian OffHour filter\n\n Filters running EC2 instances with the intent to stop at a given hour of\n the day.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: onhour-evening-stop\n resource: ec2\n filters:\n - type: offhour\n tag: custodian_downtime\n default_tz: et\n offhour: 20\n actions:\n - stop\n \"\"\"\n\n valid_origin_states = ('running',)\n\n def process(self, resources, event=None):\n return super(InstanceOffHour, self).process(\n self.filter_instance_state(resources))\n\n\[email protected]('onhour')\nclass InstanceOnHour(OnHour, StateTransitionFilter):\n \"\"\"Custodian OnHour filter\n\n Filters stopped EC2 instances with the intent to start at a given hour of\n the day.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: onhour-morning-start\n resource: ec2\n filters:\n - type: onhour\n tag: custodian_downtime\n default_tz: et\n onhour: 6\n actions:\n - start\n \"\"\"\n\n valid_origin_states = ('stopped',)\n\n def process(self, resources, event=None):\n return super(InstanceOnHour, self).process(\n self.filter_instance_state(resources))\n\n\[email protected]('ephemeral')\nclass EphemeralInstanceFilter(Filter):\n \"\"\"EC2 instances with ephemeral storage\n\n Filters EC2 instances that have ephemeral storage (an instance-store backed\n root device)\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-ephemeral-instances\n resource: ec2\n filters:\n - type: ephemeral\n\n http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html\n \"\"\"\n\n schema = type_schema('ephemeral')\n\n def __call__(self, i):\n return self.is_ephemeral(i)\n\n @staticmethod\n def is_ephemeral(i):\n for bd in i.get('BlockDeviceMappings', []):\n if bd['DeviceName'] in ('/dev/sda1', '/dev/xvda'):\n if 'Ebs' in bd:\n return False\n return True\n return True\n\n\[email protected]('instance-uptime')\nclass UpTimeFilter(AgeFilter):\n\n date_attribute = \"LaunchTime\"\n\n schema = type_schema(\n 'instance-uptime',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'})\n\n\[email protected]('instance-age')\nclass InstanceAgeFilter(AgeFilter):\n \"\"\"Filters instances based on their age (in days)\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-30-days-plus\n resource: ec2\n filters:\n - type: instance-age\n op: ge\n days: 30\n \"\"\"\n\n date_attribute = \"LaunchTime\"\n ebs_key_func = operator.itemgetter('AttachTime')\n\n schema = type_schema(\n 'instance-age',\n op={'type': 'string', 'enum': list(OPERATORS.keys())},\n days={'type': 'number'},\n hours={'type': 'number'},\n minutes={'type': 'number'})\n\n def get_resource_date(self, i):\n # LaunchTime is basically how long has the instance\n # been on, use the oldest ebs vol attach time\n ebs_vols = [\n block['Ebs'] for block in i['BlockDeviceMappings']\n if 'Ebs' in block]\n if not ebs_vols:\n # Fall back to using age attribute (ephemeral instances)\n return super(InstanceAgeFilter, self).get_resource_date(i)\n # Lexographical sort on date\n ebs_vols = sorted(ebs_vols, key=self.ebs_key_func)\n return ebs_vols[0]['AttachTime']\n\n\[email protected]('default-vpc')\nclass DefaultVpc(DefaultVpcBase):\n \"\"\" Matches if an ec2 database is in the default vpc\n \"\"\"\n\n schema = type_schema('default-vpc')\n\n def __call__(self, ec2):\n return ec2.get('VpcId') and self.match(ec2.get('VpcId')) or False\n\n\[email protected]('singleton')\nclass SingletonFilter(Filter, StateTransitionFilter):\n \"\"\"EC2 instances without autoscaling or a recover alarm\n\n Filters EC2 instances that are not members of an autoscaling group\n and do not have Cloudwatch recover alarms.\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-recover-instances\n resource: ec2\n filters:\n - singleton\n actions:\n - type: tag\n key: problem\n value: instance is not resilient\n\n https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-recover.html\n \"\"\"\n\n schema = type_schema('singleton')\n\n permissions = ('cloudwatch:DescribeAlarmsForMetric',)\n\n valid_origin_states = ('running', 'stopped', 'pending', 'stopping')\n\n in_asg = ValueFilter({\n 'key': 'tag:aws:autoscaling:groupName',\n 'value': 'not-null'}).validate()\n\n def process(self, instances, event=None):\n return super(SingletonFilter, self).process(\n self.filter_instance_state(instances))\n\n def __call__(self, i):\n if self.in_asg(i):\n return False\n else:\n return not self.has_recover_alarm(i)\n\n def has_recover_alarm(self, i):\n client = utils.local_session(self.manager.session_factory).client('cloudwatch')\n alarms = client.describe_alarms_for_metric(\n MetricName='StatusCheckFailed_System',\n Namespace='AWS/EC2',\n Dimensions=[\n {\n 'Name': 'InstanceId',\n 'Value': i['InstanceId']\n }\n ]\n )\n\n for i in alarms['MetricAlarms']:\n for a in i['AlarmActions']:\n if (\n a.startswith('arn:aws:automate:') and\n a.endswith(':ec2:recover')\n ):\n return True\n\n return False\n\n\[email protected]('start')\nclass Start(BaseAction, StateTransitionFilter):\n \"\"\"Starts a previously stopped EC2 instance.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-start-stopped-instances\n resource: ec2\n query:\n - instance-state-name: stopped\n actions:\n - start\n\n http://docs.aws.amazon.com/cli/latest/reference/ec2/start-instances.html\n \"\"\"\n\n valid_origin_states = ('stopped',)\n schema = type_schema('start')\n permissions = ('ec2:StartInstances',)\n batch_size = 10\n exception = None\n\n def _filter_ec2_with_volumes(self, instances):\n return [i for i in instances if len(i['BlockDeviceMappings']) > 0]\n\n def process(self, instances):\n instances = self._filter_ec2_with_volumes(\n self.filter_instance_state(instances))\n if not len(instances):\n return\n\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n\n # Play nice around aws having insufficient capacity...\n for itype, t_instances in utils.group_by(\n instances, 'InstanceType').items():\n for izone, z_instances in utils.group_by(\n t_instances, 'AvailabilityZone').items():\n for batch in utils.chunks(z_instances, self.batch_size):\n self.process_instance_set(client, batch, itype, izone)\n\n # Raise an exception after all batches process\n if self.exception:\n if self.exception.response['Error']['Code'] not in ('InsufficientInstanceCapacity'):\n self.log.exception(\"Error while starting instances error %s\", self.exception)\n raise self.exception\n\n def process_instance_set(self, client, instances, itype, izone):\n # Setup retry with insufficient capacity as well\n retry = utils.get_retry((\n 'InsufficientInstanceCapacity',\n 'RequestLimitExceeded', 'Client.RequestLimitExceeded'),\n max_attempts=5)\n instance_ids = [i['InstanceId'] for i in instances]\n try:\n retry(client.start_instances, InstanceIds=instance_ids)\n except ClientError as e:\n # Saving exception\n self.exception = e\n self.log.exception(\n (\"Could not start instances:%d type:%s\"\n \" zone:%s instances:%s error:%s\"),\n len(instances), itype, izone,\n \", \".join(instance_ids), e)\n return\n\n\[email protected]('resize')\nclass Resize(BaseAction, StateTransitionFilter):\n \"\"\"Change an instance's size.\n\n An instance can only be resized when 
its stopped, this action\n can optionally restart an instance if needed to effect the instance\n type change. Instances are always left in the run state they were\n found in.\n\n There are a few caveats to be aware of, instance resizing\n needs to maintain compatibility for architecture, virtualization type\n hvm/pv, and ebs optimization at minimum.\n\n http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-resize.html\n \"\"\"\n\n schema = type_schema(\n 'resize',\n **{'restart': {'type': 'boolean'},\n 'type-map': {'type': 'object'},\n 'default': {'type': 'string'}})\n\n valid_origin_states = ('running', 'stopped')\n\n def get_permissions(self):\n perms = ('ec2:DescribeInstances', 'ec2:ModifyInstanceAttribute')\n if self.data.get('restart', False):\n perms += ('ec2:StopInstances', 'ec2:StartInstances')\n return perms\n\n def process(self, resources):\n stopped_instances = self.filter_instance_state(\n resources, ('stopped',))\n running_instances = self.filter_instance_state(\n resources, ('running',))\n\n if self.data.get('restart') and running_instances:\n Stop({'terminate-ephemeral': False},\n self.manager).process(running_instances)\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n waiter = client.get_waiter('instance_stopped')\n try:\n waiter.wait(\n InstanceIds=[r['InstanceId'] for r in running_instances])\n except ClientError as e:\n self.log.exception(\n \"Exception stopping instances for resize:\\n %s\" % e)\n\n for instance_set in utils.chunks(itertools.chain(\n stopped_instances, running_instances), 20):\n self.process_resource_set(instance_set)\n\n if self.data.get('restart') and running_instances:\n client.start_instances(\n InstanceIds=[i['InstanceId'] for i in running_instances])\n return list(itertools.chain(stopped_instances, running_instances))\n\n def process_resource_set(self, instance_set):\n type_map = self.data.get('type-map')\n default_type = self.data.get('default')\n\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n\n for i in instance_set:\n self.log.debug(\n \"resizing %s %s\" % (i['InstanceId'], i['InstanceType']))\n new_type = type_map.get(i['InstanceType'], default_type)\n if new_type == i['InstanceType']:\n continue\n try:\n client.modify_instance_attribute(\n InstanceId=i['InstanceId'],\n InstanceType={'Value': new_type})\n except ClientError as e:\n self.log.exception(\n \"Exception resizing instance:%s new:%s old:%s \\n %s\" % (\n i['InstanceId'], new_type, i['InstanceType'], e))\n\n\[email protected]('stop')\nclass Stop(BaseAction, StateTransitionFilter):\n \"\"\"Stops a running EC2 instances\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-stop-running-instances\n resource: ec2\n query:\n - instance-state-name: running\n actions:\n - stop\n \"\"\"\n valid_origin_states = ('running',)\n\n schema = type_schema('stop', **{'terminate-ephemeral': {'type': 'boolean'}})\n\n def get_permissions(self):\n perms = ('ec2:StopInstances',)\n if self.data.get('terminate-ephemeral', False):\n perms += ('ec2:TerminateInstances',)\n return perms\n\n def split_on_storage(self, instances):\n ephemeral = []\n persistent = []\n for i in instances:\n if EphemeralInstanceFilter.is_ephemeral(i):\n ephemeral.append(i)\n else:\n persistent.append(i)\n return ephemeral, persistent\n\n def process(self, instances):\n instances = self.filter_instance_state(instances)\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n # Ephemeral instance can't be stopped.\n ephemeral, persistent = self.split_on_storage(instances)\n if self.data.get('terminate-ephemeral', False) and ephemeral:\n self._run_instances_op(\n client.terminate_instances,\n [i['InstanceId'] for i in ephemeral])\n if persistent:\n self._run_instances_op(\n client.stop_instances,\n [i['InstanceId'] for i in persistent])\n return instances\n\n def _run_instances_op(self, op, instance_ids):\n while True:\n try:\n return self.manager.retry(op, InstanceIds=instance_ids)\n except ClientError as e:\n if e.response['Error']['Code'] == 'IncorrectInstanceState':\n msg = e.response['Error']['Message']\n e_instance_id = msg[msg.find(\"'\") + 1:msg.rfind(\"'\")]\n instance_ids.remove(e_instance_id)\n if not instance_ids:\n return\n continue\n raise\n\n\[email protected]('terminate')\nclass Terminate(BaseAction, StateTransitionFilter):\n \"\"\" Terminate a set of instances.\n\n While ec2 offers a bulk delete api, any given instance can be configured\n with api deletion termination protection, so we can't use the bulk call\n reliabily, we need to process the instances individually. Additionally\n If we're configured with 'force' then we'll turn off instance termination\n protection.\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: ec2-process-termination\n resource: ec2\n filters:\n - type: marked-for-op\n op: terminate\n actions:\n - terminate\n \"\"\"\n\n valid_origin_states = ('running', 'stopped', 'pending', 'stopping')\n\n schema = type_schema('terminate', force={'type': 'boolean'})\n\n def get_permissions(self):\n permissions = (\"ec2:TerminateInstances\",)\n if self.data.get('force'):\n permissions += ('ec2:ModifyInstanceAttribute',)\n return permissions\n\n def process(self, instances):\n instances = self.filter_instance_state(instances)\n if not len(instances):\n return\n if self.data.get('force'):\n self.log.info(\"Disabling termination protection on instances\")\n self.disable_deletion_protection(instances)\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n # limit batch sizes to avoid api limits\n for batch in utils.chunks(instances, 100):\n self.manager.retry(\n client.terminate_instances,\n InstanceIds=[i['InstanceId'] for i in instances])\n\n def disable_deletion_protection(self, instances):\n\n @utils.worker\n def process_instance(i):\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n try:\n self.manager.retry(\n client.modify_instance_attribute,\n InstanceId=i['InstanceId'],\n Attribute='disableApiTermination',\n Value='false')\n except ClientError as e:\n if e.response['Error']['Code'] == 'IncorrectInstanceState':\n return\n raise\n\n with self.executor_factory(max_workers=2) as w:\n list(w.map(process_instance, instances))\n\n\[email protected]('snapshot')\nclass Snapshot(BaseAction):\n \"\"\"Snapshots volumes attached to an EC2 instance\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-snapshots\n resource: ec2\n actions:\n - type: snapshot\n copy-tags:\n - Name\n \"\"\"\n\n schema = type_schema(\n 'snapshot',\n **{'copy-tags': {'type': 'array', 'items': {'type': 'string'}}})\n permissions = ('ec2:CreateSnapshot', 'ec2:CreateTags',)\n\n def process(self, resources):\n for resource in resources:\n with self.executor_factory(max_workers=2) as w:\n futures = []\n futures.append(w.submit(self.process_volume_set, resource))\n for f in as_completed(futures):\n if f.exception():\n self.log.error(\n \"Exception creating snapshot set \\n %s\" % (\n f.exception()))\n\n @utils.worker\n def process_volume_set(self, resource):\n c = utils.local_session(self.manager.session_factory).client('ec2')\n for block_device in resource['BlockDeviceMappings']:\n if 'Ebs' not in block_device:\n continue\n volume_id = block_device['Ebs']['VolumeId']\n description = \"Automated,Backup,%s,%s\" % (\n resource['InstanceId'],\n volume_id)\n try:\n response = c.create_snapshot(\n DryRun=self.manager.config.dryrun,\n VolumeId=volume_id,\n Description=description)\n except ClientError as e:\n if e.response['Error']['Code'] == 'IncorrectState':\n self.log.warning(\n \"action:%s volume:%s is incorrect state\" % (\n self.__class__.__name__.lower(),\n volume_id))\n continue\n raise\n\n tags = [\n {'Key': 'Name', 'Value': volume_id},\n {'Key': 'InstanceId', 'Value': resource['InstanceId']},\n {'Key': 'DeviceName', 'Value': block_device['DeviceName']},\n {'Key': 'custodian_snapshot', 'Value': ''}\n ]\n\n copy_keys = self.data.get('copy-tags', [])\n copy_tags = []\n if copy_keys:\n for t in resource.get('Tags', []):\n if t['Key'] in copy_keys:\n copy_tags.append(t)\n\n if len(copy_tags) + len(tags) > 40:\n self.log.warning(\n \"action:%s volume:%s too many tags to copy\" % (\n self.__class__.__name__.lower(),\n 
volume_id))\n copy_tags = []\n\n tags.extend(copy_tags)\n c.create_tags(\n DryRun=self.manager.config.dryrun,\n Resources=[\n response['SnapshotId']],\n Tags=tags)\n\n\[email protected]('modify-security-groups')\nclass EC2ModifyVpcSecurityGroups(ModifyVpcSecurityGroupsAction):\n \"\"\"Modify security groups on an instance.\"\"\"\n\n permissions = (\"ec2:ModifyNetworkInterfaceAttribute\",)\n\n def process(self, instances):\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n\n # handle multiple ENIs\n interfaces = []\n for i in instances:\n for eni in i['NetworkInterfaces']:\n if i.get('c7n:matched-security-groups'):\n eni['c7n:matched-security-groups'] = i[\n 'c7n:matched-security-groups']\n interfaces.append(eni)\n\n groups = super(EC2ModifyVpcSecurityGroups, self).get_groups(interfaces)\n\n for idx, i in enumerate(interfaces):\n client.modify_network_interface_attribute(\n NetworkInterfaceId=i['NetworkInterfaceId'],\n Groups=groups[idx])\n\n\[email protected]('autorecover-alarm')\nclass AutorecoverAlarm(BaseAction, StateTransitionFilter):\n \"\"\"Adds a cloudwatch metric alarm to recover an EC2 instance.\n\n This action takes effect on instances that are NOT part\n of an ASG.\n\n :Example:\n\n .. code-block: yaml\n\n policies:\n - name: ec2-autorecover-alarm\n resource: ec2\n filters:\n - singleton\n actions:\n - autorecover-alarm\n\n https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-recover.html\n \"\"\"\n\n schema = type_schema('autorecover-alarm')\n permissions = ('ec2:DescribeInstanceStatus',\n 'ec2:RecoverInstances',\n 'ec2:DescribeInstanceRecoveryAttribute')\n\n valid_origin_states = ('running', 'stopped', 'pending', 'stopping')\n filter_asg_membership = ValueFilter({\n 'key': 'tag:aws:autoscaling:groupName',\n 'value': 'empty'}).validate()\n\n def process(self, instances):\n instances = self.filter_asg_membership.process(\n self.filter_instance_state(instances))\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('cloudwatch')\n for i in instances:\n client.put_metric_alarm(\n AlarmName='recover-{}'.format(i['InstanceId']),\n AlarmDescription='Auto Recover {}'.format(i['InstanceId']),\n ActionsEnabled=True,\n AlarmActions=[\n 'arn:aws:automate:{}:ec2:recover'.format(\n i['Placement']['AvailabilityZone'][:-1])\n ],\n MetricName='StatusCheckFailed_System',\n Namespace='AWS/EC2',\n Statistic='Minimum',\n Dimensions=[\n {\n 'Name': 'InstanceId',\n 'Value': i['InstanceId']\n }\n ],\n Period=60,\n EvaluationPeriods=2,\n Threshold=0,\n ComparisonOperator='GreaterThanThreshold'\n )\n\n\[email protected]('set-instance-profile')\nclass SetInstanceProfile(BaseAction, StateTransitionFilter):\n \"\"\"Sets (or removes) the instance profile for a running EC2 instance.\n\n :Example:\n\n .. 
code-block: yaml\n\n policies:\n - name: set-default-instance-profile\n resource: ec2\n query:\n - IamInstanceProfile: absent\n actions:\n - type: set-instance-profile\n name: default\n\n https://docs.aws.amazon.com/cli/latest/reference/ec2/associate-iam-instance-profile.html\n https://docs.aws.amazon.com/cli/latest/reference/ec2/disassociate-iam-instance-profile.html\n \"\"\"\n\n schema = type_schema(\n 'set-instance-profile',\n **{'name': {'type': 'string'}})\n\n permissions = (\n 'ec2:AssociateIamInstanceProfile',\n 'ec2:DisassociateIamInstanceProfile',\n 'iam:PassRole')\n\n valid_origin_states = ('running', 'pending')\n\n def process(self, instances):\n instances = self.filter_instance_state(instances)\n if not len(instances):\n return\n client = utils.local_session(\n self.manager.session_factory).client('ec2')\n profile_name = self.data.get('name', '')\n\n for i in instances:\n if profile_name:\n client.associate_iam_instance_profile(\n IamInstanceProfile={'Name': self.data.get('name', '')},\n InstanceId=i['InstanceId'])\n else:\n response = client.describe_iam_instance_profile_associations(\n Filters=[\n {\n 'Name': 'instance-id',\n 'Values': [i['InstanceId']],\n },\n {\n 'Name': 'state',\n 'Values': ['associating', 'associated']\n }\n ]\n )\n for a in response['IamInstanceProfileAssociations']:\n client.disassociate_iam_instance_profile(\n AssociationId=a['AssociationId']\n )\n\n return instances\n\n\n# Valid EC2 Query Filters\n# http://docs.aws.amazon.com/AWSEC2/latest/CommandLineReference/ApiReference-cmd-DescribeInstances.html\nEC2_VALID_FILTERS = {\n 'architecture': ('i386', 'x86_64'),\n 'availability-zone': str,\n 'iam-instance-profile.arn': str,\n 'image-id': str,\n 'instance-id': str,\n 'instance-lifecycle': ('spot',),\n 'instance-state-name': (\n 'pending',\n 'terminated',\n 'running',\n 'shutting-down',\n 'stopping',\n 'stopped'),\n 'instance.group-id': str,\n 'instance.group-name': str,\n 'tag-key': str,\n 'tag-value': str,\n 'tag:': str,\n 'tenancy': ('dedicated', 'default', 'host'),\n 'vpc-id': str}\n\n\nclass QueryFilter(object):\n\n @classmethod\n def parse(cls, data):\n results = []\n for d in data:\n if not isinstance(d, dict):\n raise ValueError(\n \"EC2 Query Filter Invalid structure %s\" % d)\n results.append(cls(d).validate())\n return results\n\n def __init__(self, data):\n self.data = data\n self.key = None\n self.value = None\n\n def validate(self):\n if not len(list(self.data.keys())) == 1:\n raise ValueError(\n \"EC2 Query Filter Invalid %s\" % self.data)\n self.key = list(self.data.keys())[0]\n self.value = list(self.data.values())[0]\n\n if self.key not in EC2_VALID_FILTERS and not self.key.startswith(\n 'tag:'):\n raise ValueError(\n \"EC2 Query Filter invalid filter name %s\" % (self.data))\n\n if self.value is None:\n raise ValueError(\n \"EC2 Query Filters must have a value, use tag-key\"\n \" w/ tag name as value for tag present checks\"\n \" %s\" % self.data)\n return self\n\n def query(self):\n value = self.value\n if isinstance(self.value, six.string_types):\n value = [self.value]\n\n return {'Name': self.key, 'Values': value}\n",
"path": "c7n/resources/ec2.py"
}
] | diff --git a/c7n/resources/ec2.py b/c7n/resources/ec2.py
index c2903ad7c7e..6930570ba77 100644
--- a/c7n/resources/ec2.py
+++ b/c7n/resources/ec2.py
@@ -1166,6 +1166,7 @@ def process(self, instances):
'tag-key': str,
'tag-value': str,
'tag:': str,
+ 'tenancy': ('dedicated', 'default', 'host'),
'vpc-id': str}
diff --git a/docs/source/policy/resources/ec2.rst b/docs/source/policy/resources/ec2.rst
index ad0ac3b12f6..181cf752be3 100644
--- a/docs/source/policy/resources/ec2.rst
+++ b/docs/source/policy/resources/ec2.rst
@@ -30,6 +30,7 @@ Query
'tag-key': str,
'tag-value': str,
'tag:': str,
+ 'tenancy': ('dedicated', 'default', 'host'),
'vpc-id': str}
Filters
|
flairNLP__flair-2322 | can't load ner-multi : 'LanguageModel' object has no attribute '_load_state_dict_pre_hooks'
**Describe the bug**
Multi lingual models (both ner or pos) fails to load after download inside torch nn.
I don't have any problems with other ner packages
**To Reproduce**
> from flair.data import Sentence
> from flair.models import SequenceTagger
> tagger = SequenceTagger.load("flair/ner-multi-fast")
```
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/flair/flair/nn.py", line 93, in load
model = cls._init_model_with_state_dict(state)
File "/home/flair/flair/models/sequence_tagger_model.py", line 297, in _init_model_with_state_dict
model.load_state_dict(state["state_dict"])
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1037, in load_state_dict
load(self)
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1035, in load
load(child, prefix + name + '.')
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1035, in load
load(child, prefix + name + '.')
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1035, in load
load(child, prefix + name + '.')
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1031, in load
module._load_from_state_dict(
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 957, in _load_from_state_dict
for hook in self._load_state_dict_pre_hooks.values():
File "/root/anaconda3/envs/flair/lib/python3.9/site-packages/torch/nn/modules/module.py", line 778, in __getattr__
raise ModuleAttributeError("'{}' object has no attribute '{}'".format(
torch.nn.modules.module.ModuleAttributeError: 'LanguageModel' object has no attribute '_load_state_dict_pre_hooks'
```
- tried on macos 11.1, python 3.8 and python 3.6 and 3.9.4
- torch 1.7.1 (conda env from requirements)
- Version flair 0.8.2 (I tried both pip install and pip from github repo)
| [
{
"content": "from pathlib import Path\n\nimport torch.nn as nn\nimport torch\nimport math\nfrom typing import Union, Tuple\nfrom typing import List\n\nfrom torch.optim import Optimizer\n\nimport flair\nfrom flair.data import Dictionary\n\n\nclass LanguageModel(nn.Module):\n \"\"\"Container module with an encoder, a recurrent module, and a decoder.\"\"\"\n\n def __init__(\n self,\n dictionary: Dictionary,\n is_forward_lm: bool,\n hidden_size: int,\n nlayers: int,\n embedding_size: int = 100,\n nout=None,\n document_delimiter: str = '\\n',\n dropout=0.1,\n ):\n\n super(LanguageModel, self).__init__()\n\n self.dictionary = dictionary\n self.document_delimiter = document_delimiter\n self.is_forward_lm: bool = is_forward_lm\n\n self.dropout = dropout\n self.hidden_size = hidden_size\n self.embedding_size = embedding_size\n self.nlayers = nlayers\n\n self.drop = nn.Dropout(dropout)\n self.encoder = nn.Embedding(len(dictionary), embedding_size)\n\n if nlayers == 1:\n self.rnn = nn.LSTM(embedding_size, hidden_size, nlayers)\n else:\n self.rnn = nn.LSTM(embedding_size, hidden_size, nlayers, dropout=dropout)\n\n self.hidden = None\n\n self.nout = nout\n if nout is not None:\n self.proj = nn.Linear(hidden_size, nout)\n self.initialize(self.proj.weight)\n self.decoder = nn.Linear(nout, len(dictionary))\n else:\n self.proj = None\n self.decoder = nn.Linear(hidden_size, len(dictionary))\n\n self.init_weights()\n\n # auto-spawn on GPU if available\n self.to(flair.device)\n\n def init_weights(self):\n initrange = 0.1\n self.encoder.weight.detach().uniform_(-initrange, initrange)\n self.decoder.bias.detach().fill_(0)\n self.decoder.weight.detach().uniform_(-initrange, initrange)\n\n def set_hidden(self, hidden):\n self.hidden = hidden\n\n def forward(self, input, hidden, ordered_sequence_lengths=None):\n encoded = self.encoder(input)\n emb = self.drop(encoded)\n\n self.rnn.flatten_parameters()\n\n output, hidden = self.rnn(emb, hidden)\n\n if self.proj is not None:\n output = self.proj(output)\n\n output = self.drop(output)\n\n decoded = self.decoder(\n output.view(output.size(0) * output.size(1), output.size(2))\n )\n\n return (\n decoded.view(output.size(0), output.size(1), decoded.size(1)),\n output,\n hidden,\n )\n\n def init_hidden(self, bsz):\n weight = next(self.parameters()).detach()\n return (\n weight.new(self.nlayers, bsz, self.hidden_size).zero_().clone().detach(),\n weight.new(self.nlayers, bsz, self.hidden_size).zero_().clone().detach(),\n )\n\n def get_representation(\n self,\n strings: List[str],\n start_marker: str,\n end_marker: str,\n chars_per_chunk: int = 512,\n ):\n\n len_longest_str: int = len(max(strings, key=len))\n\n # pad strings with whitespaces to longest sentence\n padded_strings: List[str] = []\n\n for string in strings:\n if not self.is_forward_lm:\n string = string[::-1]\n\n padded = f\"{start_marker}{string}{end_marker}\"\n padded_strings.append(padded)\n\n # cut up the input into chunks of max charlength = chunk_size\n chunks = []\n splice_begin = 0\n longest_padded_str: int = len_longest_str + len(start_marker) + len(end_marker)\n for splice_end in range(chars_per_chunk, longest_padded_str, chars_per_chunk):\n chunks.append([text[splice_begin:splice_end] for text in padded_strings])\n splice_begin = splice_end\n\n chunks.append(\n [text[splice_begin:longest_padded_str] for text in padded_strings]\n )\n hidden = self.init_hidden(len(chunks[0]))\n\n padding_char_index = self.dictionary.get_idx_for_item(\" \")\n\n batches: List[torch.Tensor] = []\n # push each chunk through 
the RNN language model\n for chunk in chunks:\n len_longest_chunk: int = len(max(chunk, key=len))\n sequences_as_char_indices: List[List[int]] = []\n for string in chunk:\n char_indices = self.dictionary.get_idx_for_items(list(string))\n char_indices += [padding_char_index] * (len_longest_chunk - len(string))\n\n sequences_as_char_indices.append(char_indices)\n t = torch.tensor(sequences_as_char_indices, dtype=torch.long).to(\n device=flair.device, non_blocking=True\n )\n batches.append(t)\n\n output_parts = []\n for batch in batches:\n batch = batch.transpose(0, 1)\n _, rnn_output, hidden = self.forward(batch, hidden)\n output_parts.append(rnn_output)\n\n # concatenate all chunks to make final output\n output = torch.cat(output_parts)\n\n return output\n\n def get_output(self, text: str):\n char_indices = [self.dictionary.get_idx_for_item(char) for char in text]\n input_vector = torch.LongTensor([char_indices]).transpose(0, 1)\n\n hidden = self.init_hidden(1)\n prediction, rnn_output, hidden = self.forward(input_vector, hidden)\n\n return self.repackage_hidden(hidden)\n\n def repackage_hidden(self, h):\n \"\"\"Wraps hidden states in new Variables, to detach them from their history.\"\"\"\n if type(h) == torch.Tensor:\n return h.clone().detach()\n else:\n return tuple(self.repackage_hidden(v) for v in h)\n\n @staticmethod\n def initialize(matrix):\n in_, out_ = matrix.size()\n stdv = math.sqrt(3.0 / (in_ + out_))\n matrix.detach().uniform_(-stdv, stdv)\n\n @classmethod\n def load_language_model(cls, model_file: Union[Path, str]):\n\n state = torch.load(str(model_file), map_location=flair.device)\n\n document_delimiter = state[\"document_delimiter\"] if \"document_delimiter\" in state else '\\n'\n\n model = LanguageModel(\n dictionary=state[\"dictionary\"],\n is_forward_lm=state[\"is_forward_lm\"],\n hidden_size=state[\"hidden_size\"],\n nlayers=state[\"nlayers\"],\n embedding_size=state[\"embedding_size\"],\n nout=state[\"nout\"],\n document_delimiter=document_delimiter,\n dropout=state[\"dropout\"],\n )\n model.load_state_dict(state[\"state_dict\"])\n model.eval()\n model.to(flair.device)\n\n return model\n\n @classmethod\n def load_checkpoint(cls, model_file: Union[Path, str]):\n state = torch.load(str(model_file), map_location=flair.device)\n\n epoch = state[\"epoch\"] if \"epoch\" in state else None\n split = state[\"split\"] if \"split\" in state else None\n loss = state[\"loss\"] if \"loss\" in state else None\n document_delimiter = state[\"document_delimiter\"] if \"document_delimiter\" in state else '\\n'\n\n optimizer_state_dict = (\n state[\"optimizer_state_dict\"] if \"optimizer_state_dict\" in state else None\n )\n\n model = LanguageModel(\n dictionary=state[\"dictionary\"],\n is_forward_lm=state[\"is_forward_lm\"],\n hidden_size=state[\"hidden_size\"],\n nlayers=state[\"nlayers\"],\n embedding_size=state[\"embedding_size\"],\n nout=state[\"nout\"],\n document_delimiter=document_delimiter,\n dropout=state[\"dropout\"],\n )\n model.load_state_dict(state[\"state_dict\"])\n model.eval()\n model.to(flair.device)\n\n return {\n \"model\": model,\n \"epoch\": epoch,\n \"split\": split,\n \"loss\": loss,\n \"optimizer_state_dict\": optimizer_state_dict,\n }\n\n def save_checkpoint(\n self, file: Union[Path, str], optimizer: Optimizer, epoch: int, split: int, loss: float\n ):\n model_state = {\n \"state_dict\": self.state_dict(),\n \"dictionary\": self.dictionary,\n \"is_forward_lm\": self.is_forward_lm,\n \"hidden_size\": self.hidden_size,\n \"nlayers\": self.nlayers,\n 
\"embedding_size\": self.embedding_size,\n \"nout\": self.nout,\n \"document_delimiter\": self.document_delimiter,\n \"dropout\": self.dropout,\n \"optimizer_state_dict\": optimizer.state_dict(),\n \"epoch\": epoch,\n \"split\": split,\n \"loss\": loss,\n }\n\n torch.save(model_state, str(file), pickle_protocol=4)\n\n def save(self, file: Union[Path, str]):\n model_state = {\n \"state_dict\": self.state_dict(),\n \"dictionary\": self.dictionary,\n \"is_forward_lm\": self.is_forward_lm,\n \"hidden_size\": self.hidden_size,\n \"nlayers\": self.nlayers,\n \"embedding_size\": self.embedding_size,\n \"nout\": self.nout,\n \"document_delimiter\": self.document_delimiter,\n \"dropout\": self.dropout,\n }\n\n torch.save(model_state, str(file), pickle_protocol=4)\n\n def generate_text(\n self,\n prefix: str = \"\\n\",\n number_of_characters: int = 1000,\n temperature: float = 1.0,\n break_on_suffix=None,\n ) -> Tuple[str, float]:\n\n if prefix == \"\":\n prefix = \"\\n\"\n\n with torch.no_grad():\n characters = []\n\n idx2item = self.dictionary.idx2item\n\n # initial hidden state\n hidden = self.init_hidden(1)\n\n if len(prefix) > 1:\n\n char_tensors = []\n for character in prefix[:-1]:\n char_tensors.append(\n torch.tensor(self.dictionary.get_idx_for_item(character))\n .unsqueeze(0)\n .unsqueeze(0)\n )\n\n input = torch.cat(char_tensors).to(flair.device)\n\n prediction, _, hidden = self.forward(input, hidden)\n\n input = (\n torch.tensor(self.dictionary.get_idx_for_item(prefix[-1]))\n .unsqueeze(0)\n .unsqueeze(0)\n )\n\n log_prob = 0.0\n\n for i in range(number_of_characters):\n\n input = input.to(flair.device)\n\n # get predicted weights\n prediction, _, hidden = self.forward(input, hidden)\n prediction = prediction.squeeze().detach()\n decoder_output = prediction\n\n # divide by temperature\n prediction = prediction.div(temperature)\n\n # to prevent overflow problem with small temperature values, substract largest value from all\n # this makes a vector in which the largest value is 0\n max = torch.max(prediction)\n prediction -= max\n\n # compute word weights with exponential function\n word_weights = prediction.exp().cpu()\n\n # try sampling multinomial distribution for next character\n try:\n word_idx = torch.multinomial(word_weights, 1)[0]\n except:\n word_idx = torch.tensor(0)\n\n # print(word_idx)\n prob = decoder_output[word_idx]\n log_prob += prob\n\n input = word_idx.detach().unsqueeze(0).unsqueeze(0)\n word = idx2item[word_idx].decode(\"UTF-8\")\n characters.append(word)\n\n if break_on_suffix is not None:\n if \"\".join(characters).endswith(break_on_suffix):\n break\n\n text = prefix + \"\".join(characters)\n\n log_prob = log_prob.item()\n log_prob /= len(characters)\n\n if not self.is_forward_lm:\n text = text[::-1]\n\n return text, log_prob\n\n def calculate_perplexity(self, text: str) -> float:\n\n if not self.is_forward_lm:\n text = text[::-1]\n\n # input ids\n input = torch.tensor(\n [self.dictionary.get_idx_for_item(char) for char in text[:-1]]\n ).unsqueeze(1)\n input = input.to(flair.device)\n\n # push list of character IDs through model\n hidden = self.init_hidden(1)\n prediction, _, hidden = self.forward(input, hidden)\n\n # the target is always the next character\n targets = torch.tensor(\n [self.dictionary.get_idx_for_item(char) for char in text[1:]]\n )\n targets = targets.to(flair.device)\n\n # use cross entropy loss to compare output of forward pass with targets\n cross_entroy_loss = torch.nn.CrossEntropyLoss()\n loss = cross_entroy_loss(\n prediction.view(-1, 
len(self.dictionary)), targets\n ).item()\n\n # exponentiate cross-entropy loss to calculate perplexity\n perplexity = math.exp(loss)\n\n return perplexity\n\n def __getstate__(self):\n\n # serialize the language models and the constructor arguments (but nothing else)\n model_state = {\n \"state_dict\": self.state_dict(),\n\n \"dictionary\": self.dictionary,\n \"is_forward_lm\": self.is_forward_lm,\n \"hidden_size\": self.hidden_size,\n \"nlayers\": self.nlayers,\n \"embedding_size\": self.embedding_size,\n \"nout\": self.nout,\n \"document_delimiter\": self.document_delimiter,\n \"dropout\": self.dropout,\n }\n\n return model_state\n\n def __setstate__(self, d):\n\n # special handling for deserializing language models\n if \"state_dict\" in d:\n\n # re-initialize language model with constructor arguments\n language_model = LanguageModel(\n dictionary=d['dictionary'],\n is_forward_lm=d['is_forward_lm'],\n hidden_size=d['hidden_size'],\n nlayers=d['nlayers'],\n embedding_size=d['embedding_size'],\n nout=d['nout'],\n document_delimiter=d['document_delimiter'],\n dropout=d['dropout'],\n )\n\n language_model.load_state_dict(d['state_dict'])\n\n # copy over state dictionary to self\n for key in language_model.__dict__.keys():\n self.__dict__[key] = language_model.__dict__[key]\n\n # set the language model to eval() by default (this is necessary since FlairEmbeddings \"protect\" the LM\n # in their \"self.train()\" method)\n self.eval()\n\n else:\n self.__dict__ = d\n\n def _apply(self, fn):\n\n # models that were serialized using torch versions older than 1.4.0 lack the _flat_weights_names attribute\n # check if this is the case and if so, set it\n for child_module in self.children():\n if isinstance(child_module, torch.nn.RNNBase) and not hasattr(child_module, \"_flat_weights_names\"):\n _flat_weights_names = []\n\n if child_module.__dict__[\"bidirectional\"]:\n num_direction = 2\n else:\n num_direction = 1\n for layer in range(child_module.__dict__[\"num_layers\"]):\n for direction in range(num_direction):\n suffix = \"_reverse\" if direction == 1 else \"\"\n param_names = [\"weight_ih_l{}{}\", \"weight_hh_l{}{}\"]\n if child_module.__dict__[\"bias\"]:\n param_names += [\"bias_ih_l{}{}\", \"bias_hh_l{}{}\"]\n param_names = [\n x.format(layer, suffix) for x in param_names\n ]\n _flat_weights_names.extend(param_names)\n\n setattr(child_module, \"_flat_weights_names\",\n _flat_weights_names)\n\n child_module._apply(fn)",
"path": "flair/models/language_model.py"
}
] | [
{
"content": "from pathlib import Path\n\nimport torch.nn as nn\nimport torch\nimport math\nfrom typing import Union, Tuple\nfrom typing import List\n\nfrom torch.optim import Optimizer\n\nimport flair\nfrom flair.data import Dictionary\n\n\nclass LanguageModel(nn.Module):\n \"\"\"Container module with an encoder, a recurrent module, and a decoder.\"\"\"\n\n def __init__(\n self,\n dictionary: Dictionary,\n is_forward_lm: bool,\n hidden_size: int,\n nlayers: int,\n embedding_size: int = 100,\n nout=None,\n document_delimiter: str = '\\n',\n dropout=0.1,\n ):\n\n super(LanguageModel, self).__init__()\n\n self.dictionary = dictionary\n self.document_delimiter = document_delimiter\n self.is_forward_lm: bool = is_forward_lm\n\n self.dropout = dropout\n self.hidden_size = hidden_size\n self.embedding_size = embedding_size\n self.nlayers = nlayers\n\n self.drop = nn.Dropout(dropout)\n self.encoder = nn.Embedding(len(dictionary), embedding_size)\n\n if nlayers == 1:\n self.rnn = nn.LSTM(embedding_size, hidden_size, nlayers)\n else:\n self.rnn = nn.LSTM(embedding_size, hidden_size, nlayers, dropout=dropout)\n\n self.hidden = None\n\n self.nout = nout\n if nout is not None:\n self.proj = nn.Linear(hidden_size, nout)\n self.initialize(self.proj.weight)\n self.decoder = nn.Linear(nout, len(dictionary))\n else:\n self.proj = None\n self.decoder = nn.Linear(hidden_size, len(dictionary))\n\n self.init_weights()\n\n # auto-spawn on GPU if available\n self.to(flair.device)\n\n def init_weights(self):\n initrange = 0.1\n self.encoder.weight.detach().uniform_(-initrange, initrange)\n self.decoder.bias.detach().fill_(0)\n self.decoder.weight.detach().uniform_(-initrange, initrange)\n\n def set_hidden(self, hidden):\n self.hidden = hidden\n\n def forward(self, input, hidden, ordered_sequence_lengths=None):\n encoded = self.encoder(input)\n emb = self.drop(encoded)\n\n self.rnn.flatten_parameters()\n\n output, hidden = self.rnn(emb, hidden)\n\n if self.proj is not None:\n output = self.proj(output)\n\n output = self.drop(output)\n\n decoded = self.decoder(\n output.view(output.size(0) * output.size(1), output.size(2))\n )\n\n return (\n decoded.view(output.size(0), output.size(1), decoded.size(1)),\n output,\n hidden,\n )\n\n def init_hidden(self, bsz):\n weight = next(self.parameters()).detach()\n return (\n weight.new(self.nlayers, bsz, self.hidden_size).zero_().clone().detach(),\n weight.new(self.nlayers, bsz, self.hidden_size).zero_().clone().detach(),\n )\n\n def get_representation(\n self,\n strings: List[str],\n start_marker: str,\n end_marker: str,\n chars_per_chunk: int = 512,\n ):\n\n len_longest_str: int = len(max(strings, key=len))\n\n # pad strings with whitespaces to longest sentence\n padded_strings: List[str] = []\n\n for string in strings:\n if not self.is_forward_lm:\n string = string[::-1]\n\n padded = f\"{start_marker}{string}{end_marker}\"\n padded_strings.append(padded)\n\n # cut up the input into chunks of max charlength = chunk_size\n chunks = []\n splice_begin = 0\n longest_padded_str: int = len_longest_str + len(start_marker) + len(end_marker)\n for splice_end in range(chars_per_chunk, longest_padded_str, chars_per_chunk):\n chunks.append([text[splice_begin:splice_end] for text in padded_strings])\n splice_begin = splice_end\n\n chunks.append(\n [text[splice_begin:longest_padded_str] for text in padded_strings]\n )\n hidden = self.init_hidden(len(chunks[0]))\n\n padding_char_index = self.dictionary.get_idx_for_item(\" \")\n\n batches: List[torch.Tensor] = []\n # push each chunk through 
the RNN language model\n for chunk in chunks:\n len_longest_chunk: int = len(max(chunk, key=len))\n sequences_as_char_indices: List[List[int]] = []\n for string in chunk:\n char_indices = self.dictionary.get_idx_for_items(list(string))\n char_indices += [padding_char_index] * (len_longest_chunk - len(string))\n\n sequences_as_char_indices.append(char_indices)\n t = torch.tensor(sequences_as_char_indices, dtype=torch.long).to(\n device=flair.device, non_blocking=True\n )\n batches.append(t)\n\n output_parts = []\n for batch in batches:\n batch = batch.transpose(0, 1)\n _, rnn_output, hidden = self.forward(batch, hidden)\n output_parts.append(rnn_output)\n\n # concatenate all chunks to make final output\n output = torch.cat(output_parts)\n\n return output\n\n def get_output(self, text: str):\n char_indices = [self.dictionary.get_idx_for_item(char) for char in text]\n input_vector = torch.LongTensor([char_indices]).transpose(0, 1)\n\n hidden = self.init_hidden(1)\n prediction, rnn_output, hidden = self.forward(input_vector, hidden)\n\n return self.repackage_hidden(hidden)\n\n def repackage_hidden(self, h):\n \"\"\"Wraps hidden states in new Variables, to detach them from their history.\"\"\"\n if type(h) == torch.Tensor:\n return h.clone().detach()\n else:\n return tuple(self.repackage_hidden(v) for v in h)\n\n @staticmethod\n def initialize(matrix):\n in_, out_ = matrix.size()\n stdv = math.sqrt(3.0 / (in_ + out_))\n matrix.detach().uniform_(-stdv, stdv)\n\n @classmethod\n def load_language_model(cls, model_file: Union[Path, str]):\n\n state = torch.load(str(model_file), map_location=flair.device)\n\n document_delimiter = state[\"document_delimiter\"] if \"document_delimiter\" in state else '\\n'\n\n model = LanguageModel(\n dictionary=state[\"dictionary\"],\n is_forward_lm=state[\"is_forward_lm\"],\n hidden_size=state[\"hidden_size\"],\n nlayers=state[\"nlayers\"],\n embedding_size=state[\"embedding_size\"],\n nout=state[\"nout\"],\n document_delimiter=document_delimiter,\n dropout=state[\"dropout\"],\n )\n model.load_state_dict(state[\"state_dict\"])\n model.eval()\n model.to(flair.device)\n\n return model\n\n @classmethod\n def load_checkpoint(cls, model_file: Union[Path, str]):\n state = torch.load(str(model_file), map_location=flair.device)\n\n epoch = state[\"epoch\"] if \"epoch\" in state else None\n split = state[\"split\"] if \"split\" in state else None\n loss = state[\"loss\"] if \"loss\" in state else None\n document_delimiter = state[\"document_delimiter\"] if \"document_delimiter\" in state else '\\n'\n\n optimizer_state_dict = (\n state[\"optimizer_state_dict\"] if \"optimizer_state_dict\" in state else None\n )\n\n model = LanguageModel(\n dictionary=state[\"dictionary\"],\n is_forward_lm=state[\"is_forward_lm\"],\n hidden_size=state[\"hidden_size\"],\n nlayers=state[\"nlayers\"],\n embedding_size=state[\"embedding_size\"],\n nout=state[\"nout\"],\n document_delimiter=document_delimiter,\n dropout=state[\"dropout\"],\n )\n model.load_state_dict(state[\"state_dict\"])\n model.eval()\n model.to(flair.device)\n\n return {\n \"model\": model,\n \"epoch\": epoch,\n \"split\": split,\n \"loss\": loss,\n \"optimizer_state_dict\": optimizer_state_dict,\n }\n\n def save_checkpoint(\n self, file: Union[Path, str], optimizer: Optimizer, epoch: int, split: int, loss: float\n ):\n model_state = {\n \"state_dict\": self.state_dict(),\n \"dictionary\": self.dictionary,\n \"is_forward_lm\": self.is_forward_lm,\n \"hidden_size\": self.hidden_size,\n \"nlayers\": self.nlayers,\n 
\"embedding_size\": self.embedding_size,\n \"nout\": self.nout,\n \"document_delimiter\": self.document_delimiter,\n \"dropout\": self.dropout,\n \"optimizer_state_dict\": optimizer.state_dict(),\n \"epoch\": epoch,\n \"split\": split,\n \"loss\": loss,\n }\n\n torch.save(model_state, str(file), pickle_protocol=4)\n\n def save(self, file: Union[Path, str]):\n model_state = {\n \"state_dict\": self.state_dict(),\n \"dictionary\": self.dictionary,\n \"is_forward_lm\": self.is_forward_lm,\n \"hidden_size\": self.hidden_size,\n \"nlayers\": self.nlayers,\n \"embedding_size\": self.embedding_size,\n \"nout\": self.nout,\n \"document_delimiter\": self.document_delimiter,\n \"dropout\": self.dropout,\n }\n\n torch.save(model_state, str(file), pickle_protocol=4)\n\n def generate_text(\n self,\n prefix: str = \"\\n\",\n number_of_characters: int = 1000,\n temperature: float = 1.0,\n break_on_suffix=None,\n ) -> Tuple[str, float]:\n\n if prefix == \"\":\n prefix = \"\\n\"\n\n with torch.no_grad():\n characters = []\n\n idx2item = self.dictionary.idx2item\n\n # initial hidden state\n hidden = self.init_hidden(1)\n\n if len(prefix) > 1:\n\n char_tensors = []\n for character in prefix[:-1]:\n char_tensors.append(\n torch.tensor(self.dictionary.get_idx_for_item(character))\n .unsqueeze(0)\n .unsqueeze(0)\n )\n\n input = torch.cat(char_tensors).to(flair.device)\n\n prediction, _, hidden = self.forward(input, hidden)\n\n input = (\n torch.tensor(self.dictionary.get_idx_for_item(prefix[-1]))\n .unsqueeze(0)\n .unsqueeze(0)\n )\n\n log_prob = 0.0\n\n for i in range(number_of_characters):\n\n input = input.to(flair.device)\n\n # get predicted weights\n prediction, _, hidden = self.forward(input, hidden)\n prediction = prediction.squeeze().detach()\n decoder_output = prediction\n\n # divide by temperature\n prediction = prediction.div(temperature)\n\n # to prevent overflow problem with small temperature values, substract largest value from all\n # this makes a vector in which the largest value is 0\n max = torch.max(prediction)\n prediction -= max\n\n # compute word weights with exponential function\n word_weights = prediction.exp().cpu()\n\n # try sampling multinomial distribution for next character\n try:\n word_idx = torch.multinomial(word_weights, 1)[0]\n except:\n word_idx = torch.tensor(0)\n\n # print(word_idx)\n prob = decoder_output[word_idx]\n log_prob += prob\n\n input = word_idx.detach().unsqueeze(0).unsqueeze(0)\n word = idx2item[word_idx].decode(\"UTF-8\")\n characters.append(word)\n\n if break_on_suffix is not None:\n if \"\".join(characters).endswith(break_on_suffix):\n break\n\n text = prefix + \"\".join(characters)\n\n log_prob = log_prob.item()\n log_prob /= len(characters)\n\n if not self.is_forward_lm:\n text = text[::-1]\n\n return text, log_prob\n\n def calculate_perplexity(self, text: str) -> float:\n\n if not self.is_forward_lm:\n text = text[::-1]\n\n # input ids\n input = torch.tensor(\n [self.dictionary.get_idx_for_item(char) for char in text[:-1]]\n ).unsqueeze(1)\n input = input.to(flair.device)\n\n # push list of character IDs through model\n hidden = self.init_hidden(1)\n prediction, _, hidden = self.forward(input, hidden)\n\n # the target is always the next character\n targets = torch.tensor(\n [self.dictionary.get_idx_for_item(char) for char in text[1:]]\n )\n targets = targets.to(flair.device)\n\n # use cross entropy loss to compare output of forward pass with targets\n cross_entroy_loss = torch.nn.CrossEntropyLoss()\n loss = cross_entroy_loss(\n prediction.view(-1, 
len(self.dictionary)), targets\n ).item()\n\n # exponentiate cross-entropy loss to calculate perplexity\n perplexity = math.exp(loss)\n\n return perplexity\n\n def __getstate__(self):\n\n # serialize the language models and the constructor arguments (but nothing else)\n model_state = {\n \"state_dict\": self.state_dict(),\n\n \"dictionary\": self.dictionary,\n \"is_forward_lm\": self.is_forward_lm,\n \"hidden_size\": self.hidden_size,\n \"nlayers\": self.nlayers,\n \"embedding_size\": self.embedding_size,\n \"nout\": self.nout,\n \"document_delimiter\": self.document_delimiter,\n \"dropout\": self.dropout,\n }\n\n return model_state\n\n def __setstate__(self, d):\n\n # special handling for deserializing language models\n if \"state_dict\" in d:\n\n # re-initialize language model with constructor arguments\n language_model = LanguageModel(\n dictionary=d['dictionary'],\n is_forward_lm=d['is_forward_lm'],\n hidden_size=d['hidden_size'],\n nlayers=d['nlayers'],\n embedding_size=d['embedding_size'],\n nout=d['nout'],\n document_delimiter=d['document_delimiter'],\n dropout=d['dropout'],\n )\n\n language_model.load_state_dict(d['state_dict'])\n\n # copy over state dictionary to self\n for key in language_model.__dict__.keys():\n self.__dict__[key] = language_model.__dict__[key]\n\n # set the language model to eval() by default (this is necessary since FlairEmbeddings \"protect\" the LM\n # in their \"self.train()\" method)\n self.eval()\n\n else:\n super().__setstate__(d)\n\n def _apply(self, fn):\n\n # models that were serialized using torch versions older than 1.4.0 lack the _flat_weights_names attribute\n # check if this is the case and if so, set it\n for child_module in self.children():\n if isinstance(child_module, torch.nn.RNNBase) and not hasattr(child_module, \"_flat_weights_names\"):\n _flat_weights_names = []\n\n if child_module.__dict__[\"bidirectional\"]:\n num_direction = 2\n else:\n num_direction = 1\n for layer in range(child_module.__dict__[\"num_layers\"]):\n for direction in range(num_direction):\n suffix = \"_reverse\" if direction == 1 else \"\"\n param_names = [\"weight_ih_l{}{}\", \"weight_hh_l{}{}\"]\n if child_module.__dict__[\"bias\"]:\n param_names += [\"bias_ih_l{}{}\", \"bias_hh_l{}{}\"]\n param_names = [\n x.format(layer, suffix) for x in param_names\n ]\n _flat_weights_names.extend(param_names)\n\n setattr(child_module, \"_flat_weights_names\",\n _flat_weights_names)\n\n child_module._apply(fn)",
"path": "flair/models/language_model.py"
}
] | diff --git a/flair/models/language_model.py b/flair/models/language_model.py
index 27f4b245ee..85232a8329 100644
--- a/flair/models/language_model.py
+++ b/flair/models/language_model.py
@@ -445,7 +445,7 @@ def __setstate__(self, d):
self.eval()
else:
- self.__dict__ = d
+ super().__setstate__(d)
def _apply(self, fn):
|
hpcaitech__ColossalAI-2608 | [tensor] fix some unittests
[BUG]: Testing failed due to triton
### 🐛 Describe the bug
The build on PR workflow failed with the following errors:
<img width="1509" alt="Screenshot 2023-02-07 at 10 30 17" src="https://user-images.githubusercontent.com/31818963/217132926-fd6cffa1-2c4b-46aa-a6cc-1a3d10918411.png">
### Environment
_No response_
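As an illustrative aside (not part of the original report), below is a minimal Python sketch for checking locally that the triton build matching the CI pin is installed before re-running the failing tests; the expected version string is an assumption taken from the requirements change in the accompanying diff.

```
# Sketch: confirm the locally installed triton build matches the CI pin.
# The expected version below is an assumption based on requirements/requirements-test.txt.
from importlib.metadata import PackageNotFoundError, version

EXPECTED = "2.0.0.dev20221202"

try:
    installed = version("triton")
except PackageNotFoundError:
    raise SystemExit("triton is not installed; install the pinned build first")

if installed != EXPECTED:
    raise SystemExit(f"triton {installed} found, but the test requirements pin {EXPECTED}")
print(f"triton {installed} matches the pinned test requirement")
```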
| [
{
"content": "#!/usr/bin/env python\n# -*- encoding: utf-8 -*-\n\nimport torch.nn as nn\ntry:\n import apex.amp as apex_amp\nexcept ImportError:\n pass\n\nfrom torch import Tensor\n\nfrom colossalai.nn.optimizer import ColossalaiOptimizer\nfrom colossalai.utils import clip_grad_norm_fp32\n\n\nclass ApexAMPOptimizer(ColossalaiOptimizer):\n \"\"\" A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm\n methods\n \"\"\"\n\n def backward(self, loss: Tensor):\n \"\"\"Backward pass to get all gradients\n\n Args:\n loss (torch.Tensor): Loss computed by a loss function\n \"\"\"\n with apex_amp.scale_loss(loss, self.optim) as scaled_loss:\n scaled_loss.backward()\n\n def clip_grad_norm(self, model: nn.Module, max_norm: float):\n \"\"\"Clip gradients by norm\n\n Args:\n model (torch.nn.Module): Your model object\n max_norm (float): The max norm value for gradient clipping\n \"\"\"\n if max_norm > 0:\n clip_grad_norm_fp32(apex_amp.master_params(self.optim), max_norm)\n",
"path": "colossalai/amp/apex_amp/apex_amp.py"
}
] | [
{
"content": "#!/usr/bin/env python\n# -*- encoding: utf-8 -*-\n\nimport torch.nn as nn\n\ntry:\n import apex.amp as apex_amp\nexcept ImportError:\n pass\n\nfrom torch import Tensor\n\nfrom colossalai.nn.optimizer import ColossalaiOptimizer\nfrom colossalai.utils import clip_grad_norm_fp32\n\n\nclass ApexAMPOptimizer(ColossalaiOptimizer):\n \"\"\" A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm\n methods\n \"\"\"\n\n def backward(self, loss: Tensor):\n \"\"\"Backward pass to get all gradients\n\n Args:\n loss (torch.Tensor): Loss computed by a loss function\n \"\"\"\n with apex_amp.scale_loss(loss, self.optim) as scaled_loss:\n scaled_loss.backward()\n\n def clip_grad_norm(self, model: nn.Module, max_norm: float):\n \"\"\"Clip gradients by norm\n\n Args:\n model (torch.nn.Module): Your model object\n max_norm (float): The max norm value for gradient clipping\n \"\"\"\n if max_norm > 0:\n clip_grad_norm_fp32(apex_amp.master_params(self.optim), max_norm)\n",
"path": "colossalai/amp/apex_amp/apex_amp.py"
}
] | diff --git a/.github/workflows/build_on_pr.yml b/.github/workflows/build_on_pr.yml
index 82b671acea93..c7882db6ec61 100644
--- a/.github/workflows/build_on_pr.yml
+++ b/.github/workflows/build_on_pr.yml
@@ -52,6 +52,7 @@ jobs:
**/*.h
**/*.cpp
**/*.cu
+ **/*.txt
- name: List changed files
run: |
diff --git a/colossalai/amp/apex_amp/apex_amp.py b/colossalai/amp/apex_amp/apex_amp.py
index 69a4e348e5a7..e6bdbe4520f9 100644
--- a/colossalai/amp/apex_amp/apex_amp.py
+++ b/colossalai/amp/apex_amp/apex_amp.py
@@ -2,6 +2,7 @@
# -*- encoding: utf-8 -*-
import torch.nn as nn
+
try:
import apex.amp as apex_amp
except ImportError:
diff --git a/requirements/requirements-test.txt b/requirements/requirements-test.txt
index 9ef0a682b6b8..93055cd12109 100644
--- a/requirements/requirements-test.txt
+++ b/requirements/requirements-test.txt
@@ -9,5 +9,5 @@ torchaudio
torchrec==0.2.0
contexttimer
einops
-triton==2.0.0.dev20221011
+triton==2.0.0.dev20221202
git+https://github.com/HazyResearch/flash-attention.git@c422fee3776eb3ea24e011ef641fd5fbeb212623#egg=flash_attn
|
buildbot__buildbot-4244 | Broken links to and unsuccessful docs building at Read the Docs
PDFs unavailable.
http://media.readthedocs.org/pdf/buildbot/v1.3.0/buildbot.pdf
https://docs.buildbot.net/
https://readthedocs.org/projects/buildbot/
https://readthedocs.org/projects/buildbot/downloads/
https://readthedocs.org/projects/buildbot/builds/
| [
{
"content": "#!/usr/bin/env python\n#\n# This file is part of Buildbot. Buildbot is free software: you can\n# redistribute it and/or modify it under the terms of the GNU General Public\n# License as published by the Free Software Foundation, version 2.\n#\n# This program is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n# details.\n#\n# You should have received a copy of the GNU General Public License along with\n# this program; if not, write to the Free Software Foundation, Inc., 51\n# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Copyright Buildbot Team Members\n\n\"\"\"\nStandard setup script.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import print_function\n\nimport glob\nimport inspect\nimport os\nimport pkg_resources\nimport sys\nfrom distutils.command.install_data import install_data\nfrom distutils.command.sdist import sdist\nfrom distutils.version import LooseVersion\n\nfrom setuptools import setup\n\nfrom buildbot import version\n\nif \"bdist_wheel\" in sys.argv:\n BUILDING_WHEEL = True\nelse:\n BUILDING_WHEEL = False\n\n\ndef include(d, e):\n \"\"\"Generate a pair of (directory, file-list) for installation.\n\n 'd' -- A directory\n 'e' -- A glob pattern\"\"\"\n\n return (d, [f for f in glob.glob('%s/%s' % (d, e)) if os.path.isfile(f)])\n\n\ndef include_statics(d):\n r = []\n for root, ds, fs in os.walk(d):\n r.append((root, [os.path.join(root, f) for f in fs]))\n return r\n\n\nclass install_data_twisted(install_data):\n\n \"\"\"make sure data files are installed in package.\n this is evil.\n copied from Twisted/setup.py.\n \"\"\"\n\n def finalize_options(self):\n self.set_undefined_options('install',\n ('install_lib', 'install_dir'),\n )\n install_data.finalize_options(self)\n\n def run(self):\n install_data.run(self)\n # ensure there's a buildbot/VERSION file\n fn = os.path.join(self.install_dir, 'buildbot', 'VERSION')\n open(fn, 'w').write(version)\n self.outfiles.append(fn)\n\n\nclass our_sdist(sdist):\n\n def make_release_tree(self, base_dir, files):\n sdist.make_release_tree(self, base_dir, files)\n\n # ensure there's a buildbot/VERSION file\n fn = os.path.join(base_dir, 'buildbot', 'VERSION')\n open(fn, 'w').write(version)\n\n # ensure that NEWS has a copy of the latest release notes, with the\n # proper version substituted\n src_fn = os.path.join('docs', 'relnotes/index.rst')\n with open(src_fn) as f:\n src = f.read()\n src = src.replace('|version|', version)\n dst_fn = os.path.join(base_dir, 'NEWS')\n with open(dst_fn, 'w') as f:\n f.write(src)\n\n\ndef define_plugin_entry(name, module_name):\n \"\"\"\n helper to produce lines suitable for setup.py's entry_points\n \"\"\"\n if isinstance(name, tuple):\n entry, name = name\n else:\n entry = name\n return '%s = %s:%s' % (entry, module_name, name)\n\n\ndef concat_dicts(*dicts):\n result = dict()\n for d in dicts:\n result.update(d)\n return result\n\n\ndef define_plugin_entries(groups):\n \"\"\"\n helper to all groups for plugins\n \"\"\"\n result = dict()\n\n for group, modules in groups:\n tempo = []\n for module_name, names in modules:\n tempo.extend([define_plugin_entry(name, module_name)\n for name in names])\n result[group] = tempo\n\n return result\n\n__file__ = inspect.getframeinfo(inspect.currentframe()).filename\n\nwith open(os.path.join(os.path.dirname(__file__), 'README.rst')) as long_d_f:\n long_description 
= long_d_f.read()\n\nsetup_args = {\n 'name': \"buildbot\",\n 'version': version,\n 'description': \"The Continuous Integration Framework\",\n 'long_description': long_description,\n 'author': \"Brian Warner\",\n 'author_email': \"[email protected]\",\n 'maintainer': \"Dustin J. Mitchell\",\n 'maintainer_email': \"[email protected]\",\n 'url': \"http://buildbot.net/\",\n 'license': \"GNU GPL\",\n 'classifiers': [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: No Input/Output (Daemon)',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU General Public License (GPL)',\n 'Topic :: Software Development :: Build Tools',\n 'Topic :: Software Development :: Testing',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6'\n ],\n\n 'packages': [\n \"buildbot\",\n \"buildbot.buildslave\",\n \"buildbot.configurators\",\n \"buildbot.worker\",\n \"buildbot.worker.protocols\",\n \"buildbot.changes\",\n \"buildbot.clients\",\n \"buildbot.data\",\n \"buildbot.db\",\n \"buildbot.db.migrate.versions\",\n \"buildbot.db.types\",\n \"buildbot.monkeypatches\",\n \"buildbot.mq\",\n \"buildbot.plugins\",\n \"buildbot.process\",\n \"buildbot.process.users\",\n \"buildbot.reporters\",\n \"buildbot.schedulers\",\n \"buildbot.scripts\",\n \"buildbot.secrets\",\n \"buildbot.secrets.providers\",\n \"buildbot.statistics\",\n \"buildbot.statistics.storage_backends\",\n \"buildbot.status\",\n \"buildbot.steps\",\n \"buildbot.steps.package\",\n \"buildbot.steps.package.deb\",\n \"buildbot.steps.package.rpm\",\n \"buildbot.steps.source\",\n \"buildbot.util\",\n \"buildbot.wamp\",\n \"buildbot.www\",\n \"buildbot.www.hooks\",\n \"buildbot.www.authz\",\n ] + ([] if BUILDING_WHEEL else [ # skip tests for wheels (save 50% of the archive)\n \"buildbot.test\",\n \"buildbot.test.util\",\n \"buildbot.test.fake\",\n \"buildbot.test.fuzz\",\n \"buildbot.test.integration\",\n \"buildbot.test.integration.interop\",\n \"buildbot.test.regressions\",\n \"buildbot.test.unit\",\n ]),\n 'data_files': [\n (\"buildbot\", [\n \"buildbot/buildbot.png\",\n ]),\n include(\"buildbot/reporters/templates\", \"*.txt\"),\n (\"buildbot/db/migrate\", [\n \"buildbot/db/migrate/migrate.cfg\",\n ]),\n include(\"buildbot/db/migrate/versions\", \"*.py\"),\n (\"buildbot/scripts\", [\n \"buildbot/scripts/sample.cfg\",\n \"buildbot/scripts/buildbot_tac.tmpl\",\n ]),\n include(\"buildbot/spec\", \"*.raml\"),\n include(\"buildbot/spec/types\", \"*.raml\"),\n include(\"buildbot/test/unit/test_templates_dir\", \"*.html\"),\n include(\"buildbot/test/unit/test_templates_dir/plugin\", \"*.*\"),\n ] + include_statics(\"buildbot/www/static\"),\n 'cmdclass': {'install_data': install_data_twisted,\n 'sdist': our_sdist},\n 'entry_points': concat_dicts(define_plugin_entries([\n ('buildbot.changes', [\n ('buildbot.changes.mail', [\n 'MaildirSource', 'CVSMaildirSource',\n 'SVNCommitEmailMaildirSource',\n 'BzrLaunchpadEmailMaildirSource']),\n ('buildbot.changes.bitbucket', ['BitbucketPullrequestPoller']),\n ('buildbot.changes.github', ['GitHubPullrequestPoller']),\n ('buildbot.changes.bonsaipoller', ['BonsaiPoller']),\n ('buildbot.changes.gerritchangesource', ['GerritChangeSource']),\n ('buildbot.changes.gitpoller', ['GitPoller']),\n ('buildbot.changes.hgpoller', ['HgPoller']),\n ('buildbot.changes.p4poller', 
['P4Source']),\n ('buildbot.changes.pb', ['PBChangeSource']),\n ('buildbot.changes.svnpoller', ['SVNPoller'])\n ]),\n ('buildbot.schedulers', [\n ('buildbot.schedulers.basic', [\n 'SingleBranchScheduler', 'AnyBranchScheduler']),\n ('buildbot.schedulers.dependent', ['Dependent']),\n ('buildbot.schedulers.triggerable', ['Triggerable']),\n ('buildbot.schedulers.forcesched', ['ForceScheduler']),\n ('buildbot.schedulers.timed', [\n 'Periodic', 'Nightly', 'NightlyTriggerable']),\n ('buildbot.schedulers.trysched', [\n 'Try_Jobdir', 'Try_Userpass'])\n ]),\n ('buildbot.secrets', [\n ('buildbot.secrets.providers.file', ['SecretInAFile']),\n ('buildbot.secrets.providers.vault', ['HashiCorpVaultSecretProvider'])\n ]),\n ('buildbot.worker', [\n ('buildbot.worker.base', ['Worker']),\n ('buildbot.worker.ec2', ['EC2LatentWorker']),\n ('buildbot.worker.libvirt', ['LibVirtWorker']),\n ('buildbot.worker.openstack', ['OpenStackLatentWorker']),\n ('buildbot.worker.docker', ['DockerLatentWorker']),\n ('buildbot.worker.hyper', ['HyperLatentWorker']),\n ('buildbot.worker.local', ['LocalWorker']),\n ]),\n ('buildbot.steps', [\n ('buildbot.process.buildstep', ['BuildStep']),\n ('buildbot.steps.cmake', ['CMake']),\n ('buildbot.steps.cppcheck', ['Cppcheck']),\n ('buildbot.steps.http', [\n 'HTTPStep', 'POST', 'GET', 'PUT', 'DELETE', 'HEAD',\n 'OPTIONS']),\n ('buildbot.steps.master', [\n 'MasterShellCommand', 'SetProperty', 'SetProperties', 'LogRenderable', \"Assert\"]),\n ('buildbot.steps.maxq', ['MaxQ']),\n ('buildbot.steps.mswin', ['Robocopy']),\n ('buildbot.steps.mtrlogobserver', ['MTR']),\n ('buildbot.steps.package.deb.lintian', ['DebLintian']),\n ('buildbot.steps.package.deb.pbuilder', [\n 'DebPbuilder', 'DebCowbuilder', 'UbuPbuilder',\n 'UbuCowbuilder']),\n ('buildbot.steps.package.rpm.mock', [\n 'Mock', 'MockBuildSRPM', 'MockRebuild']),\n ('buildbot.steps.package.rpm.rpmbuild', ['RpmBuild']),\n ('buildbot.steps.package.rpm.rpmlint', ['RpmLint']),\n ('buildbot.steps.package.rpm.rpmspec', ['RpmSpec']),\n ('buildbot.steps.python', [\n 'BuildEPYDoc', 'PyFlakes', 'PyLint', 'Sphinx']),\n ('buildbot.steps.python_twisted', [\n 'HLint', 'Trial', 'RemovePYCs']),\n ('buildbot.steps.shell', [\n 'ShellCommand', 'TreeSize', 'SetPropertyFromCommand',\n 'Configure', 'WarningCountingShellCommand', 'Compile',\n 'Test', 'PerlModuleTest']),\n ('buildbot.steps.shellsequence', ['ShellSequence']),\n ('buildbot.steps.source.bzr', ['Bzr']),\n ('buildbot.steps.source.cvs', ['CVS']),\n ('buildbot.steps.source.darcs', ['Darcs']),\n ('buildbot.steps.source.gerrit', ['Gerrit']),\n ('buildbot.steps.source.git', ['Git']),\n ('buildbot.steps.source.github', ['GitHub']),\n ('buildbot.steps.source.gitlab', ['GitLab']),\n ('buildbot.steps.source.mercurial', ['Mercurial']),\n ('buildbot.steps.source.mtn', ['Monotone']),\n ('buildbot.steps.source.p4', ['P4']),\n ('buildbot.steps.source.repo', ['Repo']),\n ('buildbot.steps.source.svn', ['SVN']),\n ('buildbot.steps.subunit', ['SubunitShellCommand']),\n ('buildbot.steps.transfer', [\n 'FileUpload', 'DirectoryUpload', 'MultipleFileUpload',\n 'FileDownload', 'StringDownload', 'JSONStringDownload',\n 'JSONPropertiesDownload']),\n ('buildbot.steps.trigger', ['Trigger']),\n ('buildbot.steps.vstudio', [\n 'VC6', 'VC7', 'VS2003', 'VC8', 'VS2005', 'VCExpress9', 'VC9',\n 'VS2008', 'VC10', 'VS2010', 'VC11', 'VS2012', 'VC12', 'VS2013',\n 'VC14', 'VS2015', 'MsBuild4', 'MsBuild', 'MsBuild12', 'MsBuild14']),\n ('buildbot.steps.worker', [\n 'SetPropertiesFromEnv', 'FileExists', 'CopyDirectory',\n 'RemoveDirectory', 
'MakeDirectory']),\n ]),\n ('buildbot.reporters', [\n ('buildbot.reporters.mail', ['MailNotifier']),\n ('buildbot.reporters.pushjet', ['PushjetNotifier']),\n ('buildbot.reporters.pushover', ['PushoverNotifier']),\n ('buildbot.reporters.message', ['MessageFormatter']),\n ('buildbot.reporters.gerrit', ['GerritStatusPush']),\n ('buildbot.reporters.gerrit_verify_status',\n ['GerritVerifyStatusPush']),\n ('buildbot.reporters.hipchat', ['HipChatStatusPush']),\n ('buildbot.reporters.http', ['HttpStatusPush']),\n ('buildbot.reporters.github', ['GitHubStatusPush', 'GitHubCommentPush']),\n ('buildbot.reporters.gitlab', ['GitLabStatusPush']),\n ('buildbot.reporters.stash', ['StashStatusPush']),\n ('buildbot.reporters.bitbucketserver', ['BitbucketServerStatusPush', 'BitbucketServerPRCommentPush']),\n ('buildbot.reporters.bitbucket', ['BitbucketStatusPush']),\n ('buildbot.reporters.irc', ['IRC']),\n ]),\n ('buildbot.util', [\n # Connection seems to be a way too generic name, though\n ('buildbot.worker.libvirt', ['Connection']),\n ('buildbot.changes.filter', ['ChangeFilter']),\n ('buildbot.changes.gerritchangesource', ['GerritChangeFilter']),\n ('buildbot.changes.svnpoller', [\n ('svn.split_file_projects_branches',\n 'split_file_projects_branches'),\n ('svn.split_file_branches', 'split_file_branches'),\n ('svn.split_file_alwaystrunk', 'split_file_alwaystrunk')]),\n ('buildbot.configurators.janitor', ['JanitorConfigurator']),\n ('buildbot.config', ['BuilderConfig']),\n ('buildbot.locks', [\n 'MasterLock',\n 'WorkerLock',\n ]),\n ('buildbot.manhole', [\n 'AuthorizedKeysManhole', 'PasswordManhole', 'TelnetManhole']),\n ('buildbot.process.builder', [\n 'enforceChosenWorker',\n ]),\n ('buildbot.process.factory', [\n 'BuildFactory', 'GNUAutoconf', 'CPAN', 'Distutils', 'Trial',\n 'BasicBuildFactory', 'QuickBuildFactory', 'BasicSVN']),\n ('buildbot.process.logobserver', ['LogLineObserver']),\n ('buildbot.process.properties', [\n 'FlattenList', 'Interpolate', 'Property', 'Transform',\n 'WithProperties', 'renderer', 'Secret']),\n ('buildbot.process.properties', [\n 'CommandlineUserManager']),\n ('buildbot.revlinks', ['RevlinkMatch']),\n ('buildbot.reporters.utils', ['URLForBuild']),\n ('buildbot.schedulers.forcesched', [\n 'AnyPropertyParameter', 'BooleanParameter',\n 'ChoiceStringParameter',\n 'CodebaseParameter', 'FileParameter', 'FixedParameter', 'InheritBuildParameter',\n 'IntParameter', 'NestedParameter', 'ParameterGroup',\n 'PatchParameter',\n 'StringParameter', 'TextParameter', 'UserNameParameter',\n 'WorkerChoiceParameter',\n ]),\n ('buildbot.process.results', [\n 'Results', 'SUCCESS', 'WARNINGS', 'FAILURE', 'SKIPPED',\n 'EXCEPTION', 'RETRY', 'CANCELLED']),\n ('buildbot.steps.mtrlogobserver', ['EqConnectionPool']),\n ('buildbot.steps.source.repo', [\n ('repo.DownloadsFromChangeSource',\n 'RepoDownloadsFromChangeSource'),\n ('repo.DownloadsFromProperties',\n 'RepoDownloadsFromProperties')]),\n ('buildbot.steps.shellsequence', ['ShellArg']),\n ('buildbot.www.avatar', ['AvatarGravatar']),\n ('buildbot.www.auth', [\n 'UserPasswordAuth', 'HTPasswdAuth', 'RemoteUserAuth', 'CustomAuth']),\n ('buildbot.www.ldapuserinfo', ['LdapUserInfo']),\n ('buildbot.www.oauth2', [\n 'GoogleAuth', 'GitHubAuth', 'GitLabAuth', 'BitbucketAuth']),\n ('buildbot.db.dbconfig', [\n 'DbConfig']),\n ('buildbot.www.authz', [\n 'Authz', 'fnmatchStrMatcher', 'reStrMatcher']),\n ('buildbot.www.authz.roles', [\n 'RolesFromEmails', 'RolesFromGroups', 'RolesFromOwner', 'RolesFromUsername',\n 'RolesFromDomain']),\n 
('buildbot.www.authz.endpointmatchers', [\n 'AnyEndpointMatcher', 'StopBuildEndpointMatcher', 'ForceBuildEndpointMatcher',\n 'RebuildBuildEndpointMatcher', 'AnyControlEndpointMatcher', 'EnableSchedulerEndpointMatcher']),\n ]),\n ('buildbot.webhooks', [\n ('buildbot.www.hooks.base', ['base']),\n ('buildbot.www.hooks.bitbucket', ['bitbucket']),\n ('buildbot.www.hooks.github', ['github']),\n ('buildbot.www.hooks.gitlab', ['gitlab']),\n ('buildbot.www.hooks.gitorious', ['gitorious']),\n ('buildbot.www.hooks.poller', ['poller']),\n ('buildbot.www.hooks.bitbucketcloud', ['bitbucketcloud']),\n ('buildbot.www.hooks.bitbucketserver', ['bitbucketserver'])\n ])\n ]), {\n 'console_scripts': [\n 'buildbot=buildbot.scripts.runner:run',\n # this will also be shipped on non windows :-(\n 'buildbot_windows_service=buildbot.scripts.windows_service:HandleCommandLine',\n ]}\n )\n}\n\n# set zip_safe to false to force Windows installs to always unpack eggs\n# into directories, which seems to work better --\n# see http://buildbot.net/trac/ticket/907\nif sys.platform == \"win32\":\n setup_args['zip_safe'] = False\n\npy_27 = sys.version_info[0] > 2 or (\n sys.version_info[0] == 2 and sys.version_info[1] >= 7)\nif not py_27:\n raise RuntimeError(\"Buildbot master requires at least Python-2.7\")\n\n# pip<1.4 doesn't have the --pre flag, and will thus attempt to install alpha\n# and beta versions of Buildbot. Prevent that from happening.\nVERSION_MSG = \"\"\"\nThis is a pre-release version of Buildbot, which can only be installed with\npip-1.4 or later Try installing the latest stable version of Buildbot instead:\n pip install buildbot==0.8.12\nSee https://pypi.python.org/pypi/buildbot to verify the current stable version.\n\"\"\"\nif 'a' in version or 'b' in version:\n try:\n pip_dist = pkg_resources.get_distribution('pip')\n except pkg_resources.DistributionNotFound:\n pip_dist = None\n\n if pip_dist:\n if LooseVersion(pip_dist.version) < LooseVersion('1.4'):\n raise RuntimeError(VERSION_MSG)\n\nif sys.version_info[0] >= 3:\n twisted_ver = \">= 17.9.0\"\nelse:\n twisted_ver = \">= 16.1.0\"\nautobahn_ver = \">= 0.16.0\"\ntxaio_ver = \">= 2.2.2\"\n\nbundle_version = version.split(\"-\")[0]\n\n# dependencies\nsetup_args['install_requires'] = [\n 'setuptools >= 8.0',\n 'Twisted ' + twisted_ver,\n 'Jinja2 >= 2.1',\n # required for tests, but Twisted requires this anyway\n 'zope.interface >= 4.1.1',\n # python-future required for py2/3 compatibility\n 'future',\n 'sqlalchemy>=0.8.0',\n 'sqlalchemy-migrate>=0.9',\n 'python-dateutil>=1.5',\n 'txaio ' + txaio_ver,\n 'autobahn ' + autobahn_ver,\n 'PyJWT',\n]\n\n# Unit test dependencies.\ntest_deps = [\n # http client libraries\n 'treq',\n 'txrequests',\n # pyjade required for custom templates tests\n 'pyjade',\n # boto3 and moto required for running EC2 tests\n 'boto3',\n 'moto',\n # txgithub required to run buildbot.status.github module tests\n 'txgithub',\n 'mock>=2.0.0',\n]\nif sys.platform != 'win32':\n test_deps += [\n # LZ4 fails to build on Windows:\n # https://github.com/steeve/python-lz4/issues/27\n # lz4 required for log compression tests.\n 'lz4',\n ]\n\nsetup_args['tests_require'] = test_deps\n\nsetup_args['extras_require'] = {\n 'test': [\n 'setuptools_trial',\n 'isort',\n # spellcheck introduced in version 1.4.0\n 'pylint<1.7.0',\n 'pyenchant',\n 'flake8~=2.6.0',\n ] + test_deps,\n 'bundle': [\n \"buildbot-www=={0}\".format(bundle_version),\n \"buildbot-worker=={0}\".format(bundle_version),\n \"buildbot-waterfall-view=={0}\".format(bundle_version),\n 
\"buildbot-console-view=={0}\".format(bundle_version),\n \"buildbot-grid-view=={0}\".format(bundle_version),\n ],\n 'tls': [\n 'Twisted[tls] ' + twisted_ver,\n # There are bugs with extras inside extras:\n # <https://github.com/pypa/pip/issues/3516>\n # so we explicitly include Twisted[tls] dependencies.\n 'pyopenssl >= 16.0.0',\n 'service_identity',\n 'idna >= 0.6',\n ],\n 'docs': [\n 'docutils<0.13.0',\n 'sphinx>1.4.0',\n 'sphinxcontrib-blockdiag',\n 'sphinxcontrib-spelling',\n 'pyenchant',\n 'docutils>=0.8',\n 'sphinx-jinja',\n 'towncrier'\n ],\n}\n\nif '--help-commands' in sys.argv or 'trial' in sys.argv or 'test' in sys.argv:\n setup_args['setup_requires'] = [\n 'setuptools_trial',\n ]\n\nif os.getenv('NO_INSTALL_REQS'):\n setup_args['install_requires'] = None\n setup_args['extras_require'] = None\n\nif __name__ == '__main__':\n setup(**setup_args)\n\n# Local Variables:\n# fill-column: 71\n# End:\n",
"path": "master/setup.py"
}
] | [
{
"content": "#!/usr/bin/env python\n#\n# This file is part of Buildbot. Buildbot is free software: you can\n# redistribute it and/or modify it under the terms of the GNU General Public\n# License as published by the Free Software Foundation, version 2.\n#\n# This program is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n# details.\n#\n# You should have received a copy of the GNU General Public License along with\n# this program; if not, write to the Free Software Foundation, Inc., 51\n# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Copyright Buildbot Team Members\n\n\"\"\"\nStandard setup script.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import print_function\n\nimport glob\nimport inspect\nimport os\nimport pkg_resources\nimport sys\nfrom distutils.command.install_data import install_data\nfrom distutils.command.sdist import sdist\nfrom distutils.version import LooseVersion\n\nfrom setuptools import setup\n\nfrom buildbot import version\n\nif \"bdist_wheel\" in sys.argv:\n BUILDING_WHEEL = True\nelse:\n BUILDING_WHEEL = False\n\n\ndef include(d, e):\n \"\"\"Generate a pair of (directory, file-list) for installation.\n\n 'd' -- A directory\n 'e' -- A glob pattern\"\"\"\n\n return (d, [f for f in glob.glob('%s/%s' % (d, e)) if os.path.isfile(f)])\n\n\ndef include_statics(d):\n r = []\n for root, ds, fs in os.walk(d):\n r.append((root, [os.path.join(root, f) for f in fs]))\n return r\n\n\nclass install_data_twisted(install_data):\n\n \"\"\"make sure data files are installed in package.\n this is evil.\n copied from Twisted/setup.py.\n \"\"\"\n\n def finalize_options(self):\n self.set_undefined_options('install',\n ('install_lib', 'install_dir'),\n )\n install_data.finalize_options(self)\n\n def run(self):\n install_data.run(self)\n # ensure there's a buildbot/VERSION file\n fn = os.path.join(self.install_dir, 'buildbot', 'VERSION')\n open(fn, 'w').write(version)\n self.outfiles.append(fn)\n\n\nclass our_sdist(sdist):\n\n def make_release_tree(self, base_dir, files):\n sdist.make_release_tree(self, base_dir, files)\n\n # ensure there's a buildbot/VERSION file\n fn = os.path.join(base_dir, 'buildbot', 'VERSION')\n open(fn, 'w').write(version)\n\n # ensure that NEWS has a copy of the latest release notes, with the\n # proper version substituted\n src_fn = os.path.join('docs', 'relnotes/index.rst')\n with open(src_fn) as f:\n src = f.read()\n src = src.replace('|version|', version)\n dst_fn = os.path.join(base_dir, 'NEWS')\n with open(dst_fn, 'w') as f:\n f.write(src)\n\n\ndef define_plugin_entry(name, module_name):\n \"\"\"\n helper to produce lines suitable for setup.py's entry_points\n \"\"\"\n if isinstance(name, tuple):\n entry, name = name\n else:\n entry = name\n return '%s = %s:%s' % (entry, module_name, name)\n\n\ndef concat_dicts(*dicts):\n result = dict()\n for d in dicts:\n result.update(d)\n return result\n\n\ndef define_plugin_entries(groups):\n \"\"\"\n helper to all groups for plugins\n \"\"\"\n result = dict()\n\n for group, modules in groups:\n tempo = []\n for module_name, names in modules:\n tempo.extend([define_plugin_entry(name, module_name)\n for name in names])\n result[group] = tempo\n\n return result\n\n__file__ = inspect.getframeinfo(inspect.currentframe()).filename\n\nwith open(os.path.join(os.path.dirname(__file__), 'README.rst')) as long_d_f:\n long_description 
= long_d_f.read()\n\nsetup_args = {\n 'name': \"buildbot\",\n 'version': version,\n 'description': \"The Continuous Integration Framework\",\n 'long_description': long_description,\n 'author': \"Brian Warner\",\n 'author_email': \"[email protected]\",\n 'maintainer': \"Dustin J. Mitchell\",\n 'maintainer_email': \"[email protected]\",\n 'url': \"http://buildbot.net/\",\n 'license': \"GNU GPL\",\n 'classifiers': [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: No Input/Output (Daemon)',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU General Public License (GPL)',\n 'Topic :: Software Development :: Build Tools',\n 'Topic :: Software Development :: Testing',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6'\n ],\n\n 'packages': [\n \"buildbot\",\n \"buildbot.buildslave\",\n \"buildbot.configurators\",\n \"buildbot.worker\",\n \"buildbot.worker.protocols\",\n \"buildbot.changes\",\n \"buildbot.clients\",\n \"buildbot.data\",\n \"buildbot.db\",\n \"buildbot.db.migrate.versions\",\n \"buildbot.db.types\",\n \"buildbot.monkeypatches\",\n \"buildbot.mq\",\n \"buildbot.plugins\",\n \"buildbot.process\",\n \"buildbot.process.users\",\n \"buildbot.reporters\",\n \"buildbot.schedulers\",\n \"buildbot.scripts\",\n \"buildbot.secrets\",\n \"buildbot.secrets.providers\",\n \"buildbot.statistics\",\n \"buildbot.statistics.storage_backends\",\n \"buildbot.status\",\n \"buildbot.steps\",\n \"buildbot.steps.package\",\n \"buildbot.steps.package.deb\",\n \"buildbot.steps.package.rpm\",\n \"buildbot.steps.source\",\n \"buildbot.util\",\n \"buildbot.wamp\",\n \"buildbot.www\",\n \"buildbot.www.hooks\",\n \"buildbot.www.authz\",\n ] + ([] if BUILDING_WHEEL else [ # skip tests for wheels (save 50% of the archive)\n \"buildbot.test\",\n \"buildbot.test.util\",\n \"buildbot.test.fake\",\n \"buildbot.test.fuzz\",\n \"buildbot.test.integration\",\n \"buildbot.test.integration.interop\",\n \"buildbot.test.regressions\",\n \"buildbot.test.unit\",\n ]),\n 'data_files': [\n (\"buildbot\", [\n \"buildbot/buildbot.png\",\n ]),\n include(\"buildbot/reporters/templates\", \"*.txt\"),\n (\"buildbot/db/migrate\", [\n \"buildbot/db/migrate/migrate.cfg\",\n ]),\n include(\"buildbot/db/migrate/versions\", \"*.py\"),\n (\"buildbot/scripts\", [\n \"buildbot/scripts/sample.cfg\",\n \"buildbot/scripts/buildbot_tac.tmpl\",\n ]),\n include(\"buildbot/spec\", \"*.raml\"),\n include(\"buildbot/spec/types\", \"*.raml\"),\n include(\"buildbot/test/unit/test_templates_dir\", \"*.html\"),\n include(\"buildbot/test/unit/test_templates_dir/plugin\", \"*.*\"),\n ] + include_statics(\"buildbot/www/static\"),\n 'cmdclass': {'install_data': install_data_twisted,\n 'sdist': our_sdist},\n 'entry_points': concat_dicts(define_plugin_entries([\n ('buildbot.changes', [\n ('buildbot.changes.mail', [\n 'MaildirSource', 'CVSMaildirSource',\n 'SVNCommitEmailMaildirSource',\n 'BzrLaunchpadEmailMaildirSource']),\n ('buildbot.changes.bitbucket', ['BitbucketPullrequestPoller']),\n ('buildbot.changes.github', ['GitHubPullrequestPoller']),\n ('buildbot.changes.bonsaipoller', ['BonsaiPoller']),\n ('buildbot.changes.gerritchangesource', ['GerritChangeSource']),\n ('buildbot.changes.gitpoller', ['GitPoller']),\n ('buildbot.changes.hgpoller', ['HgPoller']),\n ('buildbot.changes.p4poller', 
['P4Source']),\n ('buildbot.changes.pb', ['PBChangeSource']),\n ('buildbot.changes.svnpoller', ['SVNPoller'])\n ]),\n ('buildbot.schedulers', [\n ('buildbot.schedulers.basic', [\n 'SingleBranchScheduler', 'AnyBranchScheduler']),\n ('buildbot.schedulers.dependent', ['Dependent']),\n ('buildbot.schedulers.triggerable', ['Triggerable']),\n ('buildbot.schedulers.forcesched', ['ForceScheduler']),\n ('buildbot.schedulers.timed', [\n 'Periodic', 'Nightly', 'NightlyTriggerable']),\n ('buildbot.schedulers.trysched', [\n 'Try_Jobdir', 'Try_Userpass'])\n ]),\n ('buildbot.secrets', [\n ('buildbot.secrets.providers.file', ['SecretInAFile']),\n ('buildbot.secrets.providers.vault', ['HashiCorpVaultSecretProvider'])\n ]),\n ('buildbot.worker', [\n ('buildbot.worker.base', ['Worker']),\n ('buildbot.worker.ec2', ['EC2LatentWorker']),\n ('buildbot.worker.libvirt', ['LibVirtWorker']),\n ('buildbot.worker.openstack', ['OpenStackLatentWorker']),\n ('buildbot.worker.docker', ['DockerLatentWorker']),\n ('buildbot.worker.hyper', ['HyperLatentWorker']),\n ('buildbot.worker.local', ['LocalWorker']),\n ]),\n ('buildbot.steps', [\n ('buildbot.process.buildstep', ['BuildStep']),\n ('buildbot.steps.cmake', ['CMake']),\n ('buildbot.steps.cppcheck', ['Cppcheck']),\n ('buildbot.steps.http', [\n 'HTTPStep', 'POST', 'GET', 'PUT', 'DELETE', 'HEAD',\n 'OPTIONS']),\n ('buildbot.steps.master', [\n 'MasterShellCommand', 'SetProperty', 'SetProperties', 'LogRenderable', \"Assert\"]),\n ('buildbot.steps.maxq', ['MaxQ']),\n ('buildbot.steps.mswin', ['Robocopy']),\n ('buildbot.steps.mtrlogobserver', ['MTR']),\n ('buildbot.steps.package.deb.lintian', ['DebLintian']),\n ('buildbot.steps.package.deb.pbuilder', [\n 'DebPbuilder', 'DebCowbuilder', 'UbuPbuilder',\n 'UbuCowbuilder']),\n ('buildbot.steps.package.rpm.mock', [\n 'Mock', 'MockBuildSRPM', 'MockRebuild']),\n ('buildbot.steps.package.rpm.rpmbuild', ['RpmBuild']),\n ('buildbot.steps.package.rpm.rpmlint', ['RpmLint']),\n ('buildbot.steps.package.rpm.rpmspec', ['RpmSpec']),\n ('buildbot.steps.python', [\n 'BuildEPYDoc', 'PyFlakes', 'PyLint', 'Sphinx']),\n ('buildbot.steps.python_twisted', [\n 'HLint', 'Trial', 'RemovePYCs']),\n ('buildbot.steps.shell', [\n 'ShellCommand', 'TreeSize', 'SetPropertyFromCommand',\n 'Configure', 'WarningCountingShellCommand', 'Compile',\n 'Test', 'PerlModuleTest']),\n ('buildbot.steps.shellsequence', ['ShellSequence']),\n ('buildbot.steps.source.bzr', ['Bzr']),\n ('buildbot.steps.source.cvs', ['CVS']),\n ('buildbot.steps.source.darcs', ['Darcs']),\n ('buildbot.steps.source.gerrit', ['Gerrit']),\n ('buildbot.steps.source.git', ['Git']),\n ('buildbot.steps.source.github', ['GitHub']),\n ('buildbot.steps.source.gitlab', ['GitLab']),\n ('buildbot.steps.source.mercurial', ['Mercurial']),\n ('buildbot.steps.source.mtn', ['Monotone']),\n ('buildbot.steps.source.p4', ['P4']),\n ('buildbot.steps.source.repo', ['Repo']),\n ('buildbot.steps.source.svn', ['SVN']),\n ('buildbot.steps.subunit', ['SubunitShellCommand']),\n ('buildbot.steps.transfer', [\n 'FileUpload', 'DirectoryUpload', 'MultipleFileUpload',\n 'FileDownload', 'StringDownload', 'JSONStringDownload',\n 'JSONPropertiesDownload']),\n ('buildbot.steps.trigger', ['Trigger']),\n ('buildbot.steps.vstudio', [\n 'VC6', 'VC7', 'VS2003', 'VC8', 'VS2005', 'VCExpress9', 'VC9',\n 'VS2008', 'VC10', 'VS2010', 'VC11', 'VS2012', 'VC12', 'VS2013',\n 'VC14', 'VS2015', 'MsBuild4', 'MsBuild', 'MsBuild12', 'MsBuild14']),\n ('buildbot.steps.worker', [\n 'SetPropertiesFromEnv', 'FileExists', 'CopyDirectory',\n 'RemoveDirectory', 
'MakeDirectory']),\n ]),\n ('buildbot.reporters', [\n ('buildbot.reporters.mail', ['MailNotifier']),\n ('buildbot.reporters.pushjet', ['PushjetNotifier']),\n ('buildbot.reporters.pushover', ['PushoverNotifier']),\n ('buildbot.reporters.message', ['MessageFormatter']),\n ('buildbot.reporters.gerrit', ['GerritStatusPush']),\n ('buildbot.reporters.gerrit_verify_status',\n ['GerritVerifyStatusPush']),\n ('buildbot.reporters.hipchat', ['HipChatStatusPush']),\n ('buildbot.reporters.http', ['HttpStatusPush']),\n ('buildbot.reporters.github', ['GitHubStatusPush', 'GitHubCommentPush']),\n ('buildbot.reporters.gitlab', ['GitLabStatusPush']),\n ('buildbot.reporters.stash', ['StashStatusPush']),\n ('buildbot.reporters.bitbucketserver', ['BitbucketServerStatusPush', 'BitbucketServerPRCommentPush']),\n ('buildbot.reporters.bitbucket', ['BitbucketStatusPush']),\n ('buildbot.reporters.irc', ['IRC']),\n ]),\n ('buildbot.util', [\n # Connection seems to be a way too generic name, though\n ('buildbot.worker.libvirt', ['Connection']),\n ('buildbot.changes.filter', ['ChangeFilter']),\n ('buildbot.changes.gerritchangesource', ['GerritChangeFilter']),\n ('buildbot.changes.svnpoller', [\n ('svn.split_file_projects_branches',\n 'split_file_projects_branches'),\n ('svn.split_file_branches', 'split_file_branches'),\n ('svn.split_file_alwaystrunk', 'split_file_alwaystrunk')]),\n ('buildbot.configurators.janitor', ['JanitorConfigurator']),\n ('buildbot.config', ['BuilderConfig']),\n ('buildbot.locks', [\n 'MasterLock',\n 'WorkerLock',\n ]),\n ('buildbot.manhole', [\n 'AuthorizedKeysManhole', 'PasswordManhole', 'TelnetManhole']),\n ('buildbot.process.builder', [\n 'enforceChosenWorker',\n ]),\n ('buildbot.process.factory', [\n 'BuildFactory', 'GNUAutoconf', 'CPAN', 'Distutils', 'Trial',\n 'BasicBuildFactory', 'QuickBuildFactory', 'BasicSVN']),\n ('buildbot.process.logobserver', ['LogLineObserver']),\n ('buildbot.process.properties', [\n 'FlattenList', 'Interpolate', 'Property', 'Transform',\n 'WithProperties', 'renderer', 'Secret']),\n ('buildbot.process.properties', [\n 'CommandlineUserManager']),\n ('buildbot.revlinks', ['RevlinkMatch']),\n ('buildbot.reporters.utils', ['URLForBuild']),\n ('buildbot.schedulers.forcesched', [\n 'AnyPropertyParameter', 'BooleanParameter',\n 'ChoiceStringParameter',\n 'CodebaseParameter', 'FileParameter', 'FixedParameter', 'InheritBuildParameter',\n 'IntParameter', 'NestedParameter', 'ParameterGroup',\n 'PatchParameter',\n 'StringParameter', 'TextParameter', 'UserNameParameter',\n 'WorkerChoiceParameter',\n ]),\n ('buildbot.process.results', [\n 'Results', 'SUCCESS', 'WARNINGS', 'FAILURE', 'SKIPPED',\n 'EXCEPTION', 'RETRY', 'CANCELLED']),\n ('buildbot.steps.mtrlogobserver', ['EqConnectionPool']),\n ('buildbot.steps.source.repo', [\n ('repo.DownloadsFromChangeSource',\n 'RepoDownloadsFromChangeSource'),\n ('repo.DownloadsFromProperties',\n 'RepoDownloadsFromProperties')]),\n ('buildbot.steps.shellsequence', ['ShellArg']),\n ('buildbot.www.avatar', ['AvatarGravatar']),\n ('buildbot.www.auth', [\n 'UserPasswordAuth', 'HTPasswdAuth', 'RemoteUserAuth', 'CustomAuth']),\n ('buildbot.www.ldapuserinfo', ['LdapUserInfo']),\n ('buildbot.www.oauth2', [\n 'GoogleAuth', 'GitHubAuth', 'GitLabAuth', 'BitbucketAuth']),\n ('buildbot.db.dbconfig', [\n 'DbConfig']),\n ('buildbot.www.authz', [\n 'Authz', 'fnmatchStrMatcher', 'reStrMatcher']),\n ('buildbot.www.authz.roles', [\n 'RolesFromEmails', 'RolesFromGroups', 'RolesFromOwner', 'RolesFromUsername',\n 'RolesFromDomain']),\n 
('buildbot.www.authz.endpointmatchers', [\n 'AnyEndpointMatcher', 'StopBuildEndpointMatcher', 'ForceBuildEndpointMatcher',\n 'RebuildBuildEndpointMatcher', 'AnyControlEndpointMatcher', 'EnableSchedulerEndpointMatcher']),\n ]),\n ('buildbot.webhooks', [\n ('buildbot.www.hooks.base', ['base']),\n ('buildbot.www.hooks.bitbucket', ['bitbucket']),\n ('buildbot.www.hooks.github', ['github']),\n ('buildbot.www.hooks.gitlab', ['gitlab']),\n ('buildbot.www.hooks.gitorious', ['gitorious']),\n ('buildbot.www.hooks.poller', ['poller']),\n ('buildbot.www.hooks.bitbucketcloud', ['bitbucketcloud']),\n ('buildbot.www.hooks.bitbucketserver', ['bitbucketserver'])\n ])\n ]), {\n 'console_scripts': [\n 'buildbot=buildbot.scripts.runner:run',\n # this will also be shipped on non windows :-(\n 'buildbot_windows_service=buildbot.scripts.windows_service:HandleCommandLine',\n ]}\n )\n}\n\n# set zip_safe to false to force Windows installs to always unpack eggs\n# into directories, which seems to work better --\n# see http://buildbot.net/trac/ticket/907\nif sys.platform == \"win32\":\n setup_args['zip_safe'] = False\n\npy_27 = sys.version_info[0] > 2 or (\n sys.version_info[0] == 2 and sys.version_info[1] >= 7)\nif not py_27:\n raise RuntimeError(\"Buildbot master requires at least Python-2.7\")\n\n# pip<1.4 doesn't have the --pre flag, and will thus attempt to install alpha\n# and beta versions of Buildbot. Prevent that from happening.\nVERSION_MSG = \"\"\"\nThis is a pre-release version of Buildbot, which can only be installed with\npip-1.4 or later Try installing the latest stable version of Buildbot instead:\n pip install buildbot==0.8.12\nSee https://pypi.python.org/pypi/buildbot to verify the current stable version.\n\"\"\"\nif 'a' in version or 'b' in version:\n try:\n pip_dist = pkg_resources.get_distribution('pip')\n except pkg_resources.DistributionNotFound:\n pip_dist = None\n\n if pip_dist:\n if LooseVersion(pip_dist.version) < LooseVersion('1.4'):\n raise RuntimeError(VERSION_MSG)\n\nif sys.version_info[0] >= 3:\n twisted_ver = \">= 17.9.0\"\nelse:\n twisted_ver = \">= 16.1.0\"\nautobahn_ver = \">= 0.16.0\"\ntxaio_ver = \">= 2.2.2\"\n\nbundle_version = version.split(\"-\")[0]\n\n# dependencies\nsetup_args['install_requires'] = [\n 'setuptools >= 8.0',\n 'Twisted ' + twisted_ver,\n 'Jinja2 >= 2.1',\n # required for tests, but Twisted requires this anyway\n 'zope.interface >= 4.1.1',\n # python-future required for py2/3 compatibility\n 'future',\n 'sqlalchemy>=0.8.0',\n 'sqlalchemy-migrate>=0.9',\n 'python-dateutil>=1.5',\n 'txaio ' + txaio_ver,\n 'autobahn ' + autobahn_ver,\n 'PyJWT',\n]\n\n# Unit test dependencies.\ntest_deps = [\n # http client libraries\n 'treq',\n 'txrequests',\n # pyjade required for custom templates tests\n 'pyjade',\n # boto3 and moto required for running EC2 tests\n 'boto3',\n 'moto',\n # txgithub required to run buildbot.status.github module tests\n 'txgithub',\n 'mock>=2.0.0',\n]\nif sys.platform != 'win32':\n test_deps += [\n # LZ4 fails to build on Windows:\n # https://github.com/steeve/python-lz4/issues/27\n # lz4 required for log compression tests.\n 'lz4',\n ]\n\nsetup_args['tests_require'] = test_deps\n\nsetup_args['extras_require'] = {\n 'test': [\n 'setuptools_trial',\n 'isort',\n # spellcheck introduced in version 1.4.0\n 'pylint<1.7.0',\n 'pyenchant',\n 'flake8~=2.6.0',\n ] + test_deps,\n 'bundle': [\n \"buildbot-www=={0}\".format(bundle_version),\n \"buildbot-worker=={0}\".format(bundle_version),\n \"buildbot-waterfall-view=={0}\".format(bundle_version),\n 
\"buildbot-console-view=={0}\".format(bundle_version),\n \"buildbot-grid-view=={0}\".format(bundle_version),\n ],\n 'tls': [\n 'Twisted[tls] ' + twisted_ver,\n # There are bugs with extras inside extras:\n # <https://github.com/pypa/pip/issues/3516>\n # so we explicitly include Twisted[tls] dependencies.\n 'pyopenssl >= 16.0.0',\n 'service_identity',\n 'idna >= 0.6',\n ],\n 'docs': [\n 'docutils<0.13.0',\n 'sphinx>1.4.0',\n 'sphinxcontrib-blockdiag',\n 'sphinxcontrib-spelling',\n 'pyenchant',\n 'docutils>=0.8',\n 'sphinx-jinja',\n 'towncrier',\n 'yaml'\n ],\n}\n\nif '--help-commands' in sys.argv or 'trial' in sys.argv or 'test' in sys.argv:\n setup_args['setup_requires'] = [\n 'setuptools_trial',\n ]\n\nif os.getenv('NO_INSTALL_REQS'):\n setup_args['install_requires'] = None\n setup_args['extras_require'] = None\n\nif __name__ == '__main__':\n setup(**setup_args)\n\n# Local Variables:\n# fill-column: 71\n# End:\n",
"path": "master/setup.py"
}
] | diff --git a/master/setup.py b/master/setup.py
index e7ec7c131624..d79e2dc9890e 100755
--- a/master/setup.py
+++ b/master/setup.py
@@ -540,7 +540,8 @@ def define_plugin_entries(groups):
'pyenchant',
'docutils>=0.8',
'sphinx-jinja',
- 'towncrier'
+ 'towncrier',
+ 'yaml'
],
}
|
TheAlgorithms__Python-7390 | [PYTEST WARNING] Horn Schunck
### Feature description
@skief @poyea Please could you resolve this warning
```
computer_vision/horn_schunck.py:15
/home/runner/work/Python/Python/computer_vision/horn_schunck.py:15:
DeprecationWarning: Please use `convolve` from the `scipy.ndimage` namespace, the `scipy.ndimage.filters` namespace is deprecated.
from scipy.ndimage.filters import convolve
```
origin: #7211
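As a brief illustration (added here for clarity, not part of the original issue), silencing the warning only requires importing `convolve` from the public `scipy.ndimage` namespace instead of the deprecated `scipy.ndimage.filters` module; the call signature is unchanged:

```
# Deprecated form that triggers the DeprecationWarning:
#   from scipy.ndimage.filters import convolve
# Current public namespace:
import numpy as np
from scipy.ndimage import convolve

image = np.arange(9, dtype=float).reshape(3, 3)
kernel = np.array([[-1, 1], [-1, 1]]) * 0.25
print(convolve(image, kernel))  # same behaviour as before, no warning
```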
| [
{
"content": "\"\"\"\n The Horn-Schunck method estimates the optical flow for every single pixel of\n a sequence of images.\n It works by assuming brightness constancy between two consecutive frames\n and smoothness in the optical flow.\n\n Useful resources:\n Wikipedia: https://en.wikipedia.org/wiki/Horn%E2%80%93Schunck_method\n Paper: http://image.diku.dk/imagecanon/material/HornSchunckOptical_Flow.pdf\n\"\"\"\n\nfrom typing import SupportsIndex\n\nimport numpy as np\nfrom scipy.ndimage.filters import convolve\n\n\ndef warp(\n image: np.ndarray, horizontal_flow: np.ndarray, vertical_flow: np.ndarray\n) -> np.ndarray:\n \"\"\"\n Warps the pixels of an image into a new image using the horizontal and vertical\n flows.\n Pixels that are warped from an invalid location are set to 0.\n\n Parameters:\n image: Grayscale image\n horizontal_flow: Horizontal flow\n vertical_flow: Vertical flow\n\n Returns: Warped image\n\n >>> warp(np.array([[0, 1, 2], [0, 3, 0], [2, 2, 2]]), \\\n np.array([[0, 1, -1], [-1, 0, 0], [1, 1, 1]]), \\\n np.array([[0, 0, 0], [0, 1, 0], [0, 0, 1]]))\n array([[0, 0, 0],\n [3, 1, 0],\n [0, 2, 3]])\n \"\"\"\n flow = np.stack((horizontal_flow, vertical_flow), 2)\n\n # Create a grid of all pixel coordinates and subtract the flow to get the\n # target pixels coordinates\n grid = np.stack(\n np.meshgrid(np.arange(0, image.shape[1]), np.arange(0, image.shape[0])), 2\n )\n grid = np.round(grid - flow).astype(np.int32)\n\n # Find the locations outside of the original image\n invalid = (grid < 0) | (grid >= np.array([image.shape[1], image.shape[0]]))\n grid[invalid] = 0\n\n warped = image[grid[:, :, 1], grid[:, :, 0]]\n\n # Set pixels at invalid locations to 0\n warped[invalid[:, :, 0] | invalid[:, :, 1]] = 0\n\n return warped\n\n\ndef horn_schunck(\n image0: np.ndarray,\n image1: np.ndarray,\n num_iter: SupportsIndex,\n alpha: float | None = None,\n) -> tuple[np.ndarray, np.ndarray]:\n \"\"\"\n This function performs the Horn-Schunck algorithm and returns the estimated\n optical flow. 
It is assumed that the input images are grayscale and\n normalized to be in [0, 1].\n\n Parameters:\n image0: First image of the sequence\n image1: Second image of the sequence\n alpha: Regularization constant\n num_iter: Number of iterations performed\n\n Returns: estimated horizontal & vertical flow\n\n >>> np.round(horn_schunck(np.array([[0, 0, 2], [0, 0, 2]]), \\\n np.array([[0, 2, 0], [0, 2, 0]]), alpha=0.1, num_iter=110)).\\\n astype(np.int32)\n array([[[ 0, -1, -1],\n [ 0, -1, -1]],\n <BLANKLINE>\n [[ 0, 0, 0],\n [ 0, 0, 0]]], dtype=int32)\n \"\"\"\n if alpha is None:\n alpha = 0.1\n\n # Initialize flow\n horizontal_flow = np.zeros_like(image0)\n vertical_flow = np.zeros_like(image0)\n\n # Prepare kernels for the calculation of the derivatives and the average velocity\n kernel_x = np.array([[-1, 1], [-1, 1]]) * 0.25\n kernel_y = np.array([[-1, -1], [1, 1]]) * 0.25\n kernel_t = np.array([[1, 1], [1, 1]]) * 0.25\n kernel_laplacian = np.array(\n [[1 / 12, 1 / 6, 1 / 12], [1 / 6, 0, 1 / 6], [1 / 12, 1 / 6, 1 / 12]]\n )\n\n # Iteratively refine the flow\n for _ in range(num_iter):\n warped_image = warp(image0, horizontal_flow, vertical_flow)\n derivative_x = convolve(warped_image, kernel_x) + convolve(image1, kernel_x)\n derivative_y = convolve(warped_image, kernel_y) + convolve(image1, kernel_y)\n derivative_t = convolve(warped_image, kernel_t) + convolve(image1, -kernel_t)\n\n avg_horizontal_velocity = convolve(horizontal_flow, kernel_laplacian)\n avg_vertical_velocity = convolve(vertical_flow, kernel_laplacian)\n\n # This updates the flow as proposed in the paper (Step 12)\n update = (\n derivative_x * avg_horizontal_velocity\n + derivative_y * avg_vertical_velocity\n + derivative_t\n )\n update = update / (alpha**2 + derivative_x**2 + derivative_y**2)\n\n horizontal_flow = avg_horizontal_velocity - derivative_x * update\n vertical_flow = avg_vertical_velocity - derivative_y * update\n\n return horizontal_flow, vertical_flow\n\n\nif __name__ == \"__main__\":\n import doctest\n\n doctest.testmod()\n",
"path": "computer_vision/horn_schunck.py"
}
] | [
{
"content": "\"\"\"\n The Horn-Schunck method estimates the optical flow for every single pixel of\n a sequence of images.\n It works by assuming brightness constancy between two consecutive frames\n and smoothness in the optical flow.\n\n Useful resources:\n Wikipedia: https://en.wikipedia.org/wiki/Horn%E2%80%93Schunck_method\n Paper: http://image.diku.dk/imagecanon/material/HornSchunckOptical_Flow.pdf\n\"\"\"\n\nfrom typing import SupportsIndex\n\nimport numpy as np\nfrom scipy.ndimage import convolve\n\n\ndef warp(\n image: np.ndarray, horizontal_flow: np.ndarray, vertical_flow: np.ndarray\n) -> np.ndarray:\n \"\"\"\n Warps the pixels of an image into a new image using the horizontal and vertical\n flows.\n Pixels that are warped from an invalid location are set to 0.\n\n Parameters:\n image: Grayscale image\n horizontal_flow: Horizontal flow\n vertical_flow: Vertical flow\n\n Returns: Warped image\n\n >>> warp(np.array([[0, 1, 2], [0, 3, 0], [2, 2, 2]]), \\\n np.array([[0, 1, -1], [-1, 0, 0], [1, 1, 1]]), \\\n np.array([[0, 0, 0], [0, 1, 0], [0, 0, 1]]))\n array([[0, 0, 0],\n [3, 1, 0],\n [0, 2, 3]])\n \"\"\"\n flow = np.stack((horizontal_flow, vertical_flow), 2)\n\n # Create a grid of all pixel coordinates and subtract the flow to get the\n # target pixels coordinates\n grid = np.stack(\n np.meshgrid(np.arange(0, image.shape[1]), np.arange(0, image.shape[0])), 2\n )\n grid = np.round(grid - flow).astype(np.int32)\n\n # Find the locations outside of the original image\n invalid = (grid < 0) | (grid >= np.array([image.shape[1], image.shape[0]]))\n grid[invalid] = 0\n\n warped = image[grid[:, :, 1], grid[:, :, 0]]\n\n # Set pixels at invalid locations to 0\n warped[invalid[:, :, 0] | invalid[:, :, 1]] = 0\n\n return warped\n\n\ndef horn_schunck(\n image0: np.ndarray,\n image1: np.ndarray,\n num_iter: SupportsIndex,\n alpha: float | None = None,\n) -> tuple[np.ndarray, np.ndarray]:\n \"\"\"\n This function performs the Horn-Schunck algorithm and returns the estimated\n optical flow. 
It is assumed that the input images are grayscale and\n normalized to be in [0, 1].\n\n Parameters:\n image0: First image of the sequence\n image1: Second image of the sequence\n alpha: Regularization constant\n num_iter: Number of iterations performed\n\n Returns: estimated horizontal & vertical flow\n\n >>> np.round(horn_schunck(np.array([[0, 0, 2], [0, 0, 2]]), \\\n np.array([[0, 2, 0], [0, 2, 0]]), alpha=0.1, num_iter=110)).\\\n astype(np.int32)\n array([[[ 0, -1, -1],\n [ 0, -1, -1]],\n <BLANKLINE>\n [[ 0, 0, 0],\n [ 0, 0, 0]]], dtype=int32)\n \"\"\"\n if alpha is None:\n alpha = 0.1\n\n # Initialize flow\n horizontal_flow = np.zeros_like(image0)\n vertical_flow = np.zeros_like(image0)\n\n # Prepare kernels for the calculation of the derivatives and the average velocity\n kernel_x = np.array([[-1, 1], [-1, 1]]) * 0.25\n kernel_y = np.array([[-1, -1], [1, 1]]) * 0.25\n kernel_t = np.array([[1, 1], [1, 1]]) * 0.25\n kernel_laplacian = np.array(\n [[1 / 12, 1 / 6, 1 / 12], [1 / 6, 0, 1 / 6], [1 / 12, 1 / 6, 1 / 12]]\n )\n\n # Iteratively refine the flow\n for _ in range(num_iter):\n warped_image = warp(image0, horizontal_flow, vertical_flow)\n derivative_x = convolve(warped_image, kernel_x) + convolve(image1, kernel_x)\n derivative_y = convolve(warped_image, kernel_y) + convolve(image1, kernel_y)\n derivative_t = convolve(warped_image, kernel_t) + convolve(image1, -kernel_t)\n\n avg_horizontal_velocity = convolve(horizontal_flow, kernel_laplacian)\n avg_vertical_velocity = convolve(vertical_flow, kernel_laplacian)\n\n # This updates the flow as proposed in the paper (Step 12)\n update = (\n derivative_x * avg_horizontal_velocity\n + derivative_y * avg_vertical_velocity\n + derivative_t\n )\n update = update / (alpha**2 + derivative_x**2 + derivative_y**2)\n\n horizontal_flow = avg_horizontal_velocity - derivative_x * update\n vertical_flow = avg_vertical_velocity - derivative_y * update\n\n return horizontal_flow, vertical_flow\n\n\nif __name__ == \"__main__\":\n import doctest\n\n doctest.testmod()\n",
"path": "computer_vision/horn_schunck.py"
}
] | diff --git a/computer_vision/horn_schunck.py b/computer_vision/horn_schunck.py
index 2a153d06ddae..b63e0268294c 100644
--- a/computer_vision/horn_schunck.py
+++ b/computer_vision/horn_schunck.py
@@ -12,7 +12,7 @@
from typing import SupportsIndex
import numpy as np
-from scipy.ndimage.filters import convolve
+from scipy.ndimage import convolve
def warp(
|
mlcommons__GaNDLF-722 | Move unit testing data to the MLCommons Storage
**Is your feature request related to a problem? Please describe.**
Currently, the unit testing data is on UPenn Box - which is inconvenient for someone without access who wants to make any updates.
**Describe the solution you'd like**
Changing this to the MLCommons storage would make things much easier from an admin perspective.
**Describe alternatives you've considered**
N.A.
**Additional context**
N.A.
| [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error))\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [os.path.join(\"../\", item) for item in all_extra_files]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==1.13.1\",\n \"black\",\n \"numpy==1.22.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n \"pandas<2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"requests>=2.25.0\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==0.8.1\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.2\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">=3.8\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n 
\"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error))\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [os.path.join(\"../\", item) for item in all_extra_files]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==1.13.1\",\n \"black\",\n \"numpy==1.22.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.75\",\n \"pandas<2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==0.8.1\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.2\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">=3.8\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n 
\"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | diff --git a/setup.py b/setup.py
index e744e4835..332d6087a 100644
--- a/setup.py
+++ b/setup.py
@@ -92,7 +92,7 @@ def run(self):
"pyyaml",
"tiffslide",
"matplotlib",
- "requests>=2.25.0",
+ "gdown",
"pytest",
"coverage",
"pytest-cov",
diff --git a/testing/test_full.py b/testing/test_full.py
index 4680a71ae..9dd860782 100644
--- a/testing/test_full.py
+++ b/testing/test_full.py
@@ -1,5 +1,5 @@
from pathlib import Path
-import requests, zipfile, io, os, csv, random, copy, shutil, yaml, torch, pytest
+import gdown, zipfile, os, csv, random, copy, shutil, yaml, torch, pytest
import SimpleITK as sitk
import numpy as np
import pandas as pd
@@ -109,9 +109,7 @@
def test_generic_download_data():
print("00: Downloading the sample data")
- urlToDownload = (
- "https://upenn.box.com/shared/static/y8162xkq1zz5555ye3pwadry2m2e39bs.zip"
- )
+ urlToDownload = "https://drive.google.com/uc?id=1c4Yrv-jnK6Tk7Ne1HmMTChv-4nYk43NT"
files_check = [
os.path.join(inputDir, "2d_histo_segmentation", "1", "image.tiff"),
@@ -122,9 +120,11 @@ def test_generic_download_data():
for file in files_check:
if not os.path.isfile(file):
print("Downloading and extracting sample data")
- r = requests.get(urlToDownload)
- z = zipfile.ZipFile(io.BytesIO(r.content))
- z.extractall(testingDir)
+ output = os.path.join(testingDir, "gandlf_unit_test_data.tgz")
+ gdown.download(urlToDownload, output, quiet=False)
+ with zipfile.ZipFile(output, "r") as zip_ref:
+ zip_ref.extractall(testingDir)
+ os.remove(output)
break
sanitize_outputDir()
|
bookwyrm-social__bookwyrm-2128 | When adding multiple authors to one book, only the first is added
**Describe the bug**
I would like to add multiple authors to a book (it's an anthology). When I add multiple authors to the book, via the "Add Another Author" button, only the first one is added.
**To Reproduce**
1. Edit book
2. Fill the info for one author (Charlie Jane Anders) in the input
3. Click "Add Another Author"
4. Fill the info for the new author in the second input
5. Click "Add Another Author"
6. Fill the info for that new author in that third input
7. Save book
8. The interface shows the message _Is "Charlie Jane Anders" one of these authors?_
9. Select one of the choices
10. Only Charlie Jane Anders has been added; the other authors don't show up.
**Expected behavior**
I would like to see all authors added, and the interface for selecting the right author (i.e. _Is "Charlie Jane Anders" one of these authors?_) should show the choices for all the authors I add.
**Instance**
boitam.eu (I expect this problem to be global)
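Judging from the fix in the diff below (the `return data` in `add_authors` is dedented out of the `for` loop), the root cause appears to be an early return inside the loop over submitted authors, so only the first author ever gets processed. A minimal, hypothetical illustration of that pattern (not BookWyrm code):

```python
def collect_names_buggy(names):
    cleaned = []
    for name in names:
        cleaned.append(name.strip())
        return cleaned  # bug: returns during the first iteration

def collect_names_fixed(names):
    cleaned = []
    for name in names:
        cleaned.append(name.strip())
    return cleaned  # fixed: return only after the whole loop

assert collect_names_buggy(["Charlie Jane Anders", "Second Author"]) == ["Charlie Jane Anders"]
assert collect_names_fixed(["Charlie Jane Anders", "Second Author"]) == [
    "Charlie Jane Anders",
    "Second Author",
]
```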
| [
{
"content": "\"\"\" the good stuff! the books! \"\"\"\nfrom re import sub, findall\nfrom django.contrib.auth.decorators import login_required, permission_required\nfrom django.contrib.postgres.search import SearchRank, SearchVector\nfrom django.db import transaction\nfrom django.http import HttpResponseBadRequest\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.template.response import TemplateResponse\nfrom django.utils.decorators import method_decorator\nfrom django.views.decorators.http import require_POST\nfrom django.views import View\n\nfrom bookwyrm import book_search, forms, models\n\n# from bookwyrm.activitypub.base_activity import ActivityObject\nfrom bookwyrm.utils.isni import (\n find_authors_by_name,\n build_author_from_isni,\n augment_author_metadata,\n)\nfrom bookwyrm.views.helpers import get_edition\nfrom .books import set_cover_from_url\n\n# pylint: disable=no-self-use\n@method_decorator(login_required, name=\"dispatch\")\n@method_decorator(\n permission_required(\"bookwyrm.edit_book\", raise_exception=True), name=\"dispatch\"\n)\nclass EditBook(View):\n \"\"\"edit a book\"\"\"\n\n def get(self, request, book_id):\n \"\"\"info about a book\"\"\"\n book = get_edition(book_id)\n if not book.description:\n book.description = book.parent_work.description\n data = {\"book\": book, \"form\": forms.EditionForm(instance=book)}\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n def post(self, request, book_id):\n \"\"\"edit a book cool\"\"\"\n book = get_object_or_404(models.Edition, id=book_id)\n form = forms.EditionForm(request.POST, request.FILES, instance=book)\n\n data = {\"book\": book, \"form\": form}\n if not form.is_valid():\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n data = add_authors(request, data)\n\n # either of the above cases requires additional confirmation\n if data.get(\"add_author\"):\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n remove_authors = request.POST.getlist(\"remove_authors\")\n for author_id in remove_authors:\n book.authors.remove(author_id)\n\n book = form.save(commit=False)\n\n url = request.POST.get(\"cover-url\")\n if url:\n image = set_cover_from_url(url)\n if image:\n book.cover.save(*image, save=False)\n\n book.save()\n return redirect(f\"/book/{book.id}\")\n\n\n@method_decorator(login_required, name=\"dispatch\")\n@method_decorator(\n permission_required(\"bookwyrm.edit_book\", raise_exception=True), name=\"dispatch\"\n)\nclass CreateBook(View):\n \"\"\"brand new book\"\"\"\n\n def get(self, request):\n \"\"\"info about a book\"\"\"\n data = {\"form\": forms.EditionForm()}\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n # pylint: disable=too-many-locals\n def post(self, request):\n \"\"\"create a new book\"\"\"\n # returns None if no match is found\n form = forms.EditionForm(request.POST, request.FILES)\n data = {\"form\": form}\n\n # collect data provided by the work or import item\n parent_work_id = request.POST.get(\"parent_work\")\n authors = None\n if request.POST.get(\"authors\"):\n author_ids = findall(r\"\\d+\", request.POST[\"authors\"])\n authors = models.Author.objects.filter(id__in=author_ids)\n\n # fake book in case we need to keep editing\n if parent_work_id:\n data[\"book\"] = {\n \"parent_work\": {\"id\": parent_work_id},\n \"authors\": authors,\n }\n\n if not form.is_valid():\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n data = add_authors(request, data)\n\n # check 
if this is an edition of an existing work\n author_text = \", \".join(data.get(\"add_author\", []))\n data[\"book_matches\"] = book_search.search(\n f'{form.cleaned_data.get(\"title\")} {author_text}',\n min_confidence=0.1,\n )[:5]\n\n # go to confirm mode\n if not parent_work_id or data.get(\"add_author\"):\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n with transaction.atomic():\n book = form.save()\n parent_work = get_object_or_404(models.Work, id=parent_work_id)\n book.parent_work = parent_work\n\n if authors:\n book.authors.add(*authors)\n\n url = request.POST.get(\"cover-url\")\n if url:\n image = set_cover_from_url(url)\n if image:\n book.cover.save(*image, save=False)\n\n book.save()\n return redirect(f\"/book/{book.id}\")\n\n\ndef add_authors(request, data):\n \"\"\"helper for adding authors\"\"\"\n add_author = [author for author in request.POST.getlist(\"add_author\") if author]\n if not add_author:\n return data\n\n data[\"add_author\"] = add_author\n data[\"author_matches\"] = []\n data[\"isni_matches\"] = []\n\n # creting a book or adding an author to a book needs another step\n data[\"confirm_mode\"] = True\n # this isn't preserved because it isn't part of the form obj\n data[\"remove_authors\"] = request.POST.getlist(\"remove_authors\")\n data[\"cover_url\"] = request.POST.get(\"cover-url\")\n\n for author in add_author:\n # filter out empty author fields\n if not author:\n continue\n # check for existing authors\n vector = SearchVector(\"name\", weight=\"A\") + SearchVector(\"aliases\", weight=\"B\")\n\n author_matches = (\n models.Author.objects.annotate(search=vector)\n .annotate(rank=SearchRank(vector, author))\n .filter(rank__gt=0.4)\n .order_by(\"-rank\")[:5]\n )\n\n isni_authors = find_authors_by_name(\n author, description=True\n ) # find matches from ISNI API\n\n # dedupe isni authors we already have in the DB\n exists = [\n i\n for i in isni_authors\n for a in author_matches\n if sub(r\"\\D\", \"\", str(i.isni)) == sub(r\"\\D\", \"\", str(a.isni))\n ]\n\n # pylint: disable=cell-var-from-loop\n matches = list(filter(lambda x: x not in exists, isni_authors))\n # combine existing and isni authors\n matches.extend(author_matches)\n\n data[\"author_matches\"].append(\n {\n \"name\": author.strip(),\n \"matches\": matches,\n \"existing_isnis\": exists,\n }\n )\n return data\n\n\n@require_POST\n@permission_required(\"bookwyrm.edit_book\", raise_exception=True)\ndef create_book_from_data(request):\n \"\"\"create a book with starter data\"\"\"\n author_ids = findall(r\"\\d+\", request.POST.get(\"authors\"))\n book = {\n \"parent_work\": {\"id\": request.POST.get(\"parent_work\")},\n \"authors\": models.Author.objects.filter(id__in=author_ids).all(),\n \"subjects\": request.POST.getlist(\"subjects\"),\n }\n\n data = {\"book\": book, \"form\": forms.EditionForm(request.POST)}\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n\n@method_decorator(login_required, name=\"dispatch\")\n@method_decorator(\n permission_required(\"bookwyrm.edit_book\", raise_exception=True), name=\"dispatch\"\n)\nclass ConfirmEditBook(View):\n \"\"\"confirm edits to a book\"\"\"\n\n # pylint: disable=too-many-locals\n # pylint: disable=too-many-branches\n def post(self, request, book_id=None):\n \"\"\"edit a book cool\"\"\"\n # returns None if no match is found\n book = models.Edition.objects.filter(id=book_id).first()\n form = forms.EditionForm(request.POST, request.FILES, instance=book)\n\n data = {\"book\": book, \"form\": form}\n if not 
form.is_valid():\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n with transaction.atomic():\n # save book\n book = form.save()\n\n # add known authors\n authors = None\n if request.POST.get(\"authors\"):\n author_ids = findall(r\"\\d+\", request.POST[\"authors\"])\n authors = models.Author.objects.filter(id__in=author_ids)\n book.authors.add(*authors)\n\n # get or create author as needed\n for i in range(int(request.POST.get(\"author-match-count\", 0))):\n match = request.POST.get(f\"author_match-{i}\")\n if not match:\n return HttpResponseBadRequest()\n try:\n # if it's an int, it's an ID\n match = int(match)\n author = get_object_or_404(\n models.Author, id=request.POST[f\"author_match-{i}\"]\n )\n # update author metadata if the ISNI record is more complete\n isni = request.POST.get(f\"isni-for-{match}\", None)\n if isni is not None:\n augment_author_metadata(author, isni)\n except ValueError:\n # otherwise it's a new author\n isni_match = request.POST.get(f\"author_match-{i}\")\n author_object = build_author_from_isni(isni_match)\n # with author data class from isni id\n if \"author\" in author_object:\n skeleton = models.Author.objects.create(\n name=author_object[\"author\"].name\n )\n author = author_object[\"author\"].to_model(\n model=models.Author, overwrite=True, instance=skeleton\n )\n else:\n # or it's just a name\n author = models.Author.objects.create(name=match)\n book.authors.add(author)\n\n # create work, if needed\n if not book.parent_work:\n work_match = request.POST.get(\"parent_work\")\n if work_match and work_match != \"0\":\n work = get_object_or_404(models.Work, id=work_match)\n else:\n work = models.Work.objects.create(title=form.cleaned_data[\"title\"])\n work.authors.set(book.authors.all())\n book.parent_work = work\n\n for author_id in request.POST.getlist(\"remove_authors\"):\n book.authors.remove(author_id)\n\n # import cover, if requested\n url = request.POST.get(\"cover-url\")\n if url:\n image = set_cover_from_url(url)\n if image:\n book.cover.save(*image, save=False)\n\n # we don't tell the world when creating a book\n book.save(broadcast=False)\n\n return redirect(f\"/book/{book.id}\")\n",
"path": "bookwyrm/views/books/edit_book.py"
}
] | [
{
"content": "\"\"\" the good stuff! the books! \"\"\"\nfrom re import sub, findall\nfrom django.contrib.auth.decorators import login_required, permission_required\nfrom django.contrib.postgres.search import SearchRank, SearchVector\nfrom django.db import transaction\nfrom django.http import HttpResponseBadRequest\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.template.response import TemplateResponse\nfrom django.utils.decorators import method_decorator\nfrom django.views.decorators.http import require_POST\nfrom django.views import View\n\nfrom bookwyrm import book_search, forms, models\n\n# from bookwyrm.activitypub.base_activity import ActivityObject\nfrom bookwyrm.utils.isni import (\n find_authors_by_name,\n build_author_from_isni,\n augment_author_metadata,\n)\nfrom bookwyrm.views.helpers import get_edition\nfrom .books import set_cover_from_url\n\n# pylint: disable=no-self-use\n@method_decorator(login_required, name=\"dispatch\")\n@method_decorator(\n permission_required(\"bookwyrm.edit_book\", raise_exception=True), name=\"dispatch\"\n)\nclass EditBook(View):\n \"\"\"edit a book\"\"\"\n\n def get(self, request, book_id):\n \"\"\"info about a book\"\"\"\n book = get_edition(book_id)\n if not book.description:\n book.description = book.parent_work.description\n data = {\"book\": book, \"form\": forms.EditionForm(instance=book)}\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n def post(self, request, book_id):\n \"\"\"edit a book cool\"\"\"\n book = get_object_or_404(models.Edition, id=book_id)\n form = forms.EditionForm(request.POST, request.FILES, instance=book)\n\n data = {\"book\": book, \"form\": form}\n if not form.is_valid():\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n data = add_authors(request, data)\n\n # either of the above cases requires additional confirmation\n if data.get(\"add_author\"):\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n remove_authors = request.POST.getlist(\"remove_authors\")\n for author_id in remove_authors:\n book.authors.remove(author_id)\n\n book = form.save(commit=False)\n\n url = request.POST.get(\"cover-url\")\n if url:\n image = set_cover_from_url(url)\n if image:\n book.cover.save(*image, save=False)\n\n book.save()\n return redirect(f\"/book/{book.id}\")\n\n\n@method_decorator(login_required, name=\"dispatch\")\n@method_decorator(\n permission_required(\"bookwyrm.edit_book\", raise_exception=True), name=\"dispatch\"\n)\nclass CreateBook(View):\n \"\"\"brand new book\"\"\"\n\n def get(self, request):\n \"\"\"info about a book\"\"\"\n data = {\"form\": forms.EditionForm()}\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n # pylint: disable=too-many-locals\n def post(self, request):\n \"\"\"create a new book\"\"\"\n # returns None if no match is found\n form = forms.EditionForm(request.POST, request.FILES)\n data = {\"form\": form}\n\n # collect data provided by the work or import item\n parent_work_id = request.POST.get(\"parent_work\")\n authors = None\n if request.POST.get(\"authors\"):\n author_ids = findall(r\"\\d+\", request.POST[\"authors\"])\n authors = models.Author.objects.filter(id__in=author_ids)\n\n # fake book in case we need to keep editing\n if parent_work_id:\n data[\"book\"] = {\n \"parent_work\": {\"id\": parent_work_id},\n \"authors\": authors,\n }\n\n if not form.is_valid():\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n data = add_authors(request, data)\n\n # check 
if this is an edition of an existing work\n author_text = \", \".join(data.get(\"add_author\", []))\n data[\"book_matches\"] = book_search.search(\n f'{form.cleaned_data.get(\"title\")} {author_text}',\n min_confidence=0.1,\n )[:5]\n\n # go to confirm mode\n if not parent_work_id or data.get(\"add_author\"):\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n with transaction.atomic():\n book = form.save()\n parent_work = get_object_or_404(models.Work, id=parent_work_id)\n book.parent_work = parent_work\n\n if authors:\n book.authors.add(*authors)\n\n url = request.POST.get(\"cover-url\")\n if url:\n image = set_cover_from_url(url)\n if image:\n book.cover.save(*image, save=False)\n\n book.save()\n return redirect(f\"/book/{book.id}\")\n\n\ndef add_authors(request, data):\n \"\"\"helper for adding authors\"\"\"\n add_author = [author for author in request.POST.getlist(\"add_author\") if author]\n if not add_author:\n return data\n\n data[\"add_author\"] = add_author\n data[\"author_matches\"] = []\n data[\"isni_matches\"] = []\n\n # creting a book or adding an author to a book needs another step\n data[\"confirm_mode\"] = True\n # this isn't preserved because it isn't part of the form obj\n data[\"remove_authors\"] = request.POST.getlist(\"remove_authors\")\n data[\"cover_url\"] = request.POST.get(\"cover-url\")\n\n for author in add_author:\n # filter out empty author fields\n if not author:\n continue\n # check for existing authors\n vector = SearchVector(\"name\", weight=\"A\") + SearchVector(\"aliases\", weight=\"B\")\n\n author_matches = (\n models.Author.objects.annotate(search=vector)\n .annotate(rank=SearchRank(vector, author))\n .filter(rank__gt=0.4)\n .order_by(\"-rank\")[:5]\n )\n\n isni_authors = find_authors_by_name(\n author, description=True\n ) # find matches from ISNI API\n\n # dedupe isni authors we already have in the DB\n exists = [\n i\n for i in isni_authors\n for a in author_matches\n if sub(r\"\\D\", \"\", str(i.isni)) == sub(r\"\\D\", \"\", str(a.isni))\n ]\n\n # pylint: disable=cell-var-from-loop\n matches = list(filter(lambda x: x not in exists, isni_authors))\n # combine existing and isni authors\n matches.extend(author_matches)\n\n data[\"author_matches\"].append(\n {\n \"name\": author.strip(),\n \"matches\": matches,\n \"existing_isnis\": exists,\n }\n )\n return data\n\n\n@require_POST\n@permission_required(\"bookwyrm.edit_book\", raise_exception=True)\ndef create_book_from_data(request):\n \"\"\"create a book with starter data\"\"\"\n author_ids = findall(r\"\\d+\", request.POST.get(\"authors\"))\n book = {\n \"parent_work\": {\"id\": request.POST.get(\"parent_work\")},\n \"authors\": models.Author.objects.filter(id__in=author_ids).all(),\n \"subjects\": request.POST.getlist(\"subjects\"),\n }\n\n data = {\"book\": book, \"form\": forms.EditionForm(request.POST)}\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n\n@method_decorator(login_required, name=\"dispatch\")\n@method_decorator(\n permission_required(\"bookwyrm.edit_book\", raise_exception=True), name=\"dispatch\"\n)\nclass ConfirmEditBook(View):\n \"\"\"confirm edits to a book\"\"\"\n\n # pylint: disable=too-many-locals\n # pylint: disable=too-many-branches\n def post(self, request, book_id=None):\n \"\"\"edit a book cool\"\"\"\n # returns None if no match is found\n book = models.Edition.objects.filter(id=book_id).first()\n form = forms.EditionForm(request.POST, request.FILES, instance=book)\n\n data = {\"book\": book, \"form\": form}\n if not 
form.is_valid():\n return TemplateResponse(request, \"book/edit/edit_book.html\", data)\n\n with transaction.atomic():\n # save book\n book = form.save()\n\n # add known authors\n authors = None\n if request.POST.get(\"authors\"):\n author_ids = findall(r\"\\d+\", request.POST[\"authors\"])\n authors = models.Author.objects.filter(id__in=author_ids)\n book.authors.add(*authors)\n\n # get or create author as needed\n for i in range(int(request.POST.get(\"author-match-count\", 0))):\n match = request.POST.get(f\"author_match-{i}\")\n if not match:\n return HttpResponseBadRequest()\n try:\n # if it's an int, it's an ID\n match = int(match)\n author = get_object_or_404(\n models.Author, id=request.POST[f\"author_match-{i}\"]\n )\n # update author metadata if the ISNI record is more complete\n isni = request.POST.get(f\"isni-for-{match}\", None)\n if isni is not None:\n augment_author_metadata(author, isni)\n except ValueError:\n # otherwise it's a new author\n isni_match = request.POST.get(f\"author_match-{i}\")\n author_object = build_author_from_isni(isni_match)\n # with author data class from isni id\n if \"author\" in author_object:\n skeleton = models.Author.objects.create(\n name=author_object[\"author\"].name\n )\n author = author_object[\"author\"].to_model(\n model=models.Author, overwrite=True, instance=skeleton\n )\n else:\n # or it's just a name\n author = models.Author.objects.create(name=match)\n book.authors.add(author)\n\n # create work, if needed\n if not book.parent_work:\n work_match = request.POST.get(\"parent_work\")\n if work_match and work_match != \"0\":\n work = get_object_or_404(models.Work, id=work_match)\n else:\n work = models.Work.objects.create(title=form.cleaned_data[\"title\"])\n work.authors.set(book.authors.all())\n book.parent_work = work\n\n for author_id in request.POST.getlist(\"remove_authors\"):\n book.authors.remove(author_id)\n\n # import cover, if requested\n url = request.POST.get(\"cover-url\")\n if url:\n image = set_cover_from_url(url)\n if image:\n book.cover.save(*image, save=False)\n\n # we don't tell the world when creating a book\n book.save(broadcast=False)\n\n return redirect(f\"/book/{book.id}\")\n",
"path": "bookwyrm/views/books/edit_book.py"
}
] | diff --git a/bookwyrm/tests/views/books/test_edit_book.py b/bookwyrm/tests/views/books/test_edit_book.py
index cabfe972d6..c7869807bc 100644
--- a/bookwyrm/tests/views/books/test_edit_book.py
+++ b/bookwyrm/tests/views/books/test_edit_book.py
@@ -9,6 +9,7 @@
from django.test.client import RequestFactory
from bookwyrm import forms, models, views
+from bookwyrm.views.books.edit_book import add_authors
from bookwyrm.tests.validate_html import validate_html
from bookwyrm.tests.views.books.test_book import _setup_cover_url
@@ -214,3 +215,22 @@ def test_create_book_upload_cover_url(self):
self.book.refresh_from_db()
self.assertTrue(self.book.cover)
+
+ def test_add_authors_helper(self):
+ """converts form input into author matches"""
+ form = forms.EditionForm(instance=self.book)
+ form.data["title"] = "New Title"
+ form.data["last_edited_by"] = self.local_user.id
+ form.data["add_author"] = ["Sappho", "Some Guy"]
+ request = self.factory.post("", form.data)
+ request.user = self.local_user
+
+ with patch("bookwyrm.utils.isni.find_authors_by_name") as mock:
+ mock.return_value = []
+ result = add_authors(request, form.data)
+
+ self.assertTrue(result["confirm_mode"])
+ self.assertEqual(result["add_author"], ["Sappho", "Some Guy"])
+ self.assertEqual(len(result["author_matches"]), 2)
+ self.assertEqual(result["author_matches"][0]["name"], "Sappho")
+ self.assertEqual(result["author_matches"][1]["name"], "Some Guy")
diff --git a/bookwyrm/views/books/edit_book.py b/bookwyrm/views/books/edit_book.py
index 2315cfce2d..d830ebdcfb 100644
--- a/bookwyrm/views/books/edit_book.py
+++ b/bookwyrm/views/books/edit_book.py
@@ -189,7 +189,7 @@ def add_authors(request, data):
"existing_isnis": exists,
}
)
- return data
+ return data
@require_POST
|
sktime__sktime-3618 | [BUG] ShapeletTransformClassifier numba error when dtype is not float64
**Describe the bug**
It seems that when using `ShapeletTransformClassifier`, there are some Numba-accelerated functions that break if the data in the input data frame are of type `int32` or `float32` (i.e. anything other than `float64`).
**To Reproduce**
MRE as below:
```python
import warnings
warnings.simplefilter('ignore', category=FutureWarning)
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sktime.classification.shapelet_based import ShapeletTransformClassifier
from sktime.contrib.vector_classifiers._rotation_forest import RotationForest
# make fake data
data = pd.DataFrame(np.random.random((5000, 250))).astype(np.float32)
# reshape to input into Shapelet Classifier
data4train = data.apply(lambda row: pd.Series({
'time-series': pd.Series(row.values)
}), axis=1)
# make targets
targets = pd.Series(2500 * [1] + 2500 * [0])
# train test split
X_train, X_test, y_train, y_test = train_test_split(
data4train, targets, test_size=0.7, random_state=42
)
# train
clf = ShapeletTransformClassifier(
estimator=RotationForest(n_estimators=3),
n_shapelet_samples=500,
max_shapelets=20,
batch_size=100,
)
clf.fit(X_train, y_train)
```
**Expected behavior**
The code should run without throwing an error; ideally, the classifier would also enforce conversion to float32 or float64 internally.
**Additional context**
Removing the conversion to `float32` (so the dtype stays `float64`) makes the code run without issues.
**Versions**
numba 0.55.1
sklearn 0.24.1
sktime 0.11.0
pandas 1.4.2
python 3.8.10
**Stacktrace output**
```bash
TypingError: Failed in nopython mode pipeline (step: nopython frontend)
Cannot unify array(float64, 1d, C) and array(float32, 1d, C) for 'X_n.2', defined at /path_to_mypython/python/lib/python3.8/site-packages/sktime/utils/numba/general.py (39)
File "../python/lib/python3.8/site-packages/sktime/utils/numba/general.py", line 39:
def z_normalise_series(X):
<source elided>
return X_n
```
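The stacktrace points at `z_normalise_series`: the zero-variance branch returns `np.zeros(len(X))` (float64) while the other branch keeps the input dtype (here float32), and Numba cannot unify the two array types. Until that is fixed, a minimal workaround sketch (assuming the nested-DataFrame setup from the MRE above; `to_float64` is a hypothetical helper, not part of sktime) is to cast everything to float64 before fitting:

```python
import numpy as np

def to_float64(nested_df):
    # cast every nested pd.Series cell to float64 so numba only ever sees one dtype
    return nested_df.applymap(lambda s: s.astype(np.float64))

clf.fit(to_float64(X_train), y_train)
preds = clf.predict(to_float64(X_test))
```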
| [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"General numba utilities.\"\"\"\n\nimport numpy as np\nfrom numba import njit\n\n\n@njit(fastmath=True, cache=True)\ndef unique_count(X):\n \"\"\"Numba unique count function for a 1D array.\"\"\"\n if len(X) > 0:\n X = np.sort(X)\n unique = np.zeros(len(X))\n unique[0] = X[0]\n counts = np.zeros(len(X), dtype=np.int_)\n counts[0] = 1\n unique_count = 0\n\n for i in X[1:]:\n if i != unique[unique_count]:\n unique_count += 1\n unique[unique_count] = i\n counts[unique_count] = 1\n else:\n counts[unique_count] += 1\n return unique[: unique_count + 1], counts[: unique_count + 1]\n return None, np.zeros(0, dtype=np.int_)\n\n\n@njit(fastmath=True, cache=True)\ndef z_normalise_series(X):\n \"\"\"Numba z-normalisation function for a single time series.\"\"\"\n std = np.std(X)\n if std > 0:\n X_n = (X - np.mean(X)) / std\n else:\n X_n = np.zeros(len(X))\n\n return X_n\n",
"path": "sktime/utils/numba/general.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"General numba utilities.\"\"\"\n\nimport numpy as np\nfrom numba import njit\n\n\n@njit(fastmath=True, cache=True)\ndef unique_count(X):\n \"\"\"Numba unique count function for a 1D array.\"\"\"\n if len(X) > 0:\n X = np.sort(X)\n unique = np.zeros(len(X))\n unique[0] = X[0]\n counts = np.zeros(len(X), dtype=np.int_)\n counts[0] = 1\n unique_count = 0\n\n for i in X[1:]:\n if i != unique[unique_count]:\n unique_count += 1\n unique[unique_count] = i\n counts[unique_count] = 1\n else:\n counts[unique_count] += 1\n return unique[: unique_count + 1], counts[: unique_count + 1]\n return None, np.zeros(0, dtype=np.int_)\n\n\n@njit(fastmath=True, cache=True)\ndef z_normalise_series(X):\n \"\"\"Numba z-normalisation function for a single time series.\"\"\"\n std = np.std(X)\n if std > 0:\n X_n = (X - np.mean(X)) / std\n else:\n X_n = X - np.mean(X)\n return X_n\n",
"path": "sktime/utils/numba/general.py"
}
] | diff --git a/sktime/utils/estimators/tests/__init__.py b/sktime/utils/estimators/tests/__init__.py
index 79740faf9fc..095656a554e 100644
--- a/sktime/utils/estimators/tests/__init__.py
+++ b/sktime/utils/estimators/tests/__init__.py
@@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
-"""Tests for Mock Estimnators."""
+"""Tests for Mock Estimators."""
__author__ = ["ltsaprounis"]
diff --git a/sktime/utils/numba/general.py b/sktime/utils/numba/general.py
index c18f5a12d22..49223f5d19e 100644
--- a/sktime/utils/numba/general.py
+++ b/sktime/utils/numba/general.py
@@ -34,6 +34,5 @@ def z_normalise_series(X):
if std > 0:
X_n = (X - np.mean(X)) / std
else:
- X_n = np.zeros(len(X))
-
+ X_n = X - np.mean(X)
return X_n
diff --git a/sktime/utils/numba/tests/__init__.py b/sktime/utils/numba/tests/__init__.py
new file mode 100644
index 00000000000..2445591e01b
--- /dev/null
+++ b/sktime/utils/numba/tests/__init__.py
@@ -0,0 +1,4 @@
+# -*- coding: utf-8 -*-
+"""Tests for numba utils."""
+
+__author__ = ["TonyBagnall"]
diff --git a/sktime/utils/numba/tests/test_general.py b/sktime/utils/numba/tests/test_general.py
new file mode 100644
index 00000000000..f2c9a249ffb
--- /dev/null
+++ b/sktime/utils/numba/tests/test_general.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+"""Tests for numba functions."""
+
+__author__ = ["TonyBagnall"]
+
+import numpy as np
+import pytest
+from numpy.testing import assert_array_equal
+
+from sktime.utils.numba.general import z_normalise_series
+
+DATATYPES = ["int32", "int64", "float32", "float64"]
+
+
[email protected]("type", DATATYPES)
+def test_z_normalise_series(type):
+ """Test the function z_normalise_series."""
+ a = np.array([2, 2, 2], dtype=type)
+ a_expected = np.array([0, 0, 0], dtype=type)
+ a_result = z_normalise_series(a)
+ assert_array_equal(a_result, a_expected)
|
mlcommons__GaNDLF-766 | `gdown` does not seem to be working
**Describe the bug**
Current CI seems to be broken.
**To Reproduce**
Steps to reproduce the behavior:
1. Run any CI test
2. See error:
```python-traceback
[SNIP!]
if gdrive_file_id and is_gdrive_download_link:
content_disposition = six.moves.urllib_parse.unquote(
res.headers["Content-Disposition"]
)
m = re.search(r"filename\*=UTF-8''(.*)", content_disposition)
> filename_from_url = m.groups()[0]
E AttributeError: 'NoneType' object has no attribute 'groups'
```
Example: https://github.com/mlcommons/GaNDLF/actions/runs/7489779631/job/20387346791?pr=764#step:9:219
**Expected behavior**
The sample data file download should work.
**Screenshots**
N.A.
**GaNDLF Version**
Current master
**Desktop (please complete the following information):**
N.A.
**Additional context**
Basically, it is this error: https://github.com/wkentaro/gdown/issues/291
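For context, the traceback above comes from gdown's Content-Disposition parsing (gdown#291), and the fix in the diff below pins `gdown==4.6.3`. A hedged sketch of a download helper that fails loudly when gdown returns nothing, instead of crashing later during extraction (the function name and paths are illustrative, not the actual test code):

```python
import os
import zipfile

import gdown


def fetch_sample_data(url, archive_path, extract_dir):
    """Download a zip archive via gdown, extract it, and clean up."""
    result = gdown.download(url, archive_path, quiet=False)
    if result is None or not os.path.isfile(archive_path):
        raise RuntimeError(f"gdown failed to download {url}")
    with zipfile.ZipFile(archive_path, "r") as zip_ref:
        zip_ref.extractall(extract_dir)
    os.remove(archive_path)
```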
| [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error)\n )\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (filepath, error)\n )\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [\n os.path.join(\"../\", item) for item in all_extra_files\n]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==2.1.0\",\n \"black==23.11.0\",\n \"numpy==1.25.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.19.3\",\n \"pandas>=2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==1.1.2\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.3\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">3.8, <3.12\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n 
\"gandlf_configGenerator\",\n \"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error)\n )\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (filepath, error)\n )\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [\n os.path.join(\"../\", item) for item in all_extra_files\n]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==2.1.0\",\n \"black==23.11.0\",\n \"numpy==1.25.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.19.3\",\n \"pandas>=2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown==4.6.3\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==1.1.2\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.3\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">3.8, <3.12\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n 
\"gandlf_configGenerator\",\n \"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | diff --git a/setup.py b/setup.py
index 464f7a603..4917c2432 100644
--- a/setup.py
+++ b/setup.py
@@ -98,7 +98,7 @@ def run(self):
"pyyaml",
"tiffslide",
"matplotlib",
- "gdown",
+ "gdown==4.6.3",
"pytest",
"coverage",
"pytest-cov",
diff --git a/testing/test_full.py b/testing/test_full.py
index a323d47e0..772258320 100644
--- a/testing/test_full.py
+++ b/testing/test_full.py
@@ -123,7 +123,7 @@ def test_generic_download_data():
if not os.path.isfile(file):
print("Downloading and extracting sample data")
output = os.path.join(testingDir, "gandlf_unit_test_data.tgz")
- gdown.download(urlToDownload, output, quiet=False)
+ gdown.download(urlToDownload, output, quiet=False, verify = True)
with zipfile.ZipFile(output, "r") as zip_ref:
zip_ref.extractall(testingDir)
os.remove(output)
|
facebookresearch__hydra-1808 | [Bug] hydra-optuna-sweeper 1.1.0 requires numpy<1.20.0
# 🐛 Bug
## Description
I used the guide from
https://hydra.cc/docs/plugins/optuna_sweeper/
and installed hydra-optuna-sweeper:
```bash
pip install hydra-optuna-sweeper --upgrade
```
But it seems this plugin requires numpy<1.20.0 (screenshot omitted).
**Edit:**
I searched for optuna's requirements and found this:
https://github.com/optuna/optuna/blob/cbae80476c15b6d39e1d8851dc6a501c63c3ca92/setup.py#L35
Why does hydra-optuna-sweeper need to use numpy<1.20.0?
| [
{
"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n__version__ = \"1.1.0\"\n",
"path": "plugins/hydra_optuna_sweeper/hydra_plugins/hydra_optuna_sweeper/__init__.py"
}
] | [
{
"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n__version__ = \"1.1.1\"\n",
"path": "plugins/hydra_optuna_sweeper/hydra_plugins/hydra_optuna_sweeper/__init__.py"
}
] | diff --git a/plugins/hydra_optuna_sweeper/NEWS.md b/plugins/hydra_optuna_sweeper/NEWS.md
index 704308ed298..9b29b7e101b 100644
--- a/plugins/hydra_optuna_sweeper/NEWS.md
+++ b/plugins/hydra_optuna_sweeper/NEWS.md
@@ -1,3 +1,11 @@
+1.1.1 (2021-09-01)
+=======================
+
+### Maintenance Changes
+
+- Update optuna dependency ([#1746](https://github.com/facebookresearch/hydra/issues/1634))
+
+
1.1.0.dev2 (2021-06-10)
=======================
diff --git a/plugins/hydra_optuna_sweeper/hydra_plugins/hydra_optuna_sweeper/__init__.py b/plugins/hydra_optuna_sweeper/hydra_plugins/hydra_optuna_sweeper/__init__.py
index 13f9060d5d4..c2490e62c7a 100644
--- a/plugins/hydra_optuna_sweeper/hydra_plugins/hydra_optuna_sweeper/__init__.py
+++ b/plugins/hydra_optuna_sweeper/hydra_plugins/hydra_optuna_sweeper/__init__.py
@@ -1,3 +1,3 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
-__version__ = "1.1.0"
+__version__ = "1.1.1"
|
jupyterhub__jupyterhub-2545 | Releasing 1.0
With #2435 rounding out the final thing we need for the next release, I think it's time to put together the 1.0 release.
This should consist of:
- [x] assembling changelog #2440
- [x] making sure new features are well documented
- [x] publishing beta release
- [x] test beta (perhaps by adding it to the z2jh chart)
- [ ] release 1.0 final
| [
{
"content": "\"\"\"JupyterHub version info\"\"\"\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nversion_info = (\n 1,\n 0,\n 0,\n \"b2\", # release (b1, rc1, or \"\" for final or dev)\n # \"dev\", # dev or nothing\n)\n\n# pep 440 version: no dot before beta/rc, but before .dev\n# 0.1.0rc1\n# 0.1.0a1\n# 0.1.0b1.dev\n# 0.1.0.dev\n\n__version__ = \".\".join(map(str, version_info[:3])) + \".\".join(version_info[3:])\n\n\ndef _check_version(hub_version, singleuser_version, log):\n \"\"\"Compare Hub and single-user server versions\"\"\"\n if not hub_version:\n log.warning(\n \"Hub has no version header, which means it is likely < 0.8. Expected %s\",\n __version__,\n )\n return\n\n if not singleuser_version:\n log.warning(\n \"Single-user server has no version header, which means it is likely < 0.8. Expected %s\",\n __version__,\n )\n return\n\n # compare minor X.Y versions\n if hub_version != singleuser_version:\n from distutils.version import LooseVersion as V\n\n hub_major_minor = V(hub_version).version[:2]\n singleuser_major_minor = V(singleuser_version).version[:2]\n extra = \"\"\n if singleuser_major_minor == hub_major_minor:\n # patch-level mismatch or lower, log difference at debug-level\n # because this should be fine\n log_method = log.debug\n else:\n # log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc.\n log_method = log.warning\n extra = \" This could cause failure to authenticate and result in redirect loops!\"\n log_method(\n \"jupyterhub version %s != jupyterhub-singleuser version %s.\" + extra,\n hub_version,\n singleuser_version,\n )\n else:\n log.debug(\n \"jupyterhub and jupyterhub-singleuser both on version %s\" % hub_version\n )\n",
"path": "jupyterhub/_version.py"
}
] | [
{
"content": "\"\"\"JupyterHub version info\"\"\"\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nversion_info = (\n 1,\n 0,\n 0,\n # \"b2\", # release (b1, rc1, or \"\" for final or dev)\n # \"dev\", # dev or nothing\n)\n\n# pep 440 version: no dot before beta/rc, but before .dev\n# 0.1.0rc1\n# 0.1.0a1\n# 0.1.0b1.dev\n# 0.1.0.dev\n\n__version__ = \".\".join(map(str, version_info[:3])) + \".\".join(version_info[3:])\n\n\ndef _check_version(hub_version, singleuser_version, log):\n \"\"\"Compare Hub and single-user server versions\"\"\"\n if not hub_version:\n log.warning(\n \"Hub has no version header, which means it is likely < 0.8. Expected %s\",\n __version__,\n )\n return\n\n if not singleuser_version:\n log.warning(\n \"Single-user server has no version header, which means it is likely < 0.8. Expected %s\",\n __version__,\n )\n return\n\n # compare minor X.Y versions\n if hub_version != singleuser_version:\n from distutils.version import LooseVersion as V\n\n hub_major_minor = V(hub_version).version[:2]\n singleuser_major_minor = V(singleuser_version).version[:2]\n extra = \"\"\n if singleuser_major_minor == hub_major_minor:\n # patch-level mismatch or lower, log difference at debug-level\n # because this should be fine\n log_method = log.debug\n else:\n # log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc.\n log_method = log.warning\n extra = \" This could cause failure to authenticate and result in redirect loops!\"\n log_method(\n \"jupyterhub version %s != jupyterhub-singleuser version %s.\" + extra,\n hub_version,\n singleuser_version,\n )\n else:\n log.debug(\n \"jupyterhub and jupyterhub-singleuser both on version %s\" % hub_version\n )\n",
"path": "jupyterhub/_version.py"
}
] | diff --git a/docs/source/changelog.md b/docs/source/changelog.md
index 69848748ff..4cd14dbdff 100644
--- a/docs/source/changelog.md
+++ b/docs/source/changelog.md
@@ -9,7 +9,7 @@ command line for details.
## 1.0
-### [1.0.0] 2019-04-XX
+### [1.0.0] 2019-05-03
JupyterHub 1.0 is a major milestone for JupyterHub.
Huge thanks to the many people who have contributed to this release,
@@ -577,7 +577,7 @@ First preview release
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/1.0.0...HEAD
-[1.0.0]: https://github.com/jupyterhub/jupyterhub/compare/0.9.5...HEAD
+[1.0.0]: https://github.com/jupyterhub/jupyterhub/compare/0.9.6...1.0.0
[0.9.6]: https://github.com/jupyterhub/jupyterhub/compare/0.9.4...0.9.6
[0.9.4]: https://github.com/jupyterhub/jupyterhub/compare/0.9.3...0.9.4
[0.9.3]: https://github.com/jupyterhub/jupyterhub/compare/0.9.2...0.9.3
diff --git a/jupyterhub/_version.py b/jupyterhub/_version.py
index 73f02fb240..240bab3966 100644
--- a/jupyterhub/_version.py
+++ b/jupyterhub/_version.py
@@ -6,7 +6,7 @@
1,
0,
0,
- "b2", # release (b1, rc1, or "" for final or dev)
+ # "b2", # release (b1, rc1, or "" for final or dev)
# "dev", # dev or nothing
)
|
nipy__nipype-3199 | REL: 1.5.0
## Summary
Prep for new feature release 1.5.0, targeting release on Monday, February 24.
Given that we just released 1.4.2 about a week ago, I'm inclined to hold this one off for any feature PRs that would like to shoot for inclusion.
Require merge or postponement decision on all issues/PRs in https://github.com/nipy/nipype/milestone/38:
* [x] ENH: Add a ``Bandpass`` filter interface under ``algorithms.filters`` #2915 (@oesteban)
* [x] [WIP/ENH] Adds a new interface for AFNI's ``3dMEMA`` command #2953 (@JesseyWright / @oesteban)
* [x] ENH: Add interface for fslorient #2955 (@felixsc1)
* [x] [FIX] Mrtrix3 usedefault issue (#3004) (@matteomancini)
* [x] [DOC] SelectFiles docstring corrected #3041 (@AKSoo)
* [ ] FIX Ants N4BiasFieldCorrection rescale_intensities bug #3139 (@salma1601)
* [x] CI: Test Python 3.8 #3154 (@effigies)
* [x] ENH: Detect values for EulerNumber interface #3173 (@mgxd)
Will try to review the open PRs tomorrow and see if anything is close enough to push on.
## Release checklist
* [ ] Merge pending PRs
* [x] Update changelog
* [x] Update .mailmap
* [x] Update .zenodo.json
* [x] Set release number in `nipype/info.py`
* [x] Update `doc/interfaces.rst` with previous releases
* [x] Check conda-forge feedstock build (conda-forge/nipype-feedstock#67)
* [ ] Tutorial tests (https://circleci.com/workflow-run/be312bea-8273-47cf-9e52-54257d969422)
## Uncredited authors
The following authors have contributed, but not added themselves to the [`.zenodo.json`](https://github.com/nipy/nipype/blob/master/.zenodo.json) file. If you would like to be an author on Zenodo releases, please add yourself or comment with your preferred publication name, affiliation and [ORCID](https://orcid.org/). If you would like to stop being spammed whenever I'm the one doing releases, let me know, and I'll add you to a blacklist.
No entry to sort: cdla (@cdla)
No entry to sort: Gio Piantoni (@gpiantoni)
No entry to sort: Victor Férat (@vferat)
No entry to sort: Niklas Förster (@niklasfoe)
~~No entry to sort: Adam Kimbler (@adamkimbler)~~
No entry to sort: Kirstie Whitaker (@KirstieJane)
No entry to sort: Pablo Polosecki (@polosecki)
No entry to sort: Ami Tsuchida
No entry to sort: Daniel Brenner (@brennerd11)
No entry to sort: Isaiah Norton (@ihnorton)
No entry to sort: Kevin Sitek (@sitek)
No entry to sort: Luke Bloy (@bloyl)
No entry to sort: Martin Luessi (@mluessi)
No entry to sort: steve (@steve19922)
No entry to sort: Charl Linssen (@turingbirds)
No entry to sort: Félix C. Morency (@fmorency)
~~No entry to sort: Jonathan R. Williford (@williford)~~
No entry to sort: Michiel Cottaar (@MichielCottaar)
No entry to sort: Regina Kim (@reginakim)
No entry to sort: Valentin Haenel (@esc)
No entry to sort: Xu Wang
No entry to sort: maedoc (@maedoc)
I am unable to find GitHub handles for Ami Tsuchida or Xu Wang.
Apologies also to anybody who may have
## Acknowledgment
- [x] \(Mandatory\) I acknowledge that this contribution will be available under the Apache 2 license.
| [
{
"content": "\"\"\" This file contains defines parameters for nipy that we use to fill\nsettings in setup.py, the nipy top-level docstring, and for building the\ndocs. In setup.py in particular, we exec this file, so it cannot import nipy\n\"\"\"\n\n# nipype version information\n# Remove -dev for release\n__version__ = \"1.5.0-rc1.post-dev\"\n\n\ndef get_nipype_gitversion():\n \"\"\"Nipype version as reported by the last commit in git\n\n Returns\n -------\n None or str\n Version of Nipype according to git.\n \"\"\"\n import os\n import subprocess\n\n try:\n import nipype\n\n gitpath = os.path.realpath(\n os.path.join(os.path.dirname(nipype.__file__), os.path.pardir)\n )\n except:\n gitpath = os.getcwd()\n gitpathgit = os.path.join(gitpath, \".git\")\n if not os.path.exists(gitpathgit):\n return None\n ver = None\n try:\n o, _ = subprocess.Popen(\n \"git describe\", shell=True, cwd=gitpath, stdout=subprocess.PIPE\n ).communicate()\n except Exception:\n pass\n else:\n ver = o.decode().strip().split(\"-\")[-1]\n return ver\n\n\nif __version__.endswith(\"-dev\"):\n gitversion = get_nipype_gitversion()\n if gitversion:\n __version__ = \"{}+{}\".format(__version__, gitversion)\n\nCLASSIFIERS = [\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Topic :: Scientific/Engineering\",\n]\nPYTHON_REQUIRES = \">= 3.6\"\n\ndescription = \"Neuroimaging in Python: Pipelines and Interfaces\"\n\n# Note: this long_description is actually a copy/paste from the top-level\n# README.txt, so that it shows up nicely on PyPI. So please remember to edit\n# it only in one place and sync it correctly.\nlong_description = \"\"\"========================================================\nNIPYPE: Neuroimaging in Python: Pipelines and Interfaces\n========================================================\n\nCurrent neuroimaging software offer users an incredible opportunity to\nanalyze data using a variety of different algorithms. However, this has\nresulted in a heterogeneous collection of specialized applications\nwithout transparent interoperability or a uniform operating interface.\n\n*Nipype*, an open-source, community-developed initiative under the\numbrella of `NiPy <http://nipy.org>`_, is a Python project that provides a\nuniform interface to existing neuroimaging software and facilitates interaction\nbetween these packages within a single workflow. Nipype provides an environment\nthat encourages interactive exploration of algorithms from different\npackages (e.g., AFNI, ANTS, BRAINS, BrainSuite, Camino, FreeSurfer, FSL, MNE,\nMRtrix, MNE, Nipy, Slicer, SPM), eases the design of workflows within and\nbetween packages, and reduces the learning curve necessary to use different \\\npackages. 
Nipype is creating a collaborative platform for neuroimaging \\\nsoftware development in a high-level language and addressing limitations of \\\nexisting pipeline systems.\n\n*Nipype* allows you to:\n\n* easily interact with tools from different software packages\n* combine processing steps from different software packages\n* develop new workflows faster by reusing common steps from old ones\n* process data faster by running it in parallel on many cores/machines\n* make your research easily reproducible\n* share your processing workflows with the community\n\"\"\"\n\n# versions\nNIBABEL_MIN_VERSION = \"2.1.0\"\nNETWORKX_MIN_VERSION = \"1.9\"\nNUMPY_MIN_VERSION = \"1.13\"\n# Numpy bug in python 3.7:\n# https://www.opensourceanswers.com/blog/you-shouldnt-use-python-37-for-data-science-right-now.html\nNUMPY_MIN_VERSION_37 = \"1.15.3\"\nSCIPY_MIN_VERSION = \"0.14\"\nTRAITS_MIN_VERSION = \"4.6\"\nDATEUTIL_MIN_VERSION = \"2.2\"\nFUTURE_MIN_VERSION = \"0.16.0\"\nSIMPLEJSON_MIN_VERSION = \"3.8.0\"\nPROV_VERSION = \"1.5.2\"\nRDFLIB_MIN_VERSION = \"5.0.0\"\nCLICK_MIN_VERSION = \"6.6.0\"\nPYDOT_MIN_VERSION = \"1.2.3\"\n\nNAME = \"nipype\"\nMAINTAINER = \"nipype developers\"\nMAINTAINER_EMAIL = \"[email protected]\"\nDESCRIPTION = description\nLONG_DESCRIPTION = long_description\nURL = \"http://nipy.org/nipype\"\nDOWNLOAD_URL = \"http://github.com/nipy/nipype/archives/master\"\nLICENSE = \"Apache License, 2.0\"\nAUTHOR = \"nipype developers\"\nAUTHOR_EMAIL = \"[email protected]\"\nPLATFORMS = \"OS Independent\"\nMAJOR = __version__.split(\".\")[0]\nMINOR = __version__.split(\".\")[1]\nMICRO = __version__.replace(\"-\", \".\").split(\".\")[2]\nISRELEASE = (\n len(__version__.replace(\"-\", \".\").split(\".\")) == 3\n or \"post\" in __version__.replace(\"-\", \".\").split(\".\")[-1]\n)\nVERSION = __version__\nPROVIDES = [\"nipype\"]\nREQUIRES = [\n \"click>=%s\" % CLICK_MIN_VERSION,\n \"networkx>=%s\" % NETWORKX_MIN_VERSION,\n \"nibabel>=%s\" % NIBABEL_MIN_VERSION,\n 'numpy>=%s ; python_version < \"3.7\"' % NUMPY_MIN_VERSION,\n 'numpy>=%s ; python_version >= \"3.7\"' % NUMPY_MIN_VERSION_37,\n \"packaging\",\n \"prov>=%s\" % PROV_VERSION,\n \"pydot>=%s\" % PYDOT_MIN_VERSION,\n \"pydotplus\",\n \"python-dateutil>=%s\" % DATEUTIL_MIN_VERSION,\n \"rdflib>=%s\" % RDFLIB_MIN_VERSION,\n \"scipy>=%s\" % SCIPY_MIN_VERSION,\n \"simplejson>=%s\" % SIMPLEJSON_MIN_VERSION,\n \"traits>=%s,!=5.0\" % TRAITS_MIN_VERSION,\n \"filelock>=3.0.0\",\n \"etelemetry>=0.2.0\",\n]\n\nTESTS_REQUIRES = [\n \"codecov\",\n \"coverage<5\",\n \"pytest\",\n \"pytest-cov\",\n \"pytest-env\",\n \"pytest-timeout\",\n]\n\nEXTRA_REQUIRES = {\n \"data\": [\"datalad\"],\n \"doc\": [\n \"dipy\",\n \"ipython\",\n \"matplotlib\",\n \"nbsphinx\",\n \"sphinx-argparse\",\n \"sphinx>=2.1.2\",\n \"sphinxcontrib-apidoc\",\n \"sphinxcontrib-napoleon\",\n ],\n \"duecredit\": [\"duecredit\"],\n \"nipy\": [\"nitime\", \"nilearn\", \"dipy\", \"nipy\", \"matplotlib\"],\n \"profiler\": [\"psutil>=5.0\"],\n \"pybids\": [\"pybids>=0.7.0\"],\n \"specs\": [\"black\"],\n \"ssh\": [\"paramiko\"],\n \"tests\": TESTS_REQUIRES,\n \"xvfbwrapper\": [\"xvfbwrapper\"],\n # 'mesh': ['mayavi'] # Enable when it works\n}\n\n\ndef _list_union(iterable):\n return list(set(sum(iterable, [])))\n\n\n# Enable a handle to install all extra dependencies at once\nEXTRA_REQUIRES[\"all\"] = _list_union(EXTRA_REQUIRES.values())\n# dev = doc + tests + specs\nEXTRA_REQUIRES[\"dev\"] = _list_union(\n val for key, val in EXTRA_REQUIRES.items() if key in (\"doc\", \"tests\", 
\"specs\")\n)\n\nSTATUS = \"stable\"\n",
"path": "nipype/info.py"
}
] | [
{
"content": "\"\"\" This file contains defines parameters for nipy that we use to fill\nsettings in setup.py, the nipy top-level docstring, and for building the\ndocs. In setup.py in particular, we exec this file, so it cannot import nipy\n\"\"\"\n\n# nipype version information\n# Remove -dev for release\n__version__ = \"1.5.0\"\n\n\ndef get_nipype_gitversion():\n \"\"\"Nipype version as reported by the last commit in git\n\n Returns\n -------\n None or str\n Version of Nipype according to git.\n \"\"\"\n import os\n import subprocess\n\n try:\n import nipype\n\n gitpath = os.path.realpath(\n os.path.join(os.path.dirname(nipype.__file__), os.path.pardir)\n )\n except:\n gitpath = os.getcwd()\n gitpathgit = os.path.join(gitpath, \".git\")\n if not os.path.exists(gitpathgit):\n return None\n ver = None\n try:\n o, _ = subprocess.Popen(\n \"git describe\", shell=True, cwd=gitpath, stdout=subprocess.PIPE\n ).communicate()\n except Exception:\n pass\n else:\n ver = o.decode().strip().split(\"-\")[-1]\n return ver\n\n\nif __version__.endswith(\"-dev\"):\n gitversion = get_nipype_gitversion()\n if gitversion:\n __version__ = \"{}+{}\".format(__version__, gitversion)\n\nCLASSIFIERS = [\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Topic :: Scientific/Engineering\",\n]\nPYTHON_REQUIRES = \">= 3.6\"\n\ndescription = \"Neuroimaging in Python: Pipelines and Interfaces\"\n\n# Note: this long_description is actually a copy/paste from the top-level\n# README.txt, so that it shows up nicely on PyPI. So please remember to edit\n# it only in one place and sync it correctly.\nlong_description = \"\"\"========================================================\nNIPYPE: Neuroimaging in Python: Pipelines and Interfaces\n========================================================\n\nCurrent neuroimaging software offer users an incredible opportunity to\nanalyze data using a variety of different algorithms. However, this has\nresulted in a heterogeneous collection of specialized applications\nwithout transparent interoperability or a uniform operating interface.\n\n*Nipype*, an open-source, community-developed initiative under the\numbrella of `NiPy <http://nipy.org>`_, is a Python project that provides a\nuniform interface to existing neuroimaging software and facilitates interaction\nbetween these packages within a single workflow. Nipype provides an environment\nthat encourages interactive exploration of algorithms from different\npackages (e.g., AFNI, ANTS, BRAINS, BrainSuite, Camino, FreeSurfer, FSL, MNE,\nMRtrix, MNE, Nipy, Slicer, SPM), eases the design of workflows within and\nbetween packages, and reduces the learning curve necessary to use different \\\npackages. 
Nipype is creating a collaborative platform for neuroimaging \\\nsoftware development in a high-level language and addressing limitations of \\\nexisting pipeline systems.\n\n*Nipype* allows you to:\n\n* easily interact with tools from different software packages\n* combine processing steps from different software packages\n* develop new workflows faster by reusing common steps from old ones\n* process data faster by running it in parallel on many cores/machines\n* make your research easily reproducible\n* share your processing workflows with the community\n\"\"\"\n\n# versions\nNIBABEL_MIN_VERSION = \"2.1.0\"\nNETWORKX_MIN_VERSION = \"1.9\"\nNUMPY_MIN_VERSION = \"1.13\"\n# Numpy bug in python 3.7:\n# https://www.opensourceanswers.com/blog/you-shouldnt-use-python-37-for-data-science-right-now.html\nNUMPY_MIN_VERSION_37 = \"1.15.3\"\nSCIPY_MIN_VERSION = \"0.14\"\nTRAITS_MIN_VERSION = \"4.6\"\nDATEUTIL_MIN_VERSION = \"2.2\"\nFUTURE_MIN_VERSION = \"0.16.0\"\nSIMPLEJSON_MIN_VERSION = \"3.8.0\"\nPROV_VERSION = \"1.5.2\"\nRDFLIB_MIN_VERSION = \"5.0.0\"\nCLICK_MIN_VERSION = \"6.6.0\"\nPYDOT_MIN_VERSION = \"1.2.3\"\n\nNAME = \"nipype\"\nMAINTAINER = \"nipype developers\"\nMAINTAINER_EMAIL = \"[email protected]\"\nDESCRIPTION = description\nLONG_DESCRIPTION = long_description\nURL = \"http://nipy.org/nipype\"\nDOWNLOAD_URL = \"http://github.com/nipy/nipype/archives/master\"\nLICENSE = \"Apache License, 2.0\"\nAUTHOR = \"nipype developers\"\nAUTHOR_EMAIL = \"[email protected]\"\nPLATFORMS = \"OS Independent\"\nMAJOR = __version__.split(\".\")[0]\nMINOR = __version__.split(\".\")[1]\nMICRO = __version__.replace(\"-\", \".\").split(\".\")[2]\nISRELEASE = (\n len(__version__.replace(\"-\", \".\").split(\".\")) == 3\n or \"post\" in __version__.replace(\"-\", \".\").split(\".\")[-1]\n)\nVERSION = __version__\nPROVIDES = [\"nipype\"]\nREQUIRES = [\n \"click>=%s\" % CLICK_MIN_VERSION,\n \"networkx>=%s\" % NETWORKX_MIN_VERSION,\n \"nibabel>=%s\" % NIBABEL_MIN_VERSION,\n 'numpy>=%s ; python_version < \"3.7\"' % NUMPY_MIN_VERSION,\n 'numpy>=%s ; python_version >= \"3.7\"' % NUMPY_MIN_VERSION_37,\n \"packaging\",\n \"prov>=%s\" % PROV_VERSION,\n \"pydot>=%s\" % PYDOT_MIN_VERSION,\n \"pydotplus\",\n \"python-dateutil>=%s\" % DATEUTIL_MIN_VERSION,\n \"rdflib>=%s\" % RDFLIB_MIN_VERSION,\n \"scipy>=%s\" % SCIPY_MIN_VERSION,\n \"simplejson>=%s\" % SIMPLEJSON_MIN_VERSION,\n \"traits>=%s,!=5.0\" % TRAITS_MIN_VERSION,\n \"filelock>=3.0.0\",\n \"etelemetry>=0.2.0\",\n]\n\nTESTS_REQUIRES = [\n \"codecov\",\n \"coverage<5\",\n \"pytest\",\n \"pytest-cov\",\n \"pytest-env\",\n \"pytest-timeout\",\n]\n\nEXTRA_REQUIRES = {\n \"data\": [\"datalad\"],\n \"doc\": [\n \"dipy\",\n \"ipython\",\n \"matplotlib\",\n \"nbsphinx\",\n \"sphinx-argparse\",\n \"sphinx>=2.1.2\",\n \"sphinxcontrib-apidoc\",\n \"sphinxcontrib-napoleon\",\n ],\n \"duecredit\": [\"duecredit\"],\n \"nipy\": [\"nitime\", \"nilearn\", \"dipy\", \"nipy\", \"matplotlib\"],\n \"profiler\": [\"psutil>=5.0\"],\n \"pybids\": [\"pybids>=0.7.0\"],\n \"specs\": [\"black\"],\n \"ssh\": [\"paramiko\"],\n \"tests\": TESTS_REQUIRES,\n \"xvfbwrapper\": [\"xvfbwrapper\"],\n # 'mesh': ['mayavi'] # Enable when it works\n}\n\n\ndef _list_union(iterable):\n return list(set(sum(iterable, [])))\n\n\n# Enable a handle to install all extra dependencies at once\nEXTRA_REQUIRES[\"all\"] = _list_union(EXTRA_REQUIRES.values())\n# dev = doc + tests + specs\nEXTRA_REQUIRES[\"dev\"] = _list_union(\n val for key, val in EXTRA_REQUIRES.items() if key in (\"doc\", \"tests\", 
\"specs\")\n)\n\nSTATUS = \"stable\"\n",
"path": "nipype/info.py"
}
] | diff --git a/.mailmap b/.mailmap
index 4df0aff6e5..f603849d24 100644
--- a/.mailmap
+++ b/.mailmap
@@ -45,6 +45,8 @@ Colin Buchanan <[email protected]> <[email protected]>
Daniel Brenner <[email protected]>
Daniel Clark <[email protected]>
Daniel Geisler <[email protected]>
+Daniel Geisler <[email protected]> <[email protected]>
+Daniel Geisler <[email protected]> <[email protected]>
Daniel Ginsburg <[email protected]>
Daniel McNamee <[email protected]>
David Ellis <[email protected]> <[email protected]>
@@ -89,6 +91,7 @@ Joerg Stadler <[email protected]> <[email protected]>
John A. Lee <[email protected]>
John A. Lee <[email protected]> <[email protected]>
Joke Durnez <[email protected]>
+Jordi Huguet <[email protected]>
Josh Warner <[email protected]> <[email protected]>
Junhao WEN <[email protected]>
Kai Schlamp <[email protected]>
@@ -117,6 +120,7 @@ Lukas Snoek <[email protected]>
Marcel Falkiewicz <[email protected]> <[email protected]>
Martin Perez-Guevara <[email protected]>
Mathias Goncalves <[email protected]> <[email protected]>
+Mathias Goncalves <[email protected]> <[email protected]>
Mathieu Dubois <[email protected]> <[email protected]>
Mathieu Dubois <[email protected]> <[email protected]>
Matteo Mancini <[email protected]>
@@ -170,6 +174,7 @@ Steven Giavasis <[email protected]>
Steven Giavasis <[email protected]> <[email protected]>
Steven Giavasis <[email protected]> <[email protected]>
Steven Tilley <[email protected]> <[email protected]>
+Sulantha Mathotaarachchi <[email protected]>
Tristan Glatard <[email protected]> <[email protected]>
Victor Férat <[email protected]>
Victor Férat <[email protected]> <[email protected]>
diff --git a/.zenodo.json b/.zenodo.json
index 8a57735308..2ee43c9904 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -76,14 +76,14 @@
"name": "Dayan, Michael",
"orcid": "0000-0002-2666-0969"
},
- {
- "name": "Loney, Fred"
- },
{
"affiliation": "Dartmouth College: Hanover, NH, United States",
"name": "Halchenko, Yaroslav O.",
"orcid": "0000-0003-3456-2493"
},
+ {
+ "name": "Loney, Fred"
+ },
{
"affiliation": "Florida International University",
"name": "Salo, Taylor",
@@ -288,6 +288,11 @@
"name": "Kong, Xiang-Zhen",
"orcid": "0000-0002-0805-1350"
},
+ {
+ "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universit\u00e4t Dresden, Dresden, Germany",
+ "name": "Geisler, Daniel",
+ "orcid": "0000-0003-2076-5329"
+ },
{
"name": "Salvatore, John"
},
@@ -384,6 +389,11 @@
{
"name": "Cumba, Chad"
},
+ {
+ "affiliation": "University College London",
+ "name": "P\u00e9rez-Garc\u00eda, Fernando",
+ "orcid": "0000-0001-9090-3024"
+ },
{
"name": "Blair, Ross"
},
@@ -392,16 +402,6 @@
"name": "Iqbal, Shariq",
"orcid": "0000-0003-2766-8425"
},
- {
- "affiliation": "NIMH, Scientific and Statistical Computing Core",
- "name": "Glen, Daniel",
- "orcid": "0000-0001-8456-5647"
- },
- {
- "affiliation": "Technische Universit\u00e4t Dresden, Faculty of Medicine, Department of Child and Adolescent Psychiatry",
- "name": "Geisler, Daniel",
- "orcid": "0000-0003-2076-5329"
- },
{
"affiliation": "University of Iowa",
"name": "Welch, David"
@@ -429,11 +429,6 @@
"name": "Papadopoulos Orfanos, Dimitri",
"orcid": "0000-0002-1242-8990"
},
- {
- "affiliation": "University College London",
- "name": "P\u00e9rez-Garc\u00eda, Fernando",
- "orcid": "0000-0001-9090-3024"
- },
{
"affiliation": "Leibniz Institute for Neurobiology",
"name": "Stadler, J\u00f6rg",
@@ -618,6 +613,10 @@
"name": "Gerhard, Stephan",
"orcid": "0000-0003-4454-6171"
},
+ {
+ "affiliation": "Enigma Biomedical Group",
+ "name": "Mathotaarachchi, Sulantha"
+ },
{
"name": "Saase, Victor"
},
@@ -635,6 +634,11 @@
"affiliation": "Vrije Universiteit Amsterdam",
"name": "Ort, Eduard"
},
+ {
+ "affiliation": "CNRS, UMS3552 IRMaGe",
+ "name": "Condamine, Eric",
+ "orcid": "0000-0002-9533-3769"
+ },
{
"affiliation": "Stanford University",
"name": "Lerma-Usabiaga, Garikoitz",
@@ -654,6 +658,11 @@
"name": "Pellman, John",
"orcid": "0000-0001-6810-4461"
},
+ {
+ "affiliation": "BarcelonaBeta Brain Research Center",
+ "name": "Huguet, Jordi",
+ "orcid": "0000-0001-8420-4833"
+ },
{
"affiliation": "University of Pennsylvania",
"name": "Junhao WEN",
@@ -684,6 +693,10 @@
"name": "Andberg, Sami Kristian",
"orcid": "0000-0002-5650-3964"
},
+ {
+ "affiliation": "Sagol School of Neuroscience, Tel Aviv University",
+ "name": "Baratz, Zvi"
+ },
{
"name": "Matsubara, K"
},
@@ -719,11 +732,6 @@
{
"name": "Shachnev, Dmitry"
},
- {
- "affiliation": "CNRS, UMS3552 IRMaGe",
- "name": "Condamine, Eric",
- "orcid": "0000-0002-9533-3769"
- },
{
"name": "Flandin, Guillaume"
},
diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst
index 239aa7d936..e10949cf08 100644
--- a/doc/changelog/1.X.X-changelog.rst
+++ b/doc/changelog/1.X.X-changelog.rst
@@ -1,4 +1,4 @@
-1.5.0 (To be determined)
+1.5.0 (June 03, 2020)
=========================
New feature release in the 1.5.x series.
@@ -8,20 +8,36 @@ In this release, the example scripts have been split out into their own package:
(`Full changelog <https://github.com/nipy/nipype/milestone/1.5.0?closed=1>`__)
+ * FIX: volterra_expansion_order documentation error (https://github.com/nipy/nipype/pull/3213)
+ * FIX: BET incorrect output paths (https://github.com/nipy/nipype/pull/3214)
+ * FIX: Terminal output in ``report.rst`` spreads one line per character (https://github.com/nipy/nipype/pull/3220)
+ * FIX: Allow parsing freesurfer 7 version string (https://github.com/nipy/nipype/pull/3216)
+ * FIX: Use PackageInfo to get NiftyReg version (https://github.com/nipy/nipype/pull/3194)
* FIX: Partial rollback of N4BiasFieldCorrection (https://github.com/nipy/nipype/pull/3188)
* FIX: ANTs' tools maintenance overhaul (https://github.com/nipy/nipype/pull/3180)
* FIX: load_resultfile crashes if open resultsfile from crashed job (https://github.com/nipy/nipype/pull/3182)
* FIX: FSL model.py make multiple F-tests (https://github.com/nipy/nipype/pull/3166)
+ * ENH: Restore ants.legacy interfaces (https://github.com/nipy/nipype/pull/3222)
+ * ENH: Add ``"TruncateImageIntensity"`` operation to ``ants.utils.Image.Math`` (https://github.com/nipy/nipype/pull/3210)
+ * ENH: SPM NewSegment multi-channel segmentation (https://github.com/nipy/nipype/pull/3162)
+ * ENH: Add reverse-ordered transform lists to ants.Registration outputs (https://github.com/nipy/nipype/pull/3192)
* ENH: Improve workflow connect performance (https://github.com/nipy/nipype/pull/3184)
* ENH: Add ``ConstrainedSphericalDeconvolution`` interface to replace ``EstimateFOD`` for MRtrix3's ``dwi2fod`` (https://github.com/nipy/nipype/pull/3176)
* ENH: Detect values for EulerNumber interface (https://github.com/nipy/nipype/pull/3173)
* ENH: Remove examples from repository (https://github.com/nipy/nipype/pull/3172)
+ * TEST: Clean up tests (https://github.com/nipy/nipype/pull/3195)
+ * TEST: Mock terminal output before testing changing default value (https://github.com/nipy/nipype/pull/3193)
+ * REF: make invocations of python and pytest consistent with the one used/desired python (https://github.com/nipy/nipype/pull/3208)
* REF: Prefer math.gcd to hand-rolled Euclid's algorithm (https://github.com/nipy/nipype/pull/3177)
* REF: Removed all uses of numpy_mmap (https://github.com/nipy/nipype/pull/3121)
+ * DOC: Sphinx 3 compatibility (https://github.com/nipy/nipype/pull/3206)
* DOC: Update links, typos in contributing guide (https://github.com/nipy/nipype/pull/3160)
* DOC: Update SelectFiles docstring to match actual behavior (https://github.com/nipy/nipype/pull/3041)
* DOC: Updated .zenodo.json file (https://github.com/nipy/nipype/pull/3167)
* DOC: Update .zenodo.json (https://github.com/nipy/nipype/pull/3165)
+ * MNT: Permit recent nilearns (https://github.com/nipy/nipype/pull/2841)
+ * MNT: Test Python 3.8 (https://github.com/nipy/nipype/pull/3154)
+ * MNT: Restore ReadTheDocs (https://github.com/nipy/nipype/pull/3207)
* MNT: Update Zenodo ordering based on commit count (https://github.com/nipy/nipype/pull/3169)
1.4.2 (February 14, 2020)
diff --git a/nipype/info.py b/nipype/info.py
index 0e7fd0f70b..69eb443f76 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -5,7 +5,7 @@
# nipype version information
# Remove -dev for release
-__version__ = "1.5.0-rc1.post-dev"
+__version__ = "1.5.0"
def get_nipype_gitversion():
|
yt-project__yt-1532 | AHF answer tests are flaky
We're seeing random failures from the AHF answer tests on some PRs.
See e.g. https://tests.yt-project.org/job/yt_py3_git/414/.
| [
{
"content": "\"\"\"\nAHF data structures\n\n\n\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (c) 2017, yt Development Team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\nimport glob\nimport os\nimport stat\n\nimport numpy as np\n\nfrom yt.data_objects.static_output import \\\n Dataset, \\\n ParticleFile\nfrom yt.funcs import \\\n setdefaultattr\nfrom yt.geometry.particle_geometry_handler import \\\n ParticleIndex\nfrom yt.utilities.cosmology import \\\n Cosmology\n\nfrom .fields import AHFHalosFieldInfo\n\n\nclass AHFHalosFile(ParticleFile):\n def __init__(self, ds, io, filename, file_id):\n root, _ = os.path.splitext(filename)\n candidates = glob.glob(root + '*.AHF_halos')\n if len(candidates) == 1:\n filename = candidates[0]\n else:\n raise ValueError('Too many AHF_halos files.')\n self.col_names = self._read_column_names(filename)\n super(AHFHalosFile, self).__init__(ds, io, filename, file_id)\n\n def read_data(self, usecols=None):\n return np.genfromtxt(self.filename, names=self.col_names,\n usecols=usecols)\n\n def _read_column_names(self, filename):\n with open(filename) as f:\n line = f.readline()\n # Remove leading '#'\n line = line[1:]\n names = line.split()\n # Remove trailing '()'\n names = [name.split('(')[0] for name in names]\n return names\n\n\nclass AHFHalosDataset(Dataset):\n _index_class = ParticleIndex\n _file_class = AHFHalosFile\n _field_info_class = AHFHalosFieldInfo\n\n def __init__(self, filename, dataset_type='ahf',\n n_ref=16, over_refine_factor=1,\n units_override=None, unit_system='cgs',\n hubble_constant=1.0):\n root, _ = os.path.splitext(filename)\n self.log_filename = root + '.log'\n self.hubble_constant = hubble_constant\n\n self.n_ref = n_ref\n self.over_refine_factor = over_refine_factor\n super(AHFHalosDataset, self).__init__(\n filename, dataset_type=dataset_type,\n units_override=units_override, unit_system=unit_system\n )\n\n def _set_code_unit_attributes(self):\n setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))\n setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))\n setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))\n setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))\n\n def _parse_parameter_file(self):\n # Read all parameters.\n simu = self._read_log_simu()\n param = self._read_parameter()\n\n # Set up general information.\n self.filename_template = self.parameter_filename\n self.file_count = 1\n self.parameters.update(param)\n self.particle_types = ('halos')\n self.particle_types_raw = ('halos')\n self.unique_identifier = \\\n int(os.stat(self.parameter_filename)[stat.ST_CTIME])\n\n # Set up geometrical information.\n self.refine_by = 2\n self.dimensionality = 3\n nz = 1 << self.over_refine_factor\n self.domain_dimensions = np.ones(self.dimensionality, \"int32\") * nz\n self.domain_left_edge = np.array([0.0, 0.0, 0.0])\n # Note that boxsize is in Mpc but particle positions are in kpc.\n self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000\n self.periodicity = (True, True, True)\n\n # Set up cosmological information.\n self.cosmological_simulation = 1\n self.current_redshift = param['z']\n self.omega_lambda = simu['lambda0']\n self.omega_matter = simu['omega0']\n cosmo = Cosmology(self.hubble_constant,\n self.omega_matter, self.omega_lambda)\n self.current_time = 
cosmo.hubble_time(param['z']).in_units('s')\n\n @classmethod\n def _is_valid(self, *args, **kwargs):\n filename = args[0]\n if not filename.endswith('.parameter'):\n return False\n with open(filename, 'r') as f:\n if f.readlines()[11].startswith('AHF'):\n return True\n return False\n\n # Helper methods\n\n def _read_log_simu(self):\n simu = {}\n with open(self.log_filename) as f:\n for l in f:\n if l.startswith('simu.'):\n name, val = l.split(':')\n key = name.strip().split('.')[1]\n try:\n val = float(val)\n except:\n val = float.fromhex(val)\n simu[key] = val\n return simu\n\n def _read_parameter(self):\n param = {}\n with open(self.parameter_filename) as f:\n for l in f:\n words = l.split()\n if len(words) == 2:\n key, val = words\n try:\n val = float(val)\n param[key] = val\n except:\n pass\n return param\n",
"path": "yt/frontends/ahf/data_structures.py"
}
] | [
{
"content": "\"\"\"\nAHF data structures\n\n\n\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (c) 2017, yt Development Team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\nimport glob\nimport os\nimport stat\n\nimport numpy as np\n\nfrom yt.data_objects.static_output import \\\n Dataset, \\\n ParticleFile\nfrom yt.funcs import \\\n setdefaultattr\nfrom yt.geometry.particle_geometry_handler import \\\n ParticleIndex\nfrom yt.utilities.cosmology import \\\n Cosmology\n\nfrom .fields import AHFHalosFieldInfo\n\n\nclass AHFHalosFile(ParticleFile):\n def __init__(self, ds, io, filename, file_id):\n root, _ = os.path.splitext(filename)\n candidates = glob.glob(root + '*.AHF_halos')\n if len(candidates) == 1:\n filename = candidates[0]\n else:\n raise ValueError('Too many AHF_halos files.')\n self.col_names = self._read_column_names(filename)\n super(AHFHalosFile, self).__init__(ds, io, filename, file_id)\n\n def read_data(self, usecols=None):\n return np.genfromtxt(self.filename, names=self.col_names,\n usecols=usecols)\n\n def _read_column_names(self, filename):\n with open(filename) as f:\n line = f.readline()\n # Remove leading '#'\n line = line[1:]\n names = line.split()\n # Remove trailing '()'\n names = [name.split('(')[0] for name in names]\n return names\n\n\nclass AHFHalosDataset(Dataset):\n _index_class = ParticleIndex\n _file_class = AHFHalosFile\n _field_info_class = AHFHalosFieldInfo\n\n def __init__(self, filename, dataset_type='ahf',\n n_ref=16, over_refine_factor=1,\n units_override=None, unit_system='cgs',\n hubble_constant=1.0):\n root, _ = os.path.splitext(filename)\n self.log_filename = root + '.log'\n self.hubble_constant = hubble_constant\n\n self.n_ref = n_ref\n self.over_refine_factor = over_refine_factor\n super(AHFHalosDataset, self).__init__(\n filename, dataset_type=dataset_type,\n units_override=units_override, unit_system=unit_system\n )\n\n def _set_code_unit_attributes(self):\n setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))\n setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))\n setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))\n setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))\n\n def _parse_parameter_file(self):\n # Read all parameters.\n simu = self._read_log_simu()\n param = self._read_parameter()\n\n # Set up general information.\n self.filename_template = self.parameter_filename\n self.file_count = 1\n self.parameters.update(param)\n self.particle_types = ('halos')\n self.particle_types_raw = ('halos')\n self.unique_identifier = \\\n int(os.stat(self.parameter_filename)[stat.ST_CTIME])\n\n # Set up geometrical information.\n self.refine_by = 2\n self.dimensionality = 3\n nz = 1 << self.over_refine_factor\n self.domain_dimensions = np.ones(self.dimensionality, \"int32\") * nz\n self.domain_left_edge = np.array([0.0, 0.0, 0.0])\n # Note that boxsize is in Mpc but particle positions are in kpc.\n self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000\n self.periodicity = (True, True, True)\n\n # Set up cosmological information.\n self.cosmological_simulation = 1\n self.current_redshift = param['z']\n self.omega_lambda = simu['lambda0']\n self.omega_matter = simu['omega0']\n cosmo = Cosmology(self.hubble_constant,\n self.omega_matter, self.omega_lambda)\n self.current_time = 
cosmo.hubble_time(param['z']).in_units('s')\n\n @classmethod\n def _is_valid(self, *args, **kwargs):\n filename = args[0]\n if not filename.endswith('.parameter'):\n return False\n with open(filename, 'r') as f:\n if f.readlines()[11].startswith('AHF'):\n return True\n return False\n\n # Helper methods\n\n def _read_log_simu(self):\n simu = {}\n with open(self.log_filename) as f:\n for l in f:\n if l.startswith('simu.'):\n name, val = l.split(':')\n key = name.strip().split('.')[1]\n try:\n val = float(val)\n except:\n val = float.fromhex(val)\n simu[key] = val\n return simu\n\n def _read_parameter(self):\n param = {}\n with open(self.parameter_filename) as f:\n for l in f:\n words = l.split()\n if len(words) == 2:\n key, val = words\n try:\n val = float(val)\n param[key] = val\n except:\n pass\n return param\n\n @property\n def _skip_cache(self):\n return True\n",
"path": "yt/frontends/ahf/data_structures.py"
}
] | diff --git a/tests/tests.yaml b/tests/tests.yaml
index 8f2f9406945..bdc7279f13a 100644
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -35,7 +35,7 @@ answer_tests:
local_gizmo_002:
- yt/frontends/gizmo/tests/test_outputs.py
- local_halos_003:
+ local_halos_004:
- yt/analysis_modules/halo_analysis/tests/test_halo_finders.py # [py2]
- yt/analysis_modules/halo_finding/tests/test_rockstar.py # [py2]
- yt/frontends/ahf/tests/test_outputs.py
diff --git a/yt/frontends/ahf/data_structures.py b/yt/frontends/ahf/data_structures.py
index bcc925c9b10..301b2ddbc02 100644
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -156,3 +156,7 @@ def _read_parameter(self):
except:
pass
return param
+
+ @property
+ def _skip_cache(self):
+ return True
diff --git a/yt/frontends/ahf/tests/test_outputs.py b/yt/frontends/ahf/tests/test_outputs.py
index a7b86161271..972d6d8f7ba 100644
--- a/yt/frontends/ahf/tests/test_outputs.py
+++ b/yt/frontends/ahf/tests/test_outputs.py
@@ -38,7 +38,7 @@ def test_fields_ahf_halos():
ds = load(ahf_halos)
assert_equal(str(ds), os.path.basename(ahf_halos))
for field in _fields:
- yield FieldValuesTest(ahf_halos, field, particle_type=True)
+ yield FieldValuesTest(ds, field, particle_type=True)
@requires_file(ahf_halos)
|
zostera__django-bootstrap4-191 | Building docs locally gives ImportError
The `make docs` command raises `ImportError`.
```
WARNING: autodoc: failed to import function 'templatetags.bootstrap4.bootstrap_form' from module 'bootstrap4'; the following exception was raised:
Traceback (most recent call last):
File "/Users/dylan/Projects/django-bootstrap4/src/bootstrap4/__init__.py", line 2, in <module>
from _version import version
ModuleNotFoundError: No module named '_version'
```
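The traceback imports `_version` as a top-level module, but `_version.py` (presumably written by `setuptools_scm` at build time) lives inside the `bootstrap4` package, so it is only importable as a submodule. A minimal sketch of the corrected `src/bootstrap4/__init__.py`, mirroring the fix applied in the diff below (the `setuptools_scm` fallback is unchanged; the comments are added here for illustration):

```python
try:
    # Relative import: _version.py sits inside the bootstrap4 package,
    # so it must be imported as a submodule of the package, not as the
    # top-level module that the failing "from _version import version"
    # statement looks for.
    from ._version import version
except ImportError:
    try:
        # Fallback for source checkouts where _version.py has not been
        # generated yet: ask setuptools_scm for the version directly.
        from setuptools_scm import get_version

        version = get_version()
    except ImportError:
        version = "???"

__version__ = version
```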
| [
{
"content": "try:\n from _version import version\nexcept ImportError:\n try:\n from setuptools_scm import get_version\n\n version = get_version()\n except ImportError:\n version = \"???\"\n__version__ = version\n",
"path": "src/bootstrap4/__init__.py"
}
] | [
{
"content": "try:\n from ._version import version\nexcept ImportError:\n try:\n from setuptools_scm import get_version\n\n version = get_version()\n except ImportError:\n version = \"???\"\n__version__ = version\n",
"path": "src/bootstrap4/__init__.py"
}
] | diff --git a/src/bootstrap4/__init__.py b/src/bootstrap4/__init__.py
index 17f62110..1f96b1a4 100644
--- a/src/bootstrap4/__init__.py
+++ b/src/bootstrap4/__init__.py
@@ -1,5 +1,5 @@
try:
- from _version import version
+ from ._version import version
except ImportError:
try:
from setuptools_scm import get_version
diff --git a/tests/test_version.py b/tests/test_version.py
new file mode 100644
index 00000000..881c605a
--- /dev/null
+++ b/tests/test_version.py
@@ -0,0 +1,12 @@
+from django.test import TestCase
+
+
+class VersionTest(TestCase):
+ """Test presence of package version."""
+
+ def test_version(self):
+ import bootstrap4
+
+ version = bootstrap4.__version__
+ version_parts = version.split(".")
+ self.assertTrue(len(version_parts) >= 3)
|
learningequality__kolibri-6355 | tasks got cleared without triggering a 'clear task' action
### Observed behavior
Observed that my list of tasks was cleared after initiating a new import
### Expected behavior
Tasks should not be cleared until the user explicitly clears them
### User-facing consequences
Loss of data: the historical context of previous tasks is lost
### Errors and logs
none
### Steps to reproduce
see notes below
### Context
0.13.0 beta 1
| [
{
"content": "from django.core.cache import cache\nfrom django.db.models import Manager\nfrom django.db.models import Sum\nfrom django.db.models.query import RawQuerySet\nfrom le_utils.constants import content_kinds\nfrom rest_framework import serializers\n\nfrom kolibri.core.content.models import AssessmentMetaData\nfrom kolibri.core.content.models import ChannelMetadata\nfrom kolibri.core.content.models import ContentNode\nfrom kolibri.core.content.models import File\nfrom kolibri.core.content.models import Language\nfrom kolibri.core.fields import create_timezonestamp\n\n\nclass DynamicFieldsModelSerializer(serializers.ModelSerializer):\n def __init__(self, *args, **kwargs):\n # Instantiate the superclass normally\n super(DynamicFieldsModelSerializer, self).__init__(*args, **kwargs)\n\n # enable dynamic fields specification!\n if \"request\" in self.context and self.context[\"request\"].GET.get(\n \"fields\", None\n ):\n fields = self.context[\"request\"].GET[\"fields\"].split(\",\")\n # Drop any fields that are not specified in the `fields` argument.\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in existing - allowed:\n self.fields.pop(field_name)\n\n\nclass ChannelMetadataSerializer(serializers.ModelSerializer):\n root = serializers.PrimaryKeyRelatedField(read_only=True)\n lang_code = serializers.SerializerMethodField()\n lang_name = serializers.SerializerMethodField()\n available = serializers.SerializerMethodField()\n num_coach_contents = serializers.IntegerField(source=\"root.num_coach_contents\")\n\n def get_lang_code(self, instance):\n if instance.root.lang is None:\n return None\n\n return instance.root.lang.lang_code\n\n def get_lang_name(self, instance):\n if instance.root.lang is None:\n return None\n\n return instance.root.lang.lang_name\n\n def get_available(self, instance):\n return instance.root.available\n\n class Meta:\n model = ChannelMetadata\n fields = (\n \"author\",\n \"description\",\n \"id\",\n \"last_updated\",\n \"lang_code\",\n \"lang_name\",\n \"name\",\n \"root\",\n \"thumbnail\",\n \"version\",\n \"available\",\n \"num_coach_contents\",\n )\n\n\nclass PublicChannelSerializer(serializers.ModelSerializer):\n included_languages = serializers.SerializerMethodField()\n matching_tokens = serializers.SerializerMethodField(\"match_tokens\")\n language = serializers.SerializerMethodField()\n icon_encoding = serializers.SerializerMethodField()\n last_published = serializers.SerializerMethodField()\n\n def get_language(self, instance):\n if instance.root.lang is None:\n return None\n\n return instance.root.lang.lang_code\n\n def get_icon_encoding(self, instance):\n return instance.thumbnail\n\n def get_included_languages(self, instance):\n return list(instance.included_languages.all().values_list(\"id\", flat=True))\n\n def get_last_published(self, instance):\n return (\n None\n if not instance.last_updated\n else create_timezonestamp(instance.last_updated)\n )\n\n def match_tokens(self, channel):\n return []\n\n class Meta:\n model = ChannelMetadata\n fields = (\n \"id\",\n \"name\",\n \"language\",\n \"included_languages\",\n \"description\",\n \"total_resource_count\",\n \"version\",\n \"published_size\",\n \"last_published\",\n \"icon_encoding\",\n \"matching_tokens\",\n \"public\",\n )\n\n\nclass LowerCaseField(serializers.CharField):\n def to_representation(self, obj):\n return super(LowerCaseField, self).to_representation(obj).lower()\n\n\nclass LanguageSerializer(serializers.ModelSerializer):\n id = LowerCaseField(max_length=14)\n 
lang_code = LowerCaseField(max_length=3)\n lang_subcode = LowerCaseField(max_length=10)\n\n class Meta:\n model = Language\n fields = (\"id\", \"lang_code\", \"lang_subcode\", \"lang_name\", \"lang_direction\")\n\n\nclass FileSerializer(serializers.ModelSerializer):\n checksum = serializers.CharField(source=\"local_file_id\")\n storage_url = serializers.SerializerMethodField()\n download_url = serializers.SerializerMethodField()\n extension = serializers.SerializerMethodField()\n file_size = serializers.SerializerMethodField()\n lang = LanguageSerializer()\n available = serializers.BooleanField(source=\"local_file.available\")\n\n def get_storage_url(self, target_node):\n return target_node.get_storage_url()\n\n def get_download_url(self, target_node):\n return target_node.get_download_url()\n\n def get_extension(self, target_node):\n return target_node.get_extension()\n\n def get_file_size(self, target_node):\n return target_node.get_file_size()\n\n class Meta:\n model = File\n fields = (\n \"storage_url\",\n \"id\",\n \"priority\",\n \"available\",\n \"file_size\",\n \"extension\",\n \"checksum\",\n \"preset\",\n \"lang\",\n \"supplementary\",\n \"thumbnail\",\n \"download_url\",\n )\n\n\nclass AssessmentMetaDataSerializer(serializers.ModelSerializer):\n\n assessment_item_ids = serializers.JSONField(default=\"[]\")\n mastery_model = serializers.JSONField(default=\"{}\")\n\n class Meta:\n model = AssessmentMetaData\n fields = (\n \"assessment_item_ids\",\n \"number_of_assessments\",\n \"mastery_model\",\n \"randomize\",\n \"is_manipulable\",\n )\n\n\ndef get_summary_logs(content_ids, user):\n from kolibri.core.logger.models import ContentSummaryLog\n\n if not content_ids:\n return ContentSummaryLog.objects.none()\n # get all summary logs for the current user that correspond to the descendant content nodes\n return ContentSummaryLog.objects.filter(user=user, content_id__in=content_ids)\n\n\ndef get_topic_progress_fraction(topic, user):\n leaf_ids = (\n topic.get_descendants(include_self=False)\n .order_by()\n .exclude(kind=content_kinds.TOPIC)\n .values_list(\"content_id\", flat=True)\n )\n return round(\n (\n get_summary_logs(leaf_ids, user).aggregate(Sum(\"progress\"))[\"progress__sum\"]\n or 0\n )\n / (len(leaf_ids) or 1),\n 4,\n )\n\n\ndef get_content_progress_fraction(content, user):\n from kolibri.core.logger.models import ContentSummaryLog\n\n try:\n # add up all the progress for the logs, and divide by the total number of content nodes to get overall progress\n overall_progress = ContentSummaryLog.objects.get(\n user=user, content_id=content.content_id\n ).progress\n except ContentSummaryLog.DoesNotExist:\n return None\n return round(overall_progress, 4)\n\n\ndef get_topic_and_content_progress_fraction(node, user):\n if node.kind == content_kinds.TOPIC:\n return get_topic_progress_fraction(node, user)\n else:\n return get_content_progress_fraction(node, user)\n\n\ndef get_topic_and_content_progress_fractions(nodes, user):\n leaf_ids = (\n nodes.get_descendants(include_self=True)\n .order_by()\n .exclude(available=False)\n .exclude(kind=content_kinds.TOPIC)\n .values_list(\"content_id\", flat=True)\n )\n\n leaf_node_logs = get_summary_logs(leaf_ids, user)\n\n overall_progress = {}\n\n for log in leaf_node_logs.values(\"content_id\", \"progress\"):\n overall_progress[log[\"content_id\"]] = round(log[\"progress\"], 4)\n\n for node in nodes:\n if node.kind == content_kinds.TOPIC:\n topic_leaf_ids = (\n node.get_descendants(include_self=True)\n .order_by()\n .exclude(available=False)\n 
.exclude(kind=content_kinds.TOPIC)\n .values_list(\"content_id\", flat=True)\n )\n\n overall_progress[node.content_id] = (\n round(\n sum(overall_progress.get(leaf_id, 0) for leaf_id in topic_leaf_ids)\n / len(topic_leaf_ids),\n 4,\n )\n if topic_leaf_ids\n else 0.0\n )\n\n return overall_progress\n\n\ndef get_content_progress_fractions(nodes, user):\n if isinstance(nodes, RawQuerySet) or isinstance(nodes, list):\n leaf_ids = [datum.content_id for datum in nodes]\n else:\n leaf_ids = nodes.exclude(kind=content_kinds.TOPIC).values_list(\n \"content_id\", flat=True\n )\n\n summary_logs = get_summary_logs(leaf_ids, user)\n\n # make a lookup dict for all logs to allow mapping from content_id to current progress\n overall_progress = {\n log[\"content_id\"]: round(log[\"progress\"], 4)\n for log in summary_logs.values(\"content_id\", \"progress\")\n }\n return overall_progress\n\n\nclass ContentNodeListSerializer(serializers.ListSerializer):\n def to_representation(self, data):\n\n # Dealing with nested relationships, data can be a Manager,\n # so, first get a queryset from the Manager if needed\n data = data.all() if isinstance(data, Manager) else data\n\n # initialize cache key\n cache_key = None\n\n # ensure that we are filtering by the parent only\n # this allows us to only cache results on the learn page\n from .api import ContentNodeFilter\n\n parent_filter_only = set(self.context[\"request\"].GET.keys()).intersection(\n ContentNodeFilter.Meta.fields\n ) == set([\"parent\"])\n\n # Cache parent look ups only\n if parent_filter_only:\n cache_key = \"contentnode_list_{parent}\".format(\n parent=self.context[\"request\"].GET.get(\"parent\")\n )\n\n if cache.get(cache_key):\n return cache.get(cache_key)\n\n if not data:\n return data\n\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n progress_dict = {}\n else:\n user = self.context[\"request\"].user\n # Don't annotate topic progress as too expensive\n progress_dict = get_content_progress_fractions(data, user)\n\n result = []\n topic_only = True\n\n # Allow results to be limited after all queryset filtering has occurred\n if self.limit:\n data = data[: self.limit]\n\n for item in data:\n obj = self.child.to_representation(\n item,\n progress_fraction=progress_dict.get(item.content_id),\n annotate_progress_fraction=False,\n )\n topic_only = topic_only and obj.get(\"kind\") == content_kinds.TOPIC\n result.append(obj)\n\n # Only store if all nodes are topics, because we don't annotate progress on them\n # This has the happy side effect of not caching our dynamically calculated\n # recommendation queries, which might change for the same user over time\n # because they do not return topics\n if topic_only and parent_filter_only:\n cache.set(cache_key, result, 60 * 10)\n\n return result\n\n\nclass ContentNodeSerializer(DynamicFieldsModelSerializer):\n parent = serializers.PrimaryKeyRelatedField(read_only=True)\n files = FileSerializer(many=True, read_only=True)\n assessmentmetadata = AssessmentMetaDataSerializer(\n read_only=True, allow_null=True, many=True\n )\n lang = LanguageSerializer()\n\n class Meta:\n model = ContentNode\n fields = (\n \"id\",\n \"assessmentmetadata\",\n \"author\",\n \"available\",\n \"channel_id\",\n \"coach_content\",\n \"content_id\",\n \"description\",\n \"files\",\n \"kind\",\n \"lang\",\n \"license_description\",\n \"license_name\",\n \"license_owner\",\n \"num_coach_contents\",\n \"parent\",\n \"sort_order\",\n \"title\",\n )\n list_serializer_class = 
ContentNodeListSerializer\n\n def __new__(cls, *args, **kwargs):\n # This is overwritten to provide a ListClassSerializer for many=True\n limit = kwargs.pop(\"limit\", None)\n new = super(ContentNodeSerializer, cls).__new__(cls, *args, **kwargs)\n new.limit = limit\n return new\n\n def to_representation(\n self, instance, progress_fraction=None, annotate_progress_fraction=True\n ):\n if progress_fraction is None and annotate_progress_fraction:\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n # Don't try to annotate for a non facility user\n progress_fraction = 0.0\n else:\n user = self.context[\"request\"].user\n if instance.kind != content_kinds.TOPIC:\n progress_fraction = get_content_progress_fraction(instance, user)\n value = super(ContentNodeSerializer, self).to_representation(instance)\n value[\"progress_fraction\"] = progress_fraction\n return value\n\n\nclass ContentNodeGranularSerializer(serializers.ModelSerializer):\n num_coach_contents = serializers.SerializerMethodField()\n coach_content = serializers.SerializerMethodField()\n total_resources = serializers.SerializerMethodField()\n importable = serializers.SerializerMethodField()\n\n class Meta:\n model = ContentNode\n fields = (\n \"id\",\n \"available\",\n \"coach_content\",\n \"importable\",\n \"kind\",\n \"num_coach_contents\",\n \"on_device_resources\",\n \"title\",\n \"total_resources\",\n )\n\n @property\n def channel_stats(self):\n return self.context[\"channel_stats\"]\n\n def get_total_resources(self, instance):\n # channel_stats is None for export\n if self.channel_stats is None:\n return instance.on_device_resources\n return self.channel_stats.get(instance.id, {\"total_resources\": 0})[\n \"total_resources\"\n ]\n\n def get_num_coach_contents(self, instance):\n # If for exporting, only show what is available on server. For importing,\n # show all of the coach contents in the topic.\n if self.channel_stats is None:\n return instance.num_coach_contents\n return self.channel_stats.get(instance.id, {\"num_coach_contents\": 0})[\n \"num_coach_contents\"\n ]\n\n def get_coach_content(self, instance):\n # If for exporting, only show what is on server. 
For importing,\n # show all of the coach contents in the topic.\n if self.channel_stats is None:\n return instance.coach_content\n return self.channel_stats.get(instance.id, {\"coach_content\": False})[\n \"coach_content\"\n ]\n\n def get_importable(self, instance):\n # If for export, just return None\n if self.channel_stats is None:\n return None\n return instance.id in self.channel_stats\n\n\nclass ContentNodeProgressListSerializer(serializers.ListSerializer):\n def to_representation(self, data):\n\n if not data:\n return data\n\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n progress_dict = {}\n else:\n user = self.context[\"request\"].user\n # Don't annotate topic progress as too expensive\n progress_dict = get_topic_and_content_progress_fractions(data, user)\n\n # Dealing with nested relationships, data can be a Manager,\n # so, first get a queryset from the Manager if needed\n iterable = data.all() if isinstance(data, Manager) else data\n\n return [\n self.child.to_representation(\n item,\n progress_fraction=progress_dict.get(item.content_id, 0.0),\n annotate_progress_fraction=False,\n )\n for item in iterable\n ]\n\n\nclass ContentNodeProgressSerializer(serializers.Serializer):\n def to_representation(\n self, instance, progress_fraction=None, annotate_progress_fraction=True\n ):\n if progress_fraction is None and annotate_progress_fraction:\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n # Don't try to annotate for a non facility user\n progress_fraction = 0\n else:\n user = self.context[\"request\"].user\n progress_fraction = (\n get_topic_and_content_progress_fraction(instance, user) or 0.0\n )\n return {\"id\": instance.id, \"progress_fraction\": progress_fraction}\n\n class Meta:\n list_serializer_class = ContentNodeProgressListSerializer\n",
"path": "kolibri/core/content/serializers.py"
}
] | [
{
"content": "from django.core.cache import cache\nfrom django.db.models import Manager\nfrom django.db.models import Sum\nfrom django.db.models.query import RawQuerySet\nfrom le_utils.constants import content_kinds\nfrom rest_framework import serializers\n\nfrom kolibri.core.content.models import AssessmentMetaData\nfrom kolibri.core.content.models import ChannelMetadata\nfrom kolibri.core.content.models import ContentNode\nfrom kolibri.core.content.models import File\nfrom kolibri.core.content.models import Language\nfrom kolibri.core.fields import create_timezonestamp\n\n\nclass DynamicFieldsModelSerializer(serializers.ModelSerializer):\n def __init__(self, *args, **kwargs):\n # Instantiate the superclass normally\n super(DynamicFieldsModelSerializer, self).__init__(*args, **kwargs)\n\n # enable dynamic fields specification!\n if \"request\" in self.context and self.context[\"request\"].GET.get(\n \"fields\", None\n ):\n fields = self.context[\"request\"].GET[\"fields\"].split(\",\")\n # Drop any fields that are not specified in the `fields` argument.\n allowed = set(fields)\n existing = set(self.fields.keys())\n for field_name in existing - allowed:\n self.fields.pop(field_name)\n\n\nclass ChannelMetadataSerializer(serializers.ModelSerializer):\n root = serializers.PrimaryKeyRelatedField(read_only=True)\n lang_code = serializers.SerializerMethodField()\n lang_name = serializers.SerializerMethodField()\n available = serializers.SerializerMethodField()\n num_coach_contents = serializers.IntegerField(source=\"root.num_coach_contents\")\n\n def get_lang_code(self, instance):\n if instance.root.lang is None:\n return None\n\n return instance.root.lang.lang_code\n\n def get_lang_name(self, instance):\n if instance.root.lang is None:\n return None\n\n return instance.root.lang.lang_name\n\n def get_available(self, instance):\n return instance.root.available\n\n class Meta:\n model = ChannelMetadata\n fields = (\n \"author\",\n \"description\",\n \"id\",\n \"last_updated\",\n \"lang_code\",\n \"lang_name\",\n \"name\",\n \"root\",\n \"thumbnail\",\n \"version\",\n \"available\",\n \"num_coach_contents\",\n \"public\",\n )\n\n\nclass PublicChannelSerializer(serializers.ModelSerializer):\n included_languages = serializers.SerializerMethodField()\n matching_tokens = serializers.SerializerMethodField(\"match_tokens\")\n language = serializers.SerializerMethodField()\n icon_encoding = serializers.SerializerMethodField()\n last_published = serializers.SerializerMethodField()\n\n def get_language(self, instance):\n if instance.root.lang is None:\n return None\n\n return instance.root.lang.lang_code\n\n def get_icon_encoding(self, instance):\n return instance.thumbnail\n\n def get_included_languages(self, instance):\n return list(instance.included_languages.all().values_list(\"id\", flat=True))\n\n def get_last_published(self, instance):\n return (\n None\n if not instance.last_updated\n else create_timezonestamp(instance.last_updated)\n )\n\n def match_tokens(self, channel):\n return []\n\n class Meta:\n model = ChannelMetadata\n fields = (\n \"id\",\n \"name\",\n \"language\",\n \"included_languages\",\n \"description\",\n \"total_resource_count\",\n \"version\",\n \"published_size\",\n \"last_published\",\n \"icon_encoding\",\n \"matching_tokens\",\n \"public\",\n )\n\n\nclass LowerCaseField(serializers.CharField):\n def to_representation(self, obj):\n return super(LowerCaseField, self).to_representation(obj).lower()\n\n\nclass LanguageSerializer(serializers.ModelSerializer):\n id = 
LowerCaseField(max_length=14)\n lang_code = LowerCaseField(max_length=3)\n lang_subcode = LowerCaseField(max_length=10)\n\n class Meta:\n model = Language\n fields = (\"id\", \"lang_code\", \"lang_subcode\", \"lang_name\", \"lang_direction\")\n\n\nclass FileSerializer(serializers.ModelSerializer):\n checksum = serializers.CharField(source=\"local_file_id\")\n storage_url = serializers.SerializerMethodField()\n download_url = serializers.SerializerMethodField()\n extension = serializers.SerializerMethodField()\n file_size = serializers.SerializerMethodField()\n lang = LanguageSerializer()\n available = serializers.BooleanField(source=\"local_file.available\")\n\n def get_storage_url(self, target_node):\n return target_node.get_storage_url()\n\n def get_download_url(self, target_node):\n return target_node.get_download_url()\n\n def get_extension(self, target_node):\n return target_node.get_extension()\n\n def get_file_size(self, target_node):\n return target_node.get_file_size()\n\n class Meta:\n model = File\n fields = (\n \"storage_url\",\n \"id\",\n \"priority\",\n \"available\",\n \"file_size\",\n \"extension\",\n \"checksum\",\n \"preset\",\n \"lang\",\n \"supplementary\",\n \"thumbnail\",\n \"download_url\",\n )\n\n\nclass AssessmentMetaDataSerializer(serializers.ModelSerializer):\n\n assessment_item_ids = serializers.JSONField(default=\"[]\")\n mastery_model = serializers.JSONField(default=\"{}\")\n\n class Meta:\n model = AssessmentMetaData\n fields = (\n \"assessment_item_ids\",\n \"number_of_assessments\",\n \"mastery_model\",\n \"randomize\",\n \"is_manipulable\",\n )\n\n\ndef get_summary_logs(content_ids, user):\n from kolibri.core.logger.models import ContentSummaryLog\n\n if not content_ids:\n return ContentSummaryLog.objects.none()\n # get all summary logs for the current user that correspond to the descendant content nodes\n return ContentSummaryLog.objects.filter(user=user, content_id__in=content_ids)\n\n\ndef get_topic_progress_fraction(topic, user):\n leaf_ids = (\n topic.get_descendants(include_self=False)\n .order_by()\n .exclude(kind=content_kinds.TOPIC)\n .values_list(\"content_id\", flat=True)\n )\n return round(\n (\n get_summary_logs(leaf_ids, user).aggregate(Sum(\"progress\"))[\"progress__sum\"]\n or 0\n )\n / (len(leaf_ids) or 1),\n 4,\n )\n\n\ndef get_content_progress_fraction(content, user):\n from kolibri.core.logger.models import ContentSummaryLog\n\n try:\n # add up all the progress for the logs, and divide by the total number of content nodes to get overall progress\n overall_progress = ContentSummaryLog.objects.get(\n user=user, content_id=content.content_id\n ).progress\n except ContentSummaryLog.DoesNotExist:\n return None\n return round(overall_progress, 4)\n\n\ndef get_topic_and_content_progress_fraction(node, user):\n if node.kind == content_kinds.TOPIC:\n return get_topic_progress_fraction(node, user)\n else:\n return get_content_progress_fraction(node, user)\n\n\ndef get_topic_and_content_progress_fractions(nodes, user):\n leaf_ids = (\n nodes.get_descendants(include_self=True)\n .order_by()\n .exclude(available=False)\n .exclude(kind=content_kinds.TOPIC)\n .values_list(\"content_id\", flat=True)\n )\n\n leaf_node_logs = get_summary_logs(leaf_ids, user)\n\n overall_progress = {}\n\n for log in leaf_node_logs.values(\"content_id\", \"progress\"):\n overall_progress[log[\"content_id\"]] = round(log[\"progress\"], 4)\n\n for node in nodes:\n if node.kind == content_kinds.TOPIC:\n topic_leaf_ids = (\n node.get_descendants(include_self=True)\n 
.order_by()\n .exclude(available=False)\n .exclude(kind=content_kinds.TOPIC)\n .values_list(\"content_id\", flat=True)\n )\n\n overall_progress[node.content_id] = (\n round(\n sum(overall_progress.get(leaf_id, 0) for leaf_id in topic_leaf_ids)\n / len(topic_leaf_ids),\n 4,\n )\n if topic_leaf_ids\n else 0.0\n )\n\n return overall_progress\n\n\ndef get_content_progress_fractions(nodes, user):\n if isinstance(nodes, RawQuerySet) or isinstance(nodes, list):\n leaf_ids = [datum.content_id for datum in nodes]\n else:\n leaf_ids = nodes.exclude(kind=content_kinds.TOPIC).values_list(\n \"content_id\", flat=True\n )\n\n summary_logs = get_summary_logs(leaf_ids, user)\n\n # make a lookup dict for all logs to allow mapping from content_id to current progress\n overall_progress = {\n log[\"content_id\"]: round(log[\"progress\"], 4)\n for log in summary_logs.values(\"content_id\", \"progress\")\n }\n return overall_progress\n\n\nclass ContentNodeListSerializer(serializers.ListSerializer):\n def to_representation(self, data):\n\n # Dealing with nested relationships, data can be a Manager,\n # so, first get a queryset from the Manager if needed\n data = data.all() if isinstance(data, Manager) else data\n\n # initialize cache key\n cache_key = None\n\n # ensure that we are filtering by the parent only\n # this allows us to only cache results on the learn page\n from .api import ContentNodeFilter\n\n parent_filter_only = set(self.context[\"request\"].GET.keys()).intersection(\n ContentNodeFilter.Meta.fields\n ) == set([\"parent\"])\n\n # Cache parent look ups only\n if parent_filter_only:\n cache_key = \"contentnode_list_{parent}\".format(\n parent=self.context[\"request\"].GET.get(\"parent\")\n )\n\n if cache.get(cache_key):\n return cache.get(cache_key)\n\n if not data:\n return data\n\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n progress_dict = {}\n else:\n user = self.context[\"request\"].user\n # Don't annotate topic progress as too expensive\n progress_dict = get_content_progress_fractions(data, user)\n\n result = []\n topic_only = True\n\n # Allow results to be limited after all queryset filtering has occurred\n if self.limit:\n data = data[: self.limit]\n\n for item in data:\n obj = self.child.to_representation(\n item,\n progress_fraction=progress_dict.get(item.content_id),\n annotate_progress_fraction=False,\n )\n topic_only = topic_only and obj.get(\"kind\") == content_kinds.TOPIC\n result.append(obj)\n\n # Only store if all nodes are topics, because we don't annotate progress on them\n # This has the happy side effect of not caching our dynamically calculated\n # recommendation queries, which might change for the same user over time\n # because they do not return topics\n if topic_only and parent_filter_only:\n cache.set(cache_key, result, 60 * 10)\n\n return result\n\n\nclass ContentNodeSerializer(DynamicFieldsModelSerializer):\n parent = serializers.PrimaryKeyRelatedField(read_only=True)\n files = FileSerializer(many=True, read_only=True)\n assessmentmetadata = AssessmentMetaDataSerializer(\n read_only=True, allow_null=True, many=True\n )\n lang = LanguageSerializer()\n\n class Meta:\n model = ContentNode\n fields = (\n \"id\",\n \"assessmentmetadata\",\n \"author\",\n \"available\",\n \"channel_id\",\n \"coach_content\",\n \"content_id\",\n \"description\",\n \"files\",\n \"kind\",\n \"lang\",\n \"license_description\",\n \"license_name\",\n \"license_owner\",\n \"num_coach_contents\",\n \"parent\",\n \"sort_order\",\n \"title\",\n 
)\n list_serializer_class = ContentNodeListSerializer\n\n def __new__(cls, *args, **kwargs):\n # This is overwritten to provide a ListClassSerializer for many=True\n limit = kwargs.pop(\"limit\", None)\n new = super(ContentNodeSerializer, cls).__new__(cls, *args, **kwargs)\n new.limit = limit\n return new\n\n def to_representation(\n self, instance, progress_fraction=None, annotate_progress_fraction=True\n ):\n if progress_fraction is None and annotate_progress_fraction:\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n # Don't try to annotate for a non facility user\n progress_fraction = 0.0\n else:\n user = self.context[\"request\"].user\n if instance.kind != content_kinds.TOPIC:\n progress_fraction = get_content_progress_fraction(instance, user)\n value = super(ContentNodeSerializer, self).to_representation(instance)\n value[\"progress_fraction\"] = progress_fraction\n return value\n\n\nclass ContentNodeGranularSerializer(serializers.ModelSerializer):\n num_coach_contents = serializers.SerializerMethodField()\n coach_content = serializers.SerializerMethodField()\n total_resources = serializers.SerializerMethodField()\n importable = serializers.SerializerMethodField()\n\n class Meta:\n model = ContentNode\n fields = (\n \"id\",\n \"available\",\n \"coach_content\",\n \"importable\",\n \"kind\",\n \"num_coach_contents\",\n \"on_device_resources\",\n \"title\",\n \"total_resources\",\n )\n\n @property\n def channel_stats(self):\n return self.context[\"channel_stats\"]\n\n def get_total_resources(self, instance):\n # channel_stats is None for export\n if self.channel_stats is None:\n return instance.on_device_resources\n return self.channel_stats.get(instance.id, {\"total_resources\": 0})[\n \"total_resources\"\n ]\n\n def get_num_coach_contents(self, instance):\n # If for exporting, only show what is available on server. For importing,\n # show all of the coach contents in the topic.\n if self.channel_stats is None:\n return instance.num_coach_contents\n return self.channel_stats.get(instance.id, {\"num_coach_contents\": 0})[\n \"num_coach_contents\"\n ]\n\n def get_coach_content(self, instance):\n # If for exporting, only show what is on server. 
For importing,\n # show all of the coach contents in the topic.\n if self.channel_stats is None:\n return instance.coach_content\n return self.channel_stats.get(instance.id, {\"coach_content\": False})[\n \"coach_content\"\n ]\n\n def get_importable(self, instance):\n # If for export, just return None\n if self.channel_stats is None:\n return None\n return instance.id in self.channel_stats\n\n\nclass ContentNodeProgressListSerializer(serializers.ListSerializer):\n def to_representation(self, data):\n\n if not data:\n return data\n\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n progress_dict = {}\n else:\n user = self.context[\"request\"].user\n # Don't annotate topic progress as too expensive\n progress_dict = get_topic_and_content_progress_fractions(data, user)\n\n # Dealing with nested relationships, data can be a Manager,\n # so, first get a queryset from the Manager if needed\n iterable = data.all() if isinstance(data, Manager) else data\n\n return [\n self.child.to_representation(\n item,\n progress_fraction=progress_dict.get(item.content_id, 0.0),\n annotate_progress_fraction=False,\n )\n for item in iterable\n ]\n\n\nclass ContentNodeProgressSerializer(serializers.Serializer):\n def to_representation(\n self, instance, progress_fraction=None, annotate_progress_fraction=True\n ):\n if progress_fraction is None and annotate_progress_fraction:\n if (\n \"request\" not in self.context\n or not self.context[\"request\"].user.is_facility_user\n ):\n # Don't try to annotate for a non facility user\n progress_fraction = 0\n else:\n user = self.context[\"request\"].user\n progress_fraction = (\n get_topic_and_content_progress_fraction(instance, user) or 0.0\n )\n return {\"id\": instance.id, \"progress_fraction\": progress_fraction}\n\n class Meta:\n list_serializer_class = ContentNodeProgressListSerializer\n",
"path": "kolibri/core/content/serializers.py"
}
] | diff --git a/kolibri/core/content/serializers.py b/kolibri/core/content/serializers.py
index da871d2f6ec..326e6497fa9 100644
--- a/kolibri/core/content/serializers.py
+++ b/kolibri/core/content/serializers.py
@@ -67,6 +67,7 @@ class Meta:
"version",
"available",
"num_coach_contents",
+ "public",
)
diff --git a/kolibri/plugins/device/assets/src/modules/manageContent/actions/taskActions.js b/kolibri/plugins/device/assets/src/modules/manageContent/actions/taskActions.js
index 4e59f38b109..34392d30983 100644
--- a/kolibri/plugins/device/assets/src/modules/manageContent/actions/taskActions.js
+++ b/kolibri/plugins/device/assets/src/modules/manageContent/actions/taskActions.js
@@ -6,6 +6,7 @@ import pick from 'lodash/fp/pick';
import { TaskStatuses, TaskTypes } from '../../../constants';
const logging = logger.getLogger(__filename);
+
export function cancelTask(store, taskId) {
return new Promise(resolve => {
let cancelWatch;
@@ -15,7 +16,7 @@ export function cancelTask(store, taskId) {
TaskStatuses.CANCELED,
() => {
cancelWatch();
- TaskResource.deleteFinishedTasks().then(resolve);
+ TaskResource.deleteFinishedTask(taskId).then(resolve);
}
);
TaskResource.cancelTask(taskId);
diff --git a/kolibri/plugins/device/assets/src/modules/manageContent/index.js b/kolibri/plugins/device/assets/src/modules/manageContent/index.js
index 424ffc22edf..f5c80138f6b 100644
--- a/kolibri/plugins/device/assets/src/modules/manageContent/index.js
+++ b/kolibri/plugins/device/assets/src/modules/manageContent/index.js
@@ -1,4 +1,5 @@
import find from 'lodash/find';
+import findLastIndex from 'lodash/findLastIndex';
import wizard from '../wizard';
import { TaskTypes, TaskStatuses, taskIsClearable } from '../../constants';
import actions from './actions';
@@ -43,14 +44,36 @@ export default {
},
getters: {
// Channels that are installed & also "available"
- installedChannelsWithResources(state) {
- return state.channelList.filter(channel => channel.available);
+ installedChannelsWithResources(state, getters) {
+ const channels = state.channelList.filter(channel => channel.available);
+
+ return channels.map(channel => {
+ const taskIndex = findLastIndex(getters.managedTasks, task => {
+ return (
+ ![TaskTypes.DISKCONTENTEXPORT, TaskTypes.DISKEXPORT, TaskTypes.DELETECHANNEL].includes(
+ task.type
+ ) &&
+ task.channel_id === channel.id &&
+ task.status === TaskStatuses.COMPLETED
+ );
+ });
+ return {
+ ...channel,
+ taskIndex,
+ };
+ });
},
channelIsInstalled(state) {
return function findChannel(channelId) {
return find(state.channelList, { id: channelId, available: true });
};
},
+ channelIsOnDevice(state) {
+ // Channel data just needs to exist, but doesn't need to be available
+ return function findChannel(channelId) {
+ return find(state.channelList, { id: channelId });
+ };
+ },
channelIsBeingDeleted(state) {
return function beingDeleted(channelId) {
const match = find(state.taskList, {
diff --git a/kolibri/plugins/device/assets/src/modules/wizard/actions/selectContentActions.js b/kolibri/plugins/device/assets/src/modules/wizard/actions/selectContentActions.js
index 7fb760a425e..8ccbc3d7a85 100644
--- a/kolibri/plugins/device/assets/src/modules/wizard/actions/selectContentActions.js
+++ b/kolibri/plugins/device/assets/src/modules/wizard/actions/selectContentActions.js
@@ -10,7 +10,7 @@ import { getChannelWithContentSizes } from '../apiChannelMetadata';
export function loadChannelMetadata(store) {
let dbPromise;
const { transferredChannel } = store.state.manageContent.wizard;
- const channelOnDevice = store.getters['manageContent/channelIsInstalled'](transferredChannel.id);
+ const channelOnDevice = store.getters['manageContent/channelIsOnDevice'](transferredChannel.id);
// If channel _is_ on the device, but not "available" (i.e. no resources installed yet)
// _and_ has been updated, then download the metadata
diff --git a/kolibri/plugins/device/assets/src/modules/wizard/utils.js b/kolibri/plugins/device/assets/src/modules/wizard/utils.js
index 80458a583c9..104f40c20d2 100644
--- a/kolibri/plugins/device/assets/src/modules/wizard/utils.js
+++ b/kolibri/plugins/device/assets/src/modules/wizard/utils.js
@@ -56,9 +56,14 @@ export function downloadChannelMetadata(store = coreStore) {
.then(completedTask => {
const { taskId, cancelled } = completedTask;
if (taskId && !cancelled) {
- return TaskResource.deleteFinishedTasks().then(() => {
- return getChannelWithContentSizes(transferredChannel.id);
- });
+ return TaskResource.deleteFinishedTask(taskId)
+ .then(() => {
+ return getChannelWithContentSizes(transferredChannel.id);
+ })
+ .catch(() => {
+ // Fail silently just in case something happens
+ return getChannelWithContentSizes(transferredChannel.id);
+ });
}
return Promise.reject({ errorType: ErrorTypes.CHANNEL_TASK_ERROR });
});
diff --git a/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithImportDetails.vue b/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithImportDetails.vue
index ee08417d042..db81a426778 100644
--- a/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithImportDetails.vue
+++ b/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithImportDetails.vue
@@ -125,8 +125,6 @@
}
},
isUnlistedChannel() {
- // This is only defined when entering a remote import workflow,
- // so false !== undefined.
return this.channel.public === false;
},
tasksInQueue() {
@@ -256,18 +254,27 @@
margin-left: 8px;
}
+ .private-icons {
+ position: relative;
+ display: inline-block;
+ margin-top: -3px;
+ margin-bottom: 3px;
+ vertical-align: top;
+ }
+
.new-label {
position: absolute;
- top: 3px;
- padding: 2px 5px 2px 4px;
+ top: 2px;
+ display: inline-block;
+ padding: 2px 8px;
margin-left: 8px;
font-size: 14px;
+ font-weight: bold;
border-radius: 2px;
- }
- .private-icons {
- position: relative;
- display: inline-block;
+ .channel-list-item-sm & {
+ top: -2px;
+ }
}
.selected-msg {
diff --git a/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithSizeAndOptions.vue b/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithSizeAndOptions.vue
index 4d1a020e856..6db580f253e 100644
--- a/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithSizeAndOptions.vue
+++ b/kolibri/plugins/device/assets/src/views/ManageContentPage/ChannelPanel/WithSizeAndOptions.vue
@@ -5,7 +5,30 @@
:class="{'panel-sm': windowIsSmall}"
:style="{ borderTop: `1px solid ${$themePalette.grey.v_200}` }"
>
- <ChannelDetails :channel="channel" />
+ <ChannelDetails :channel="channel">
+
+ <template v-slot:belowname>
+ <div class="private-icons">
+ <KTooltip reference="lockicon" :refs="$refs" placement="top">
+ {{ WithImportDetailsStrings.$tr('unlistedChannelTooltip') }}
+ </KTooltip>
+ <KIcon
+ v-if="channel.public === false"
+ ref="lockicon"
+ class="lock-icon"
+ icon="unlistedchannel"
+ />
+ <span
+ v-if="showNewLabel"
+ class="new-label"
+ :style="{
+ color: $themeTokens.textInverted,
+ backgroundColor: $themeTokens.success
+ }"
+ >{{ WithImportDetailsStrings.$tr('newLabel') }}</span>
+ </div>
+ </template>
+ </ChannelDetails>
<div
class="col-2"
@@ -35,7 +58,11 @@
import responsiveWindowMixin from 'kolibri.coreVue.mixins.responsiveWindowMixin';
import commonCoreStrings from 'kolibri.coreVue.mixins.commonCoreStrings';
import bytesForHumans from 'kolibri.utils.bytesForHumans';
+ import { crossComponentTranslator } from 'kolibri.utils.i18n';
import ChannelDetails from './ChannelDetails';
+ import WithImportDetails from './WithImportDetails';
+
+ const WithImportDetailsStrings = crossComponentTranslator(WithImportDetails);
export default {
name: 'WithSizeAndOptions',
@@ -52,11 +79,18 @@
type: Boolean,
default: false,
},
+ showNewLabel: {
+ type: Boolean,
+ required: false,
+ },
},
computed: {
resourcesSizeText() {
return bytesForHumans(this.channel.on_device_file_size);
},
+ WithImportDetailsStrings() {
+ return WithImportDetailsStrings;
+ },
},
methods: {
handleManageChannelAction() {
@@ -86,6 +120,16 @@
padding: 16px 0;
}
+ svg.lock-icon {
+ width: 24px;
+ height: 24px;
+
+ .panel-sm & {
+ width: 20px;
+ height: 20px;
+ }
+ }
+
.col-2 {
min-width: 80px;
margin-right: 16px;
@@ -110,4 +154,31 @@
margin: 0;
}
+ .private-icons {
+ position: relative;
+ display: inline-block;
+ margin-top: -3px;
+ margin-bottom: 3px;
+ vertical-align: top;
+
+ .panel-sm & {
+ margin-top: -1px;
+ margin-bottom: 1px;
+ }
+ }
+
+ .new-label {
+ position: absolute;
+ top: 2px;
+ padding: 2px 8px;
+ margin-left: 8px;
+ font-size: 14px;
+ font-weight: bold;
+ border-radius: 2px;
+
+ .panel-sm & {
+ top: -2px;
+ }
+ }
+
</style>
diff --git a/kolibri/plugins/device/assets/src/views/ManageContentPage/TasksBar.vue b/kolibri/plugins/device/assets/src/views/ManageContentPage/TasksBar.vue
index 043ac05f37e..9fdd17a1976 100644
--- a/kolibri/plugins/device/assets/src/views/ManageContentPage/TasksBar.vue
+++ b/kolibri/plugins/device/assets/src/views/ManageContentPage/TasksBar.vue
@@ -45,13 +45,7 @@
export default {
name: 'TasksBar',
- components: {},
mixins: [commonCoreStrings, responsiveWindowMixin],
- props: {},
- data() {
- return {};
- },
-
computed: {
...mapGetters('manageContent', ['managedTasks']),
clearCompletedString() {
diff --git a/kolibri/plugins/device/assets/src/views/ManageContentPage/api.js b/kolibri/plugins/device/assets/src/views/ManageContentPage/api.js
index a01dd91b4aa..1738e1b2e6a 100644
--- a/kolibri/plugins/device/assets/src/views/ManageContentPage/api.js
+++ b/kolibri/plugins/device/assets/src/views/ManageContentPage/api.js
@@ -1,5 +1,6 @@
import find from 'lodash/find';
import { TaskResource, ChannelResource, RemoteChannelResource } from 'kolibri.resources';
+import { TaskTypes } from '../../constants';
import { NetworkLocationResource } from '../../apiResources';
const kolibriStudioUrl = 'https://studio.learningequality.org';
@@ -98,7 +99,7 @@ export function fetchOrTriggerChannelDiffStatsTask(params) {
}
return TaskResource.fetchCollection({ force: true }).then(tasks => {
- const match = find(tasks, taskAttrs);
+ const match = find(tasks, { ...taskAttrs, type: TaskTypes.CHANNELDIFFSTATS });
if (match) {
return match;
} else {
diff --git a/kolibri/plugins/device/assets/src/views/ManageContentPage/index.vue b/kolibri/plugins/device/assets/src/views/ManageContentPage/index.vue
index 4bee488f589..36131b41804 100644
--- a/kolibri/plugins/device/assets/src/views/ManageContentPage/index.vue
+++ b/kolibri/plugins/device/assets/src/views/ManageContentPage/index.vue
@@ -49,10 +49,11 @@
<div class="channels-list">
<ChannelPanel
- v-for="channel in installedChannelsWithResources"
+ v-for="channel in sortedChannels"
:key="channel.id"
:channel="channel"
:disabled="channelIsBeingDeleted(channel.id)"
+ :showNewLabel="showNewLabel(channel.id)"
@select_delete="deleteChannelId = channel.id"
@select_manage="handleSelectManage(channel.id)"
/>
@@ -78,11 +79,12 @@
import find from 'lodash/find';
import get from 'lodash/get';
+ import sortBy from 'lodash/sortBy';
import { mapState, mapGetters, mapActions } from 'vuex';
import commonCoreStrings from 'kolibri.coreVue.mixins.commonCoreStrings';
import { TaskResource } from 'kolibri.resources';
import taskNotificationMixin from '../taskNotificationMixin';
- import { PageNames } from '../../constants';
+ import { PageNames, TaskStatuses } from '../../constants';
import SelectTransferSourceModal from './SelectTransferSourceModal';
import ChannelPanel from './ChannelPanel/WithSizeAndOptions';
import DeleteChannelModal from './DeleteChannelModal';
@@ -105,11 +107,25 @@
data() {
return {
deleteChannelId: null,
+ channelOrders: {},
};
},
computed: {
- ...mapGetters('manageContent', ['installedChannelsWithResources', 'channelIsBeingDeleted']),
+ ...mapGetters('manageContent', [
+ 'installedChannelsWithResources',
+ 'channelIsBeingDeleted',
+ 'managedTasks',
+ ]),
...mapState('manageContent/wizard', ['pageName']),
+ doneTasks() {
+ return this.managedTasks.filter(task => task.status === TaskStatuses.COMPLETED).length;
+ },
+ sortedChannels() {
+ return sortBy(
+ this.installedChannelsWithResources,
+ channel => -this.channelOrders[channel.id]
+ );
+ },
channelsAreInstalled() {
return this.installedChannelsWithResources.length > 0;
},
@@ -127,6 +143,27 @@
];
},
},
+ watch: {
+ installedChannelsWithResources: {
+ // Save channel orders that are set temporarily based on managedTasks
+ handler(val) {
+ val.forEach(channel => {
+ const currentOrder = this.channelOrders[channel.id];
+ if ((!currentOrder && channel.taskIndex > -1) || currentOrder < channel.taskIndex) {
+ this.$set(this.channelOrders, channel.id, channel.taskIndex);
+ }
+ });
+ },
+ immediate: true,
+ deep: true,
+ },
+ doneTasks(val, oldVal) {
+ // Just refresh the channel list whenever anything finishes to get the latest version
+ if (val > oldVal) {
+ this.refreshChannelList();
+ }
+ },
+ },
methods: {
...mapActions('manageContent', ['refreshChannelList', 'startImportWorkflow']),
handleSelect({ value }) {
@@ -137,6 +174,10 @@
}[value];
this.$router.push(this.$router.getRoute(nextRoute));
},
+ showNewLabel(channelId) {
+ const match = find(this.installedChannelsWithResources, { id: channelId });
+ return match && match.taskIndex > -1;
+ },
handleDeleteChannel() {
if (this.deleteChannelId) {
const channelId = this.deleteChannelId;
diff --git a/kolibri/plugins/device/assets/src/views/SelectContentPage/ChannelContentsSummary.vue b/kolibri/plugins/device/assets/src/views/SelectContentPage/ChannelContentsSummary.vue
index 9406430ae5f..fba10a82097 100644
--- a/kolibri/plugins/device/assets/src/views/SelectContentPage/ChannelContentsSummary.vue
+++ b/kolibri/plugins/device/assets/src/views/SelectContentPage/ChannelContentsSummary.vue
@@ -14,6 +14,7 @@
<h1>
<KLabeledIcon icon="channel" :label="channel.name" />
<KIcon
+ v-if="channel.public === false"
ref="lockicon"
class="lock-icon"
icon="unlistedchannel"
diff --git a/kolibri/plugins/device/assets/src/views/SelectContentPage/index.vue b/kolibri/plugins/device/assets/src/views/SelectContentPage/index.vue
index 269ce9524dc..dc8d444a49f 100644
--- a/kolibri/plugins/device/assets/src/views/SelectContentPage/index.vue
+++ b/kolibri/plugins/device/assets/src/views/SelectContentPage/index.vue
@@ -114,7 +114,7 @@
};
},
computed: {
- ...mapGetters('manageContent', ['channelIsInstalled']),
+ ...mapGetters('manageContent', ['channelIsOnDevice']),
...mapState('manageContent', ['taskList']),
...mapGetters('manageContent/wizard', [
'inLocalImportMode',
@@ -154,7 +154,7 @@
return undefined;
},
channelOnDevice() {
- return this.channelIsInstalled(this.transferredChannel.id) || {};
+ return this.channelIsOnDevice(this.transferredChannel.id) || {};
},
availableVersions() {
return {
diff --git a/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js b/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js
index 82f8a53b144..9e6d458ea34 100644
--- a/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js
+++ b/kolibri/plugins/facility/assets/src/modules/manageCSV/actions.js
@@ -66,7 +66,7 @@ function checkTaskStatus(store, newTasks, taskType, taskId, commitStart, commitF
const task = completed[0];
if (task.status === TaskStatuses.COMPLETED) {
store.commit(commitFinish, new Date());
- TaskResource.deleteFinishedTasks();
+ TaskResource.deleteFinishedTask(taskId);
}
}
} else {
|
doccano__doccano-693 | CORS error when running locally in development mode
How to reproduce the behaviour
---------
1. `git clone https://github.com/doccano/doccano.git`
2. `cd doccano`
3. `docker-compose -f docker-compose.dev.yml up`
4. Visit `http://localhost:3000/auth`
5. Log in with user `admin` and password `password`
Your Environment
---------
* Operating System: macOS Catalina 10.15.3
* Browser: Chrome 80.0.3987.163 (Official Build) (64-bit)
What Happens
---------
I get a CORS error and I can't log in:
```
Access to XMLHttpRequest at 'http://127.0.0.1:8000/v1/auth-token' from origin 'http://localhost:3000' has been blocked by CORS policy: Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource.
```

Here is what the Request Headers look like:

| [
{
"content": "\"\"\"\nDjango settings for app project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/2.0/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/2.0/ref/settings/\n\nAny setting that is configured via an environment variable may\nalso be set in a `.env` file in the project base directory.\n\"\"\"\nfrom os import path\n\nimport django_heroku\nimport dj_database_url\nfrom environs import Env\nfrom furl import furl\n\n\n# Build paths inside the project like this: path.join(BASE_DIR, ...)\nBASE_DIR = path.dirname(path.dirname(path.abspath(__file__)))\n\nenv = Env()\nenv.read_env(path.join(BASE_DIR, '.env'), recurse=False)\n\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = env('SECRET_KEY',\n 'v8sk33sy82!uw3ty=!jjv5vp7=s2phrzw(m(hrn^f7e_#1h2al')\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = env.bool('DEBUG', True)\n\n# True if you want to allow users to be able to create an account\nALLOW_SIGNUP = env.bool('ALLOW_SIGNUP', True)\n\n# ALLOWED_HOSTS = []\n\n\n# Application definition\n\nINSTALLED_APPS = [\n 'whitenoise.runserver_nostatic',\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'server.apps.ServerConfig',\n 'api.apps.ApiConfig',\n 'widget_tweaks',\n 'rest_framework',\n 'rest_framework.authtoken',\n 'django_filters',\n 'social_django',\n 'polymorphic',\n 'webpack_loader',\n 'corsheaders',\n 'drf_yasg'\n]\n\nCLOUD_BROWSER_APACHE_LIBCLOUD_PROVIDER = env('CLOUD_BROWSER_LIBCLOUD_PROVIDER', None)\nCLOUD_BROWSER_APACHE_LIBCLOUD_ACCOUNT = env('CLOUD_BROWSER_LIBCLOUD_ACCOUNT', None)\nCLOUD_BROWSER_APACHE_LIBCLOUD_SECRET_KEY = env('CLOUD_BROWSER_LIBCLOUD_KEY', None)\n\nif CLOUD_BROWSER_APACHE_LIBCLOUD_PROVIDER:\n CLOUD_BROWSER_DATASTORE = 'ApacheLibcloud'\n CLOUD_BROWSER_OBJECT_REDIRECT_URL = '/v1/cloud-upload'\n INSTALLED_APPS.append('cloud_browser')\n\nMIDDLEWARE = [\n 'django.middleware.security.SecurityMiddleware',\n 'whitenoise.middleware.WhiteNoiseMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'social_django.middleware.SocialAuthExceptionMiddleware',\n 'applicationinsights.django.ApplicationInsightsMiddleware',\n 'corsheaders.middleware.CorsMiddleware',\n]\n\nROOT_URLCONF = 'app.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [path.join(BASE_DIR, 'server/templates'), path.join(BASE_DIR, 'authentification/templates')],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n 'social_django.context_processors.backends',\n 'social_django.context_processors.login_redirect',\n ],\n 'libraries': {\n 'analytics': 'server.templatetags.analytics',\n 'utils_templating': 'authentification.templatetags.utils_templating',\n },\n },\n 
},\n]\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/2.0/howto/static-files/\n\nSTATIC_URL = '/static/'\nSTATIC_ROOT = path.join(BASE_DIR, 'staticfiles')\n\nSTATICFILES_DIRS = [\n static_path\n for static_path in (\n path.join(BASE_DIR, 'server', 'static', 'assets'),\n path.join(BASE_DIR, 'server', 'static', 'static'),\n )\n if path.isdir(static_path)\n]\n\nSTATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'\n\nWEBPACK_LOADER = {\n 'DEFAULT': {\n 'CACHE': not DEBUG,\n 'BUNDLE_DIR_NAME': 'bundle/',\n 'STATS_FILE': path.join(BASE_DIR, 'server', 'static', 'webpack-stats.json'),\n 'POLL_INTERVAL': 0.1,\n 'TIMEOUT': None,\n 'IGNORE': [r'.*\\.hot-update.js', r'.+\\.map']\n }\n}\n\nWSGI_APPLICATION = 'app.wsgi.application'\n\nAUTHENTICATION_BACKENDS = [\n 'social_core.backends.github.GithubOAuth2',\n 'social_core.backends.azuread_tenant.AzureADTenantOAuth2',\n 'django.contrib.auth.backends.ModelBackend',\n]\n\nSOCIAL_AUTH_GITHUB_KEY = env('OAUTH_GITHUB_KEY', None)\nSOCIAL_AUTH_GITHUB_SECRET = env('OAUTH_GITHUB_SECRET', None)\nGITHUB_ADMIN_ORG_NAME = env('GITHUB_ADMIN_ORG_NAME', None)\nGITHUB_ADMIN_TEAM_NAME = env('GITHUB_ADMIN_TEAM_NAME', None)\n\nif GITHUB_ADMIN_ORG_NAME and GITHUB_ADMIN_TEAM_NAME:\n SOCIAL_AUTH_GITHUB_SCOPE = ['read:org']\n\nSOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_KEY = env('OAUTH_AAD_KEY', None)\nSOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_SECRET = env('OAUTH_AAD_SECRET', None)\nSOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_TENANT_ID = env('OAUTH_AAD_TENANT', None)\nAZUREAD_ADMIN_GROUP_ID = env('AZUREAD_ADMIN_GROUP_ID', None)\n\nif AZUREAD_ADMIN_GROUP_ID:\n SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_RESOURCE = 'https://graph.microsoft.com/'\n SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_SCOPE = ['Directory.Read.All']\n\nSOCIAL_AUTH_PIPELINE = [\n 'social_core.pipeline.social_auth.social_details',\n 'social_core.pipeline.social_auth.social_uid',\n 'social_core.pipeline.social_auth.auth_allowed',\n 'social_core.pipeline.social_auth.social_user',\n 'social_core.pipeline.user.get_username',\n 'social_core.pipeline.user.create_user',\n 'social_core.pipeline.social_auth.associate_user',\n 'social_core.pipeline.social_auth.load_extra_data',\n 'social_core.pipeline.user.user_details',\n 'server.social_auth.fetch_github_permissions',\n 'server.social_auth.fetch_azuread_permissions',\n]\n\nROLE_PROJECT_ADMIN = env('ROLE_PROJECT_ADMIN', 'project_admin')\nROLE_ANNOTATOR = env('ROLE_ANNOTATOR', 'annotator')\nROLE_ANNOTATION_APPROVER = env('ROLE_ANNOTATION_APPROVER', 'annotation_approver')\n\n# Database\n# https://docs.djangoproject.com/en/2.0/ref/settings/#databases\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3',\n 'NAME': path.join(BASE_DIR, 'db.sqlite3'),\n }\n}\n\n\n# Password validation\n# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\nREST_FRAMEWORK = {\n # Use Django's standard `django.contrib.auth` permissions,\n # or allow read-only access for unauthenticated users.\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',\n 'rest_framework.permissions.IsAuthenticated',\n ],\n 
'DEFAULT_AUTHENTICATION_CLASSES': (\n 'rest_framework.authentication.SessionAuthentication',\n 'rest_framework.authentication.TokenAuthentication',\n ),\n 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',\n 'PAGE_SIZE': env.int('DOCCANO_PAGE_SIZE', default=5),\n 'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),\n 'SEARCH_PARAM': 'q',\n 'DEFAULT_RENDERER_CLASSES': (\n 'rest_framework.renderers.JSONRenderer',\n 'rest_framework.renderers.BrowsableAPIRenderer',\n 'rest_framework_xml.renderers.XMLRenderer'\n )\n}\n\n# Internationalization\n# https://docs.djangoproject.com/en/2.0/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\nTEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'\nTEST_OUTPUT_DIR = path.join(BASE_DIR, 'junitxml')\n\nLOGIN_URL = '/login/'\nLOGIN_REDIRECT_URL = '/projects/'\nLOGOUT_REDIRECT_URL = '/'\n\ndjango_heroku.settings(locals(), test_runner=False)\n\n# Change 'default' database configuration with $DATABASE_URL.\nDATABASES['default'].update(dj_database_url.config(\n env='DATABASE_URL',\n conn_max_age=env.int('DATABASE_CONN_MAX_AGE', 500),\n ssl_require='sslmode' not in furl(env('DATABASE_URL', '')).args,\n))\n\n# work-around for dj-database-url: explicitly disable ssl for sqlite\nif DATABASES['default'].get('ENGINE') == 'django.db.backends.sqlite3':\n DATABASES['default'].get('OPTIONS', {}).pop('sslmode', None)\n\n# work-around for dj-database-url: patch ssl for mysql\nif DATABASES['default'].get('ENGINE') == 'django.db.backends.mysql':\n DATABASES['default'].get('OPTIONS', {}).pop('sslmode', None)\n if env('MYSQL_SSL_CA', None):\n DATABASES['default'].setdefault('OPTIONS', {})\\\n .setdefault('ssl', {}).setdefault('ca', env('MYSQL_SSL_CA', None))\n\n# default to a sensible modern driver for Azure SQL\nif DATABASES['default'].get('ENGINE') == 'sql_server.pyodbc':\n DATABASES['default'].setdefault('OPTIONS', {})\\\n .setdefault('driver', 'ODBC Driver 17 for SQL Server')\n\n# Honor the 'X-Forwarded-Proto' header for request.is_secure()\nSECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')\nSESSION_COOKIE_SECURE = env.bool('SESSION_COOKIE_SECURE', False)\nCSRF_COOKIE_SECURE = env.bool('CSRF_COOKIE_SECURE', False)\nCSRF_TRUSTED_ORIGINS = env.list('CSRF_TRUSTED_ORIGINS', [])\n\n# Allow all host headers\n# ALLOWED_HOSTS = ['*']\n\n# Size of the batch for creating documents\n# on the import phase\nIMPORT_BATCH_SIZE = env.int('IMPORT_BATCH_SIZE', 500)\n\nGOOGLE_TRACKING_ID = env('GOOGLE_TRACKING_ID', 'UA-125643874-2').strip()\n\nAZURE_APPINSIGHTS_IKEY = env('AZURE_APPINSIGHTS_IKEY', None)\nAPPLICATION_INSIGHTS = {\n 'ikey': AZURE_APPINSIGHTS_IKEY if AZURE_APPINSIGHTS_IKEY else None,\n 'endpoint': env('AZURE_APPINSIGHTS_ENDPOINT', None),\n}\n\n# necessary for email verification of new accounts\nEMAIL_USE_TLS = env.bool('EMAIL_USE_TLS', False)\nEMAIL_HOST = env('EMAIL_HOST', None)\nEMAIL_HOST_USER = env('EMAIL_HOST_USER', None)\nEMAIL_HOST_PASSWORD = env('EMAIL_HOST_PASSWORD', None)\nEMAIL_PORT = env.int('EMAIL_PORT', 587)\n\nif not EMAIL_HOST:\n EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'\n\n\nif DEBUG:\n CORS_ORIGIN_WHITELIST = (\n 'http://127.0.0.1:3000',\n 'http://0.0.0.0:3000',\n )\n",
"path": "app/app/settings.py"
}
] | [
{
"content": "\"\"\"\nDjango settings for app project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/2.0/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/2.0/ref/settings/\n\nAny setting that is configured via an environment variable may\nalso be set in a `.env` file in the project base directory.\n\"\"\"\nfrom os import path\n\nimport django_heroku\nimport dj_database_url\nfrom environs import Env\nfrom furl import furl\n\n\n# Build paths inside the project like this: path.join(BASE_DIR, ...)\nBASE_DIR = path.dirname(path.dirname(path.abspath(__file__)))\n\nenv = Env()\nenv.read_env(path.join(BASE_DIR, '.env'), recurse=False)\n\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = env('SECRET_KEY',\n 'v8sk33sy82!uw3ty=!jjv5vp7=s2phrzw(m(hrn^f7e_#1h2al')\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = env.bool('DEBUG', True)\n\n# True if you want to allow users to be able to create an account\nALLOW_SIGNUP = env.bool('ALLOW_SIGNUP', True)\n\n# ALLOWED_HOSTS = []\n\n\n# Application definition\n\nINSTALLED_APPS = [\n 'whitenoise.runserver_nostatic',\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'server.apps.ServerConfig',\n 'api.apps.ApiConfig',\n 'widget_tweaks',\n 'rest_framework',\n 'rest_framework.authtoken',\n 'django_filters',\n 'social_django',\n 'polymorphic',\n 'webpack_loader',\n 'corsheaders',\n 'drf_yasg'\n]\n\nCLOUD_BROWSER_APACHE_LIBCLOUD_PROVIDER = env('CLOUD_BROWSER_LIBCLOUD_PROVIDER', None)\nCLOUD_BROWSER_APACHE_LIBCLOUD_ACCOUNT = env('CLOUD_BROWSER_LIBCLOUD_ACCOUNT', None)\nCLOUD_BROWSER_APACHE_LIBCLOUD_SECRET_KEY = env('CLOUD_BROWSER_LIBCLOUD_KEY', None)\n\nif CLOUD_BROWSER_APACHE_LIBCLOUD_PROVIDER:\n CLOUD_BROWSER_DATASTORE = 'ApacheLibcloud'\n CLOUD_BROWSER_OBJECT_REDIRECT_URL = '/v1/cloud-upload'\n INSTALLED_APPS.append('cloud_browser')\n\nMIDDLEWARE = [\n 'django.middleware.security.SecurityMiddleware',\n 'whitenoise.middleware.WhiteNoiseMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'social_django.middleware.SocialAuthExceptionMiddleware',\n 'applicationinsights.django.ApplicationInsightsMiddleware',\n 'corsheaders.middleware.CorsMiddleware',\n]\n\nROOT_URLCONF = 'app.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [path.join(BASE_DIR, 'server/templates'), path.join(BASE_DIR, 'authentification/templates')],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n 'social_django.context_processors.backends',\n 'social_django.context_processors.login_redirect',\n ],\n 'libraries': {\n 'analytics': 'server.templatetags.analytics',\n 'utils_templating': 'authentification.templatetags.utils_templating',\n },\n },\n 
},\n]\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/2.0/howto/static-files/\n\nSTATIC_URL = '/static/'\nSTATIC_ROOT = path.join(BASE_DIR, 'staticfiles')\n\nSTATICFILES_DIRS = [\n static_path\n for static_path in (\n path.join(BASE_DIR, 'server', 'static', 'assets'),\n path.join(BASE_DIR, 'server', 'static', 'static'),\n )\n if path.isdir(static_path)\n]\n\nSTATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'\n\nWEBPACK_LOADER = {\n 'DEFAULT': {\n 'CACHE': not DEBUG,\n 'BUNDLE_DIR_NAME': 'bundle/',\n 'STATS_FILE': path.join(BASE_DIR, 'server', 'static', 'webpack-stats.json'),\n 'POLL_INTERVAL': 0.1,\n 'TIMEOUT': None,\n 'IGNORE': [r'.*\\.hot-update.js', r'.+\\.map']\n }\n}\n\nWSGI_APPLICATION = 'app.wsgi.application'\n\nAUTHENTICATION_BACKENDS = [\n 'social_core.backends.github.GithubOAuth2',\n 'social_core.backends.azuread_tenant.AzureADTenantOAuth2',\n 'django.contrib.auth.backends.ModelBackend',\n]\n\nSOCIAL_AUTH_GITHUB_KEY = env('OAUTH_GITHUB_KEY', None)\nSOCIAL_AUTH_GITHUB_SECRET = env('OAUTH_GITHUB_SECRET', None)\nGITHUB_ADMIN_ORG_NAME = env('GITHUB_ADMIN_ORG_NAME', None)\nGITHUB_ADMIN_TEAM_NAME = env('GITHUB_ADMIN_TEAM_NAME', None)\n\nif GITHUB_ADMIN_ORG_NAME and GITHUB_ADMIN_TEAM_NAME:\n SOCIAL_AUTH_GITHUB_SCOPE = ['read:org']\n\nSOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_KEY = env('OAUTH_AAD_KEY', None)\nSOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_SECRET = env('OAUTH_AAD_SECRET', None)\nSOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_TENANT_ID = env('OAUTH_AAD_TENANT', None)\nAZUREAD_ADMIN_GROUP_ID = env('AZUREAD_ADMIN_GROUP_ID', None)\n\nif AZUREAD_ADMIN_GROUP_ID:\n SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_RESOURCE = 'https://graph.microsoft.com/'\n SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_SCOPE = ['Directory.Read.All']\n\nSOCIAL_AUTH_PIPELINE = [\n 'social_core.pipeline.social_auth.social_details',\n 'social_core.pipeline.social_auth.social_uid',\n 'social_core.pipeline.social_auth.auth_allowed',\n 'social_core.pipeline.social_auth.social_user',\n 'social_core.pipeline.user.get_username',\n 'social_core.pipeline.user.create_user',\n 'social_core.pipeline.social_auth.associate_user',\n 'social_core.pipeline.social_auth.load_extra_data',\n 'social_core.pipeline.user.user_details',\n 'server.social_auth.fetch_github_permissions',\n 'server.social_auth.fetch_azuread_permissions',\n]\n\nROLE_PROJECT_ADMIN = env('ROLE_PROJECT_ADMIN', 'project_admin')\nROLE_ANNOTATOR = env('ROLE_ANNOTATOR', 'annotator')\nROLE_ANNOTATION_APPROVER = env('ROLE_ANNOTATION_APPROVER', 'annotation_approver')\n\n# Database\n# https://docs.djangoproject.com/en/2.0/ref/settings/#databases\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3',\n 'NAME': path.join(BASE_DIR, 'db.sqlite3'),\n }\n}\n\n\n# Password validation\n# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\nREST_FRAMEWORK = {\n # Use Django's standard `django.contrib.auth` permissions,\n # or allow read-only access for unauthenticated users.\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',\n 'rest_framework.permissions.IsAuthenticated',\n ],\n 
'DEFAULT_AUTHENTICATION_CLASSES': (\n 'rest_framework.authentication.SessionAuthentication',\n 'rest_framework.authentication.TokenAuthentication',\n ),\n 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',\n 'PAGE_SIZE': env.int('DOCCANO_PAGE_SIZE', default=5),\n 'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),\n 'SEARCH_PARAM': 'q',\n 'DEFAULT_RENDERER_CLASSES': (\n 'rest_framework.renderers.JSONRenderer',\n 'rest_framework.renderers.BrowsableAPIRenderer',\n 'rest_framework_xml.renderers.XMLRenderer'\n )\n}\n\n# Internationalization\n# https://docs.djangoproject.com/en/2.0/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\nTEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'\nTEST_OUTPUT_DIR = path.join(BASE_DIR, 'junitxml')\n\nLOGIN_URL = '/login/'\nLOGIN_REDIRECT_URL = '/projects/'\nLOGOUT_REDIRECT_URL = '/'\n\ndjango_heroku.settings(locals(), test_runner=False)\n\n# Change 'default' database configuration with $DATABASE_URL.\nDATABASES['default'].update(dj_database_url.config(\n env='DATABASE_URL',\n conn_max_age=env.int('DATABASE_CONN_MAX_AGE', 500),\n ssl_require='sslmode' not in furl(env('DATABASE_URL', '')).args,\n))\n\n# work-around for dj-database-url: explicitly disable ssl for sqlite\nif DATABASES['default'].get('ENGINE') == 'django.db.backends.sqlite3':\n DATABASES['default'].get('OPTIONS', {}).pop('sslmode', None)\n\n# work-around for dj-database-url: patch ssl for mysql\nif DATABASES['default'].get('ENGINE') == 'django.db.backends.mysql':\n DATABASES['default'].get('OPTIONS', {}).pop('sslmode', None)\n if env('MYSQL_SSL_CA', None):\n DATABASES['default'].setdefault('OPTIONS', {})\\\n .setdefault('ssl', {}).setdefault('ca', env('MYSQL_SSL_CA', None))\n\n# default to a sensible modern driver for Azure SQL\nif DATABASES['default'].get('ENGINE') == 'sql_server.pyodbc':\n DATABASES['default'].setdefault('OPTIONS', {})\\\n .setdefault('driver', 'ODBC Driver 17 for SQL Server')\n\n# Honor the 'X-Forwarded-Proto' header for request.is_secure()\nSECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')\nSESSION_COOKIE_SECURE = env.bool('SESSION_COOKIE_SECURE', False)\nCSRF_COOKIE_SECURE = env.bool('CSRF_COOKIE_SECURE', False)\nCSRF_TRUSTED_ORIGINS = env.list('CSRF_TRUSTED_ORIGINS', [])\n\n# Allow all host headers\n# ALLOWED_HOSTS = ['*']\n\n# Size of the batch for creating documents\n# on the import phase\nIMPORT_BATCH_SIZE = env.int('IMPORT_BATCH_SIZE', 500)\n\nGOOGLE_TRACKING_ID = env('GOOGLE_TRACKING_ID', 'UA-125643874-2').strip()\n\nAZURE_APPINSIGHTS_IKEY = env('AZURE_APPINSIGHTS_IKEY', None)\nAPPLICATION_INSIGHTS = {\n 'ikey': AZURE_APPINSIGHTS_IKEY if AZURE_APPINSIGHTS_IKEY else None,\n 'endpoint': env('AZURE_APPINSIGHTS_ENDPOINT', None),\n}\n\n# necessary for email verification of new accounts\nEMAIL_USE_TLS = env.bool('EMAIL_USE_TLS', False)\nEMAIL_HOST = env('EMAIL_HOST', None)\nEMAIL_HOST_USER = env('EMAIL_HOST_USER', None)\nEMAIL_HOST_PASSWORD = env('EMAIL_HOST_PASSWORD', None)\nEMAIL_PORT = env.int('EMAIL_PORT', 587)\n\nif not EMAIL_HOST:\n EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'\n\n\nif DEBUG:\n CORS_ORIGIN_WHITELIST = (\n 'http://127.0.0.1:3000',\n 'http://0.0.0.0:3000',\n 'http://localhost:3000'\n )\n",
"path": "app/app/settings.py"
}
] | diff --git a/app/app/settings.py b/app/app/settings.py
index b55abca0e6..56307df505 100644
--- a/app/app/settings.py
+++ b/app/app/settings.py
@@ -314,4 +314,5 @@
CORS_ORIGIN_WHITELIST = (
'http://127.0.0.1:3000',
'http://0.0.0.0:3000',
+ 'http://localhost:3000'
)
|
scikit-hep__pyhf-1460 | Logging configuration in contrib/utils
# Question
`pyhf.contrib.utils` sets up logging:
https://github.com/scikit-hep/pyhf/blob/6b769fd6f5e1473deba2b4c55d49ebdb3db5b447/src/pyhf/contrib/utils.py#L9
This interferes with custom logging users may want to set up. To achieve this now, they would have to do so before `from pyhf.contrib.utils import download`. To avoid this issue, the logging should not be configured in this part of the code (and only for the CLI).
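For reference, a common library-friendly pattern is to create the module-level logger without touching global logging configuration at all; the sketch below illustrates that pattern and is not pyhf's actual code:
```
import logging

# Library module: obtain a named logger, but leave root-logger
# configuration (handlers, level, format) to the importing application.
log = logging.getLogger(__name__)
# Optional: attach a NullHandler so the library stays silent unless the
# application configures logging itself.
log.addHandler(logging.NullHandler())
```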
# Relevant Issues and Pull Requests
#865
User-defined log formatting
# Description
`pyhf` uses `logging` for outputs, and calls `logging.basicConfig()` in a few places.
This has the effect of preventing the user to set their desired logging behavior after `pyhf` import.
While calling this a bug might be a bit of a stretch, I think it might be unintentional since `pyhf` does not apply any logging formatting as far as I can tell.
# Expected Behavior
I expect no calls to `logging.basicConfig()` within `pyhf` to leave the formatting fully up to the user, no matter whether they want to set it before or after importing `pyhf`.
# Actual Behavior
User-defined `logging` formatting only works before importing `pyhf`.
# Steps to Reproduce
importing `pyhf` before formatting:
```
import logging
import pyhf
print(pyhf.__version__)
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.info("message")
```
output:
```
0.4.1
```
and when applying formatting before input, the expected behavior:
```
import logging
logging.basicConfig(level=logging.INFO)
import pyhf
print(pyhf.__version__)
log = logging.getLogger(__name__)
log.info("message")
```
output:
```
0.4.1
INFO:__main__:message
```
# Checklist
- [ ] Run `git fetch` to get the most up to date version of `master`
- no, but checked code on master to confirm that the relevant part is unchanged
- [X] Searched through existing Issues to confirm this is not a duplicate issue
- [X] Filled out the Description, Expected Behavior, Actual Behavior, and Steps to Reproduce sections above or have edited/removed them in a way that fully describes the issue
| [
{
"content": "\"\"\"Helper utilities for common tasks.\"\"\"\n\nfrom urllib.parse import urlparse\nimport tarfile\nfrom io import BytesIO\nimport logging\nfrom .. import exceptions\n\nlogging.basicConfig()\nlog = logging.getLogger(__name__)\n\n__all__ = [\"download\"]\n\n\ndef __dir__():\n return __all__\n\n\ntry:\n import requests\n\n def download(archive_url, output_directory, force=False, compress=False):\n \"\"\"\n Download the patchset archive from the remote URL and extract it in a\n directory at the path given.\n\n Example:\n\n >>> from pyhf.contrib.utils import download\n >>> download(\"https://doi.org/10.17182/hepdata.90607.v3/r3\", \"1Lbb-likelihoods\")\n >>> import os\n >>> sorted(os.listdir(\"1Lbb-likelihoods\"))\n ['BkgOnly.json', 'README.md', 'patchset.json']\n >>> download(\"https://doi.org/10.17182/hepdata.90607.v3/r3\", \"1Lbb-likelihoods.tar.gz\", compress=True)\n >>> import glob\n >>> glob.glob(\"1Lbb-likelihoods.tar.gz\")\n ['1Lbb-likelihoods.tar.gz']\n\n Args:\n archive_url (:obj:`str`): The URL of the :class:`~pyhf.patchset.PatchSet` archive to download.\n output_directory (:obj:`str`): Name of the directory to unpack the archive into.\n force (:obj:`bool`): Force download from non-approved host. Default is ``False``.\n compress (:obj:`bool`): Keep the archive in a compressed ``tar.gz`` form. Default is ``False``.\n\n Raises:\n :class:`~pyhf.exceptions.InvalidArchiveHost`: if the provided archive host name is not known to be valid\n \"\"\"\n if not force:\n valid_hosts = [\"www.hepdata.net\", \"doi.org\"]\n netloc = urlparse(archive_url).netloc\n if netloc not in valid_hosts:\n raise exceptions.InvalidArchiveHost(\n f\"{netloc} is not an approved archive host: {', '.join(str(host) for host in valid_hosts)}\\n\"\n + \"To download an archive from this host use the --force option.\"\n )\n\n with requests.get(archive_url) as response:\n if compress:\n with open(output_directory, \"wb\") as archive:\n archive.write(response.content)\n else:\n with tarfile.open(\n mode=\"r|gz\", fileobj=BytesIO(response.content)\n ) as archive:\n archive.extractall(output_directory)\n\n\nexcept ModuleNotFoundError:\n log.error(\n \"\\nInstallation of the contrib extra is required to use pyhf.contrib.utils.download\"\n + \"\\nPlease install with: python -m pip install pyhf[contrib]\\n\",\n exc_info=True,\n )\n",
"path": "src/pyhf/contrib/utils.py"
}
] | [
{
"content": "\"\"\"Helper utilities for common tasks.\"\"\"\n\nfrom urllib.parse import urlparse\nimport tarfile\nfrom io import BytesIO\nimport logging\nfrom .. import exceptions\n\nlog = logging.getLogger(__name__)\n\n__all__ = [\"download\"]\n\n\ndef __dir__():\n return __all__\n\n\ntry:\n import requests\n\n def download(archive_url, output_directory, force=False, compress=False):\n \"\"\"\n Download the patchset archive from the remote URL and extract it in a\n directory at the path given.\n\n Example:\n\n >>> from pyhf.contrib.utils import download\n >>> download(\"https://doi.org/10.17182/hepdata.90607.v3/r3\", \"1Lbb-likelihoods\")\n >>> import os\n >>> sorted(os.listdir(\"1Lbb-likelihoods\"))\n ['BkgOnly.json', 'README.md', 'patchset.json']\n >>> download(\"https://doi.org/10.17182/hepdata.90607.v3/r3\", \"1Lbb-likelihoods.tar.gz\", compress=True)\n >>> import glob\n >>> glob.glob(\"1Lbb-likelihoods.tar.gz\")\n ['1Lbb-likelihoods.tar.gz']\n\n Args:\n archive_url (:obj:`str`): The URL of the :class:`~pyhf.patchset.PatchSet` archive to download.\n output_directory (:obj:`str`): Name of the directory to unpack the archive into.\n force (:obj:`bool`): Force download from non-approved host. Default is ``False``.\n compress (:obj:`bool`): Keep the archive in a compressed ``tar.gz`` form. Default is ``False``.\n\n Raises:\n :class:`~pyhf.exceptions.InvalidArchiveHost`: if the provided archive host name is not known to be valid\n \"\"\"\n if not force:\n valid_hosts = [\"www.hepdata.net\", \"doi.org\"]\n netloc = urlparse(archive_url).netloc\n if netloc not in valid_hosts:\n raise exceptions.InvalidArchiveHost(\n f\"{netloc} is not an approved archive host: {', '.join(str(host) for host in valid_hosts)}\\n\"\n + \"To download an archive from this host use the --force option.\"\n )\n\n with requests.get(archive_url) as response:\n if compress:\n with open(output_directory, \"wb\") as archive:\n archive.write(response.content)\n else:\n with tarfile.open(\n mode=\"r|gz\", fileobj=BytesIO(response.content)\n ) as archive:\n archive.extractall(output_directory)\n\n\nexcept ModuleNotFoundError:\n log.error(\n \"\\nInstallation of the contrib extra is required to use pyhf.contrib.utils.download\"\n + \"\\nPlease install with: python -m pip install pyhf[contrib]\\n\",\n exc_info=True,\n )\n",
"path": "src/pyhf/contrib/utils.py"
}
] | diff --git a/src/pyhf/contrib/utils.py b/src/pyhf/contrib/utils.py
index da60ac01fb..06020bac39 100644
--- a/src/pyhf/contrib/utils.py
+++ b/src/pyhf/contrib/utils.py
@@ -6,7 +6,6 @@
import logging
from .. import exceptions
-logging.basicConfig()
log = logging.getLogger(__name__)
__all__ = ["download"]
|
jazzband__pip-tools-2042 | Broken build due to failed `linkcheck` job
I've noticed that the Matrix badges are frequently inaccessible; see the README:
<img width="893" alt="image" src="https://github.com/jazzband/pip-tools/assets/7377671/94c2d45a-12ef-4237-8a85-434ee1bd7c05">
Sometimes, a certain issue even results in CI builds [breaking](https://github.com/jazzband/pip-tools/actions/runs/5920050370/job/16051009863#step:10:446) (caught in #1973):
```
broken https://img.shields.io/matrix/pip-tools:matrix.org?label=Discuss%20on%20Matrix%20at%20%23pip-tools%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - 408 Client Error: Request Timeout for url: https://img.shields.io/matrix/pip-tools:matrix.org?label=Discuss%20on%20Matrix%20at%20%23pip-tools%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
```
Perhaps we should consider [ignoring](https://github.com/jazzband/pip-tools/blob/04d2235716bc43cad3c10288081a4d2b7ee56944/docs/conf.py#L55-L57) `https://img.shields.io/matrix` as well?
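A sketch of what that could look like in `docs/conf.py`, mirroring the existing `matrix.to` entry (the exact regex below is a guess):
```
# Sphinx linkcheck: skip badge URLs that routinely time out in CI.
linkcheck_ignore = [
    r"^https://matrix\.to/#",
    r"^https://img\.shields\.io/matrix",
]
```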
/cc @webknjaz
| [
{
"content": "# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\"\"\"Configuration file for the Sphinx documentation builder.\"\"\"\n\nfrom __future__ import annotations\n\nfrom importlib.metadata import version as get_version\nfrom pathlib import Path\n\nfrom sphinx.util import logging\nfrom sphinx.util.console import bold\n\nlogger = logging.getLogger(__name__)\n\n# -- Path setup --------------------------------------------------------------\n\nPROJECT_ROOT_DIR = Path(__file__).parents[1].resolve()\n\n\n# -- Project information -----------------------------------------------------\n\nproject = \"pip-tools\"\nauthor = f\"{project} Contributors\"\ncopyright = f\"The {author}\"\n\n# The full version, including alpha/beta/rc tags\nrelease = get_version(project)\n\n# The short X.Y version\nversion = \".\".join(release.split(\".\")[:3])\n\nlogger.info(bold(\"%s version: %s\"), project, version)\nlogger.info(bold(\"%s release: %s\"), project, release)\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\"myst_parser\", \"sphinxcontrib.programoutput\"]\n\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"furo\"\nhtml_title = f\"<nobr>{project}</nobr> documentation v{release}\"\n\n\n# -------------------------------------------------------------------------\ndefault_role = \"any\"\nnitpicky = True\n\nlinkcheck_ignore = [\n r\"^https://matrix\\.to/#\",\n]\n\nsuppress_warnings = [\"myst.xref_missing\"]\n",
"path": "docs/conf.py"
}
] | [
{
"content": "# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\"\"\"Configuration file for the Sphinx documentation builder.\"\"\"\n\nfrom __future__ import annotations\n\nfrom importlib.metadata import version as get_version\nfrom pathlib import Path\n\nfrom sphinx.util import logging\nfrom sphinx.util.console import bold\n\nlogger = logging.getLogger(__name__)\n\n# -- Path setup --------------------------------------------------------------\n\nPROJECT_ROOT_DIR = Path(__file__).parents[1].resolve()\n\n\n# -- Project information -----------------------------------------------------\n\nproject = \"pip-tools\"\nauthor = f\"{project} Contributors\"\ncopyright = f\"The {author}\"\n\n# The full version, including alpha/beta/rc tags\nrelease = get_version(project)\n\n# The short X.Y version\nversion = \".\".join(release.split(\".\")[:3])\n\nlogger.info(bold(\"%s version: %s\"), project, version)\nlogger.info(bold(\"%s release: %s\"), project, release)\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\"myst_parser\", \"sphinxcontrib.programoutput\"]\n\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"furo\"\nhtml_title = f\"<nobr>{project}</nobr> documentation v{release}\"\n\n\n# -------------------------------------------------------------------------\ndefault_role = \"any\"\nnitpicky = True\n\nlinkcheck_ignore = [\n r\"^https://matrix\\.to/#\",\n r\"^https://img.shields.io/matrix\",\n]\n\nsuppress_warnings = [\"myst.xref_missing\"]\n",
"path": "docs/conf.py"
}
] | diff --git a/docs/conf.py b/docs/conf.py
index 1f8491603..7e886590f 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -54,6 +54,7 @@
linkcheck_ignore = [
r"^https://matrix\.to/#",
+ r"^https://img.shields.io/matrix",
]
suppress_warnings = ["myst.xref_missing"]
|
keras-team__keras-2992 | Why does the TF-IDF matrix generated by keras.preprocessing.text.Tokenizer() have negative values?
Say, if I run the following script:
```
>>> import keras
>>> tk = keras.preprocessing.text.Tokenizer()
>>> texts = ['I love you.', 'I love you, too.']
>>> tk.fit_on_texts(texts)
>>> tk.texts_to_matrix(texts, mode='tfidf')
```
The output will be:
```
array([[ 0. , -1.09861229, -1.09861229, -1.09861229, 0. ],
       [ 0. , -1.38629436, -1.38629436, -1.38629436, -1.38629436]])
```
But shouldn't tf-idf values be non-negative?
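For reference, the reported entries are exactly natural logarithms of ratios below one, which is where the minus sign comes from (plain NumPy arithmetic, not Keras code):
```
>>> import numpy as np
>>> np.log(1 / 3.0)   # matches the -1.09861229 entries above
-1.0986122886681098
>>> np.log(1 / 4.0)   # matches the -1.38629436 entries above
-1.3862943611198906
```
So the weighting evidently takes the log of a quantity smaller than 1 for these words; a scheme such as tf = 1 + log(count) with idf = log(1 + N / (1 + document frequency)), as in the source shown below, stays non-negative provided the division is floating-point.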
By the way, is there a neat way to get a word by its index, or the vocabulary (in the order of word indices) of the Tokenizer() class? Say, sometimes I want to know what the most frequent word in the documents is, which means I want to access the word with index 1.
I can do it by running:
```
>>> vocab = tk.word_index.items()
>>> vocab.sort(key=lambda x:x[1])
```
This gives:
```
>>> vocab
[('i', 1), ('you', 2), ('love', 3), ('too', 4)]
```
But isn't that a bit hacky?
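A slightly more direct lookup, assuming `word_index` is a plain word-to-index dict as in the snippet above, is to invert it once:
```
>>> # Invert word_index so the most frequent word is simply index_word[1]
>>> index_word = {index: word for word, index in tk.word_index.items()}
>>> index_word[1]
'i'
```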
Thank you!
| [
{
"content": "# -*- coding: utf-8 -*-\n'''These preprocessing utilities would greatly benefit\nfrom a fast Cython rewrite.\n'''\nfrom __future__ import absolute_import\n\nimport string\nimport sys\nimport numpy as np\nfrom six.moves import range\nfrom six.moves import zip\n\nif sys.version_info < (3,):\n maketrans = string.maketrans\nelse:\n maketrans = str.maketrans\n\n\ndef base_filter():\n f = string.punctuation\n f = f.replace(\"'\", '')\n f += '\\t\\n'\n return f\n\n\ndef text_to_word_sequence(text, filters=base_filter(), lower=True, split=\" \"):\n '''prune: sequence of characters to filter out\n '''\n if lower:\n text = text.lower()\n text = text.translate(maketrans(filters, split*len(filters)))\n seq = text.split(split)\n return [_f for _f in seq if _f]\n\n\ndef one_hot(text, n, filters=base_filter(), lower=True, split=\" \"):\n seq = text_to_word_sequence(text, filters=filters, lower=lower, split=split)\n return [(abs(hash(w)) % (n - 1) + 1) for w in seq]\n\n\nclass Tokenizer(object):\n def __init__(self, nb_words=None, filters=base_filter(),\n lower=True, split=' ', char_level=False):\n '''The class allows to vectorize a text corpus, by turning each\n text into either a sequence of integers (each integer being the index\n of a token in a dictionary) or into a vector where the coefficient\n for each token could be binary, based on word count, based on tf-idf...\n\n # Arguments\n nb_words: the maximum number of words to keep, based\n on word frequency. Only the most common `nb_words` words will\n be kept.\n filters: a string where each element is a character that will be\n filtered from the texts. The default is all punctuation, plus\n tabs and line breaks, minus the `'` character.\n lower: boolean. Whether to convert the texts to lowercase.\n split: character or string to use for token splitting.\n char_level: if True, every character will be treated as a word.\n\n By default, all punctuation is removed, turning the texts into\n space-separated sequences of words\n (words maybe include the `'` character). These sequences are then\n split into lists of tokens. 
They will then be indexed or vectorized.\n\n `0` is a reserved index that won't be assigned to any word.\n '''\n self.word_counts = {}\n self.word_docs = {}\n self.filters = filters\n self.split = split\n self.lower = lower\n self.nb_words = nb_words\n self.document_count = 0\n self.char_level = char_level\n\n def fit_on_texts(self, texts):\n '''Required before using texts_to_sequences or texts_to_matrix\n\n # Arguments\n texts: can be a list of strings,\n or a generator of strings (for memory-efficiency)\n '''\n self.document_count = 0\n for text in texts:\n self.document_count += 1\n seq = text if self.char_level else text_to_word_sequence(text, self.filters, self.lower, self.split)\n for w in seq:\n if w in self.word_counts:\n self.word_counts[w] += 1\n else:\n self.word_counts[w] = 1\n for w in set(seq):\n if w in self.word_docs:\n self.word_docs[w] += 1\n else:\n self.word_docs[w] = 1\n\n wcounts = list(self.word_counts.items())\n wcounts.sort(key=lambda x: x[1], reverse=True)\n sorted_voc = [wc[0] for wc in wcounts]\n self.word_index = dict(list(zip(sorted_voc, list(range(1, len(sorted_voc) + 1)))))\n\n self.index_docs = {}\n for w, c in list(self.word_docs.items()):\n self.index_docs[self.word_index[w]] = c\n\n def fit_on_sequences(self, sequences):\n '''Required before using sequences_to_matrix\n (if fit_on_texts was never called)\n '''\n self.document_count = len(sequences)\n self.index_docs = {}\n for seq in sequences:\n seq = set(seq)\n for i in seq:\n if i not in self.index_docs:\n self.index_docs[i] = 1\n else:\n self.index_docs[i] += 1\n\n def texts_to_sequences(self, texts):\n '''Transforms each text in texts in a sequence of integers.\n Only top \"nb_words\" most frequent words will be taken into account.\n Only words known by the tokenizer will be taken into account.\n\n Returns a list of sequences.\n '''\n res = []\n for vect in self.texts_to_sequences_generator(texts):\n res.append(vect)\n return res\n\n def texts_to_sequences_generator(self, texts):\n '''Transforms each text in texts in a sequence of integers.\n Only top \"nb_words\" most frequent words will be taken into account.\n Only words known by the tokenizer will be taken into account.\n\n Yields individual sequences.\n\n # Arguments:\n texts: list of strings.\n '''\n nb_words = self.nb_words\n for text in texts:\n seq = text if self.char_level else text_to_word_sequence(text, self.filters, self.lower, self.split)\n vect = []\n for w in seq:\n i = self.word_index.get(w)\n if i is not None:\n if nb_words and i >= nb_words:\n continue\n else:\n vect.append(i)\n yield vect\n\n def texts_to_matrix(self, texts, mode='binary'):\n '''Convert a list of texts to a Numpy matrix,\n according to some vectorization mode.\n\n # Arguments:\n texts: list of strings.\n modes: one of \"binary\", \"count\", \"tfidf\", \"freq\"\n '''\n sequences = self.texts_to_sequences(texts)\n return self.sequences_to_matrix(sequences, mode=mode)\n\n def sequences_to_matrix(self, sequences, mode='binary'):\n '''Converts a list of sequences into a Numpy matrix,\n according to some vectorization mode.\n\n # Arguments:\n sequences: list of sequences\n (a sequence is a list of integer word indices).\n modes: one of \"binary\", \"count\", \"tfidf\", \"freq\"\n '''\n if not self.nb_words:\n if self.word_index:\n nb_words = len(self.word_index) + 1\n else:\n raise Exception('Specify a dimension (nb_words argument), '\n 'or fit on some text data first.')\n else:\n nb_words = self.nb_words\n\n if mode == 'tfidf' and not self.document_count:\n raise 
Exception('Fit the Tokenizer on some data '\n 'before using tfidf mode.')\n\n X = np.zeros((len(sequences), nb_words))\n for i, seq in enumerate(sequences):\n if not seq:\n continue\n counts = {}\n for j in seq:\n if j >= nb_words:\n continue\n if j not in counts:\n counts[j] = 1.\n else:\n counts[j] += 1\n for j, c in list(counts.items()):\n if mode == 'count':\n X[i][j] = c\n elif mode == 'freq':\n X[i][j] = c / len(seq)\n elif mode == 'binary':\n X[i][j] = 1\n elif mode == 'tfidf':\n # Use weighting scheme 2 in\n # https://en.wikipedia.org/wiki/Tf%E2%80%93idf\n tf = 1 + np.log(c)\n idf = np.log(1 + self.document_count / (1 + self.index_docs.get(j, 0)))\n X[i][j] = tf * idf\n else:\n raise Exception('Unknown vectorization mode: ' + str(mode))\n return X\n",
"path": "keras/preprocessing/text.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n'''These preprocessing utilities would greatly benefit\nfrom a fast Cython rewrite.\n'''\nfrom __future__ import absolute_import\nfrom __future__ import division\n\nimport string\nimport sys\nimport numpy as np\nfrom six.moves import range\nfrom six.moves import zip\n\nif sys.version_info < (3,):\n maketrans = string.maketrans\nelse:\n maketrans = str.maketrans\n\n\ndef base_filter():\n f = string.punctuation\n f = f.replace(\"'\", '')\n f += '\\t\\n'\n return f\n\n\ndef text_to_word_sequence(text, filters=base_filter(), lower=True, split=\" \"):\n '''prune: sequence of characters to filter out\n '''\n if lower:\n text = text.lower()\n text = text.translate(maketrans(filters, split*len(filters)))\n seq = text.split(split)\n return [_f for _f in seq if _f]\n\n\ndef one_hot(text, n, filters=base_filter(), lower=True, split=\" \"):\n seq = text_to_word_sequence(text, filters=filters, lower=lower, split=split)\n return [(abs(hash(w)) % (n - 1) + 1) for w in seq]\n\n\nclass Tokenizer(object):\n def __init__(self, nb_words=None, filters=base_filter(),\n lower=True, split=' ', char_level=False):\n '''The class allows to vectorize a text corpus, by turning each\n text into either a sequence of integers (each integer being the index\n of a token in a dictionary) or into a vector where the coefficient\n for each token could be binary, based on word count, based on tf-idf...\n\n # Arguments\n nb_words: the maximum number of words to keep, based\n on word frequency. Only the most common `nb_words` words will\n be kept.\n filters: a string where each element is a character that will be\n filtered from the texts. The default is all punctuation, plus\n tabs and line breaks, minus the `'` character.\n lower: boolean. Whether to convert the texts to lowercase.\n split: character or string to use for token splitting.\n char_level: if True, every character will be treated as a word.\n\n By default, all punctuation is removed, turning the texts into\n space-separated sequences of words\n (words maybe include the `'` character). These sequences are then\n split into lists of tokens. 
They will then be indexed or vectorized.\n\n `0` is a reserved index that won't be assigned to any word.\n '''\n self.word_counts = {}\n self.word_docs = {}\n self.filters = filters\n self.split = split\n self.lower = lower\n self.nb_words = nb_words\n self.document_count = 0\n self.char_level = char_level\n\n def fit_on_texts(self, texts):\n '''Required before using texts_to_sequences or texts_to_matrix\n\n # Arguments\n texts: can be a list of strings,\n or a generator of strings (for memory-efficiency)\n '''\n self.document_count = 0\n for text in texts:\n self.document_count += 1\n seq = text if self.char_level else text_to_word_sequence(text, self.filters, self.lower, self.split)\n for w in seq:\n if w in self.word_counts:\n self.word_counts[w] += 1\n else:\n self.word_counts[w] = 1\n for w in set(seq):\n if w in self.word_docs:\n self.word_docs[w] += 1\n else:\n self.word_docs[w] = 1\n\n wcounts = list(self.word_counts.items())\n wcounts.sort(key=lambda x: x[1], reverse=True)\n sorted_voc = [wc[0] for wc in wcounts]\n self.word_index = dict(list(zip(sorted_voc, list(range(1, len(sorted_voc) + 1)))))\n\n self.index_docs = {}\n for w, c in list(self.word_docs.items()):\n self.index_docs[self.word_index[w]] = c\n\n def fit_on_sequences(self, sequences):\n '''Required before using sequences_to_matrix\n (if fit_on_texts was never called)\n '''\n self.document_count = len(sequences)\n self.index_docs = {}\n for seq in sequences:\n seq = set(seq)\n for i in seq:\n if i not in self.index_docs:\n self.index_docs[i] = 1\n else:\n self.index_docs[i] += 1\n\n def texts_to_sequences(self, texts):\n '''Transforms each text in texts in a sequence of integers.\n Only top \"nb_words\" most frequent words will be taken into account.\n Only words known by the tokenizer will be taken into account.\n\n Returns a list of sequences.\n '''\n res = []\n for vect in self.texts_to_sequences_generator(texts):\n res.append(vect)\n return res\n\n def texts_to_sequences_generator(self, texts):\n '''Transforms each text in texts in a sequence of integers.\n Only top \"nb_words\" most frequent words will be taken into account.\n Only words known by the tokenizer will be taken into account.\n\n Yields individual sequences.\n\n # Arguments:\n texts: list of strings.\n '''\n nb_words = self.nb_words\n for text in texts:\n seq = text if self.char_level else text_to_word_sequence(text, self.filters, self.lower, self.split)\n vect = []\n for w in seq:\n i = self.word_index.get(w)\n if i is not None:\n if nb_words and i >= nb_words:\n continue\n else:\n vect.append(i)\n yield vect\n\n def texts_to_matrix(self, texts, mode='binary'):\n '''Convert a list of texts to a Numpy matrix,\n according to some vectorization mode.\n\n # Arguments:\n texts: list of strings.\n modes: one of \"binary\", \"count\", \"tfidf\", \"freq\"\n '''\n sequences = self.texts_to_sequences(texts)\n return self.sequences_to_matrix(sequences, mode=mode)\n\n def sequences_to_matrix(self, sequences, mode='binary'):\n '''Converts a list of sequences into a Numpy matrix,\n according to some vectorization mode.\n\n # Arguments:\n sequences: list of sequences\n (a sequence is a list of integer word indices).\n modes: one of \"binary\", \"count\", \"tfidf\", \"freq\"\n '''\n if not self.nb_words:\n if self.word_index:\n nb_words = len(self.word_index) + 1\n else:\n raise Exception('Specify a dimension (nb_words argument), '\n 'or fit on some text data first.')\n else:\n nb_words = self.nb_words\n\n if mode == 'tfidf' and not self.document_count:\n raise 
Exception('Fit the Tokenizer on some data '\n 'before using tfidf mode.')\n\n X = np.zeros((len(sequences), nb_words))\n for i, seq in enumerate(sequences):\n if not seq:\n continue\n counts = {}\n for j in seq:\n if j >= nb_words:\n continue\n if j not in counts:\n counts[j] = 1.\n else:\n counts[j] += 1\n for j, c in list(counts.items()):\n if mode == 'count':\n X[i][j] = c\n elif mode == 'freq':\n X[i][j] = c / len(seq)\n elif mode == 'binary':\n X[i][j] = 1\n elif mode == 'tfidf':\n # Use weighting scheme 2 in\n # https://en.wikipedia.org/wiki/Tf%E2%80%93idf\n tf = 1 + np.log(c)\n idf = np.log(1 + self.document_count / (1 + self.index_docs.get(j, 0)))\n X[i][j] = tf * idf\n else:\n raise Exception('Unknown vectorization mode: ' + str(mode))\n return X\n",
"path": "keras/preprocessing/text.py"
}
] | diff --git a/keras/preprocessing/text.py b/keras/preprocessing/text.py
index 4c4e45155057..c543666cef1a 100644
--- a/keras/preprocessing/text.py
+++ b/keras/preprocessing/text.py
@@ -3,6 +3,7 @@
from a fast Cython rewrite.
'''
from __future__ import absolute_import
+from __future__ import division
import string
import sys
|
mitmproxy__mitmproxy-6117 | Warn new users about the lazy creation of connections (when requests are expected to be served in the script fully and only)
#### Problem Description
The [example script](https://docs.mitmproxy.org/stable/addons-examples/#http-reply-from-proxy) for not sending any data to the server does not prevent mitmproxy from **establishing a connection** to the server.
Why is this connection established when no data has to be sent to this host right away, and possibly never in the future?
I trusted mitmproxy to **not send _any_ data, as stated**, but I had to discover (the hard way) that **that's not the case**.
I used mitmproxy in an environment where it was required to stay silent, but it wasn't compliant.
Could you please consider warning new users about this behavior?
<strike>Is there an easy way to prevent establishing connections?
Is it planned to do so on default in this case?</strike>
*EDIT*: Trying to prevent the connection by rerouting it to a closed port killed the flow for the client. Routing to a different host with an invalid certificate did work, though: mitmproxy warned me in the event log and suggested setting the connection strategy to lazy, which solved the problem.
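For reference, the connection strategy mentioned above can be selected explicitly. This is only a hedged pointer: it assumes the `connection_strategy` option (values `eager`/`lazy`) shipped with mitmproxy 9.x; check `mitmproxy --options` for your version.
```bash
# Defer upstream connections until data actually has to be sent upstream.
mitmproxy --set connection_strategy=lazy
```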
#### Steps to reproduce the behavior:
1. Load the example script
2. Have the client request example.com
3. View the event log
#### System Information
Mitmproxy: 9.0.1
Python: 3.10.6
OpenSSL: OpenSSL 3.0.7 1 Nov 2022
Platform: Linux-5.15.0-71-generic-x86_64-with-glibc2.35
| [
{
"content": "\"\"\"Send a reply from the proxy without sending any data to the remote server.\"\"\"\nfrom mitmproxy import http\n\n\ndef request(flow: http.HTTPFlow) -> None:\n if flow.request.pretty_url == \"http://example.com/path\":\n flow.response = http.Response.make(\n 200, # (optional) status code\n b\"Hello World\", # (optional) content\n {\"Content-Type\": \"text/html\"}, # (optional) headers\n )\n",
"path": "examples/addons/http-reply-from-proxy.py"
}
] | [
{
"content": "\"\"\"Send a reply from the proxy without sending the request to the remote server.\"\"\"\nfrom mitmproxy import http\n\n\ndef request(flow: http.HTTPFlow) -> None:\n if flow.request.pretty_url == \"http://example.com/path\":\n flow.response = http.Response.make(\n 200, # (optional) status code\n b\"Hello World\", # (optional) content\n {\"Content-Type\": \"text/html\"}, # (optional) headers\n )\n",
"path": "examples/addons/http-reply-from-proxy.py"
}
] | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 16a539b060..a018cf5ed1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,8 @@
## Unreleased: mitmproxy next
+* Change wording in the [http-reply-from-proxy.py example](https://github.com/mitmproxy/mitmproxy/blob/main/examples/addons/http-reply-from-proxy.py).
+ ([#6117](https://github.com/mitmproxy/mitmproxy/pull/6117), @Semnodime)
* Added option to specify an elliptic curve for key exchange between mitmproxy <-> server
([#6170](https://github.com/mitmproxy/mitmproxy/pull/6170), @Mike-Ki-ASD)
* Add "Prettier" code linting tool to mitmweb.
diff --git a/examples/addons/http-reply-from-proxy.py b/examples/addons/http-reply-from-proxy.py
index 3ce35c5a4f..e6470a3da4 100644
--- a/examples/addons/http-reply-from-proxy.py
+++ b/examples/addons/http-reply-from-proxy.py
@@ -1,4 +1,4 @@
-"""Send a reply from the proxy without sending any data to the remote server."""
+"""Send a reply from the proxy without sending the request to the remote server."""
from mitmproxy import http
|
pwndbg__pwndbg-908 | telescope skipping repeated vals is incorrect for last elements
@anthraxx please see this: for a count of 16 we are not showing the repeated value :( (a minimal illustration follows the output below)
```
pwndbg> telescope 0x21c108086888 15
00:0000│ 0x21c108086888 ◂— 0x3ff199999999999a
01:0008│ 0x21c108086890 ◂— 0x3ff3333333333333
02:0010│ 0x21c108086898 ◂— 0x3ff4cccccccccccd
03:0018│ 0x21c1080868a0 ◂— 0x3ff6666666666666
04:0020│ 0x21c1080868a8 ◂— 0x3ff8000000000000
05:0028│ 0x21c1080868b0 ◂— 0x80cd3f9082858b9
06:0030│ 0x21c1080868b8 ◂— 0x80cd44108086881
07:0038│ 0x21c1080868c0 ◂— 0x804222d08283139
08:0040│ 0x21c1080868c8 ◂— 0x2000080b7191
09:0048│ 0x21c1080868d0 ◂— 0x804222d08282d49
0a:0050│ 0x21c1080868d8 ◂— 0x100804222d
0b:0058│ 0x21c1080868e0 ◂— 0xa0c01000000000
0c:0060│ 0x21c1080868e8 ◂— 0x947770000039f2
0d:0068│ 0x21c1080868f0 ◂— 0x2000039f2
0e:0070│ 0x21c1080868f8 ◂— 0x0
pwndbg> telescope 0x21c108086888 16
00:0000│ 0x21c108086888 ◂— 0x3ff199999999999a
01:0008│ 0x21c108086890 ◂— 0x3ff3333333333333
02:0010│ 0x21c108086898 ◂— 0x3ff4cccccccccccd
03:0018│ 0x21c1080868a0 ◂— 0x3ff6666666666666
04:0020│ 0x21c1080868a8 ◂— 0x3ff8000000000000
05:0028│ 0x21c1080868b0 ◂— 0x80cd3f9082858b9
06:0030│ 0x21c1080868b8 ◂— 0x80cd44108086881
07:0038│ 0x21c1080868c0 ◂— 0x804222d08283139
08:0040│ 0x21c1080868c8 ◂— 0x2000080b7191
09:0048│ 0x21c1080868d0 ◂— 0x804222d08282d49
0a:0050│ 0x21c1080868d8 ◂— 0x100804222d
0b:0058│ 0x21c1080868e0 ◂— 0xa0c01000000000
0c:0060│ 0x21c1080868e8 ◂— 0x947770000039f2
0d:0068│ 0x21c1080868f0 ◂— 0x2000039f2
0e:0070│ 0x21c1080868f8 ◂— 0x0
pwndbg> telescope 0x21c108086888 17
00:0000│ 0x21c108086888 ◂— 0x3ff199999999999a
01:0008│ 0x21c108086890 ◂— 0x3ff3333333333333
02:0010│ 0x21c108086898 ◂— 0x3ff4cccccccccccd
03:0018│ 0x21c1080868a0 ◂— 0x3ff6666666666666
04:0020│ 0x21c1080868a8 ◂— 0x3ff8000000000000
05:0028│ 0x21c1080868b0 ◂— 0x80cd3f9082858b9
06:0030│ 0x21c1080868b8 ◂— 0x80cd44108086881
07:0038│ 0x21c1080868c0 ◂— 0x804222d08283139
08:0040│ 0x21c1080868c8 ◂— 0x2000080b7191
09:0048│ 0x21c1080868d0 ◂— 0x804222d08282d49
0a:0050│ 0x21c1080868d8 ◂— 0x100804222d
0b:0058│ 0x21c1080868e0 ◂— 0xa0c01000000000
0c:0060│ 0x21c1080868e8 ◂— 0x947770000039f2
0d:0068│ 0x21c1080868f0 ◂— 0x2000039f2
0e:0070│ 0x21c1080868f8 ◂— 0x0
0f:0078│ 0x21c108086900 ◂— 0x0
10:0080│ 0x21c108086908 ◂— 0x2208042205
pwndbg>
```
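The missing line appears to come from the repeat-collapsing buffer never being flushed once the loop over addresses ends (see the patch at the end of this record). A minimal, self-contained sketch of that pattern, with made-up names purely for illustration (this is not pwndbg code):
```python
def collapse(values, minimum=3):
    """Collapse runs of repeated values into a '... N skipped' marker."""
    result, buffer = [], []
    last = object()  # sentinel that never equals a real value

    def flush():
        # Mirror of the collapse logic: the first repeated line was already
        # emitted, hence the +1 when comparing against the minimum.
        if len(buffer) + 1 >= minimum:
            result.append("... %d skipped" % len(buffer))
        else:
            result.extend(buffer)
        buffer.clear()

    for value in values:
        line = str(value)
        if value == last:
            buffer.append(line)
            continue
        flush()
        last = value
        result.append(line)
    flush()  # without this final flush, repeats at the end of the range vanish
    return result


print(collapse([1, 2, 0, 0, 0, 0]))  # -> ['1', '2', '0', '... 3 skipped']
```
With the final `flush()` removed, the trailing repeated line is silently dropped, which matches the truncated count-16 output above.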
| [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nPrints out pointer chains starting at some address in memory.\n\nGenerally used to print out the stack or register values.\n\"\"\"\n\nimport argparse\nimport collections\nimport math\n\nimport pwndbg.arch\nimport pwndbg.chain\nimport pwndbg.color.telescope as T\nimport pwndbg.color.theme as theme\nimport pwndbg.commands\nimport pwndbg.config\nimport pwndbg.memory\nimport pwndbg.regs\nimport pwndbg.typeinfo\n\ntelescope_lines = pwndbg.config.Parameter('telescope-lines', 8, 'number of lines to printed by the telescope command')\nskip_repeating_values = pwndbg.config.Parameter('telescope-skip-repeating-val', True,\n 'whether to skip repeating values of the telescope command')\nskip_repeating_values_minimum = pwndbg.config.Parameter('telescope-skip-repeating-val-minimum', 3,\n 'minimum amount of repeated values before skipping lines')\n\noffset_separator = theme.Parameter('telescope-offset-separator', '│', 'offset separator of the telescope command')\noffset_delimiter = theme.Parameter('telescope-offset-delimiter', ':', 'offset delimiter of the telescope command')\nrepeating_marker = theme.Parameter('telescope-repeating-marker', '... ↓',\n 'repeating values marker of the telescope command')\n\n\nparser = argparse.ArgumentParser(description=\"\"\"\n Recursively dereferences pointers starting at the specified address\n ($sp by default)\n \"\"\")\nparser.add_argument(\"address\", nargs=\"?\", default=None, type=int, help=\"The address to telescope at.\")\nparser.add_argument(\"count\", nargs=\"?\", default=telescope_lines, type=int, help=\"The number of lines to show.\")\[email protected](parser)\[email protected]\ndef telescope(address=None, count=telescope_lines, to_string=False):\n \"\"\"\n Recursively dereferences pointers starting at the specified address\n ($sp by default)\n \"\"\"\n ptrsize = pwndbg.typeinfo.ptrsize\n if telescope.repeat:\n address = telescope.last_address + ptrsize\n telescope.offset += 1\n else:\n telescope.offset = 0\n\n address = int(address if address else pwndbg.regs.sp) & pwndbg.arch.ptrmask\n count = max(int(count), 1) & pwndbg.arch.ptrmask\n delimiter = T.delimiter(offset_delimiter)\n separator = T.separator(offset_separator)\n\n # Allow invocation of \"telescope 20\" to dump 20 bytes at the stack pointer\n if address < pwndbg.memory.MMAP_MIN_ADDR and not pwndbg.memory.peek(address):\n count = address\n address = pwndbg.regs.sp\n\n # Allow invocation of \"telescope a b\" to dump all bytes from A to B\n if int(address) <= int(count):\n # adjust count if it is an address. 
use ceil division as count is number of\n # ptrsize values and we don't want to strip out a value if dest is unaligned\n count -= address\n count = max(math.ceil(count / ptrsize), 1)\n\n reg_values = collections.defaultdict(lambda: [])\n for reg in pwndbg.regs.common:\n reg_values[pwndbg.regs[reg]].append(reg)\n # address = pwndbg.memory.poi(pwndbg.typeinfo.ppvoid, address)\n\n start = address\n stop = address + (count*ptrsize)\n step = ptrsize\n\n # Find all registers which show up in the trace\n regs = {}\n for i in range(start, stop, step):\n values = list(reg_values[i])\n\n for width in range(1, pwndbg.arch.ptrsize):\n values.extend('%s-%i' % (r,width) for r in reg_values[i+width])\n\n regs[i] = ' '.join(values)\n\n # Find the longest set of register information\n if regs:\n longest_regs = max(map(len, regs.values()))\n else:\n longest_regs = 0\n\n # Print everything out\n result = []\n last = None\n collapse_buffer = []\n skipped_padding = 2 + len(offset_delimiter) + 4 + len(offset_separator) + 1 + longest_regs + 1 - len(repeating_marker)\n\n # Collapse repeating values exceeding minimum delta.\n def collapse_repeating_values():\n # The first line was already printed, hence increment by 1\n if collapse_buffer and len(collapse_buffer) + 1 >= skip_repeating_values_minimum:\n result.append(T.repeating_marker('%s%s%i skipped' % (repeating_marker, ' ' * skipped_padding, len(collapse_buffer))))\n else:\n result.extend(collapse_buffer)\n collapse_buffer.clear()\n\n for i, addr in enumerate(range(start, stop, step)):\n if not pwndbg.memory.peek(addr):\n collapse_repeating_values()\n result.append(\"<Could not read memory at %#x>\" % addr)\n break\n\n line = ' '.join((T.offset(\"%02x%s%04x%s\" % (i + telescope.offset, delimiter,\n addr - start + (telescope.offset * ptrsize), separator)),\n T.register(regs[addr].ljust(longest_regs)),\n pwndbg.chain.format(addr)))\n\n # Buffer repeating values.\n if skip_repeating_values:\n value = pwndbg.memory.pvoid(addr)\n if last == value:\n collapse_buffer.append(line)\n continue\n collapse_repeating_values()\n last = value\n\n result.append(line)\n\n telescope.offset += i\n telescope.last_address = addr\n\n if not to_string:\n print('\\n'.join(result))\n\n return result\n\n\nparser = argparse.ArgumentParser(description='dereferences on stack data with specified count and offset.')\nparser.add_argument('count', nargs='?', default=8, type=int,\n help='number of element to dump')\nparser.add_argument('offset', nargs='?', default=0, type=int,\n help='Element offset from $sp (support negative offset)')\n\n\[email protected](parser)\[email protected]\ndef stack(count, offset):\n ptrsize = pwndbg.typeinfo.ptrsize\n telescope.repeat = stack.repeat\n telescope(address=pwndbg.regs.sp + offset * ptrsize, count=count)\n\n\ntelescope.last_address = 0\ntelescope.offset = 0\n",
"path": "pwndbg/commands/telescope.py"
}
] | [
{
"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nPrints out pointer chains starting at some address in memory.\n\nGenerally used to print out the stack or register values.\n\"\"\"\n\nimport argparse\nimport collections\nimport math\n\nimport pwndbg.arch\nimport pwndbg.chain\nimport pwndbg.color.telescope as T\nimport pwndbg.color.theme as theme\nimport pwndbg.commands\nimport pwndbg.config\nimport pwndbg.memory\nimport pwndbg.regs\nimport pwndbg.typeinfo\n\ntelescope_lines = pwndbg.config.Parameter('telescope-lines', 8, 'number of lines to printed by the telescope command')\nskip_repeating_values = pwndbg.config.Parameter('telescope-skip-repeating-val', True,\n 'whether to skip repeating values of the telescope command')\nskip_repeating_values_minimum = pwndbg.config.Parameter('telescope-skip-repeating-val-minimum', 3,\n 'minimum amount of repeated values before skipping lines')\n\noffset_separator = theme.Parameter('telescope-offset-separator', '│', 'offset separator of the telescope command')\noffset_delimiter = theme.Parameter('telescope-offset-delimiter', ':', 'offset delimiter of the telescope command')\nrepeating_marker = theme.Parameter('telescope-repeating-marker', '... ↓',\n 'repeating values marker of the telescope command')\n\n\nparser = argparse.ArgumentParser(description=\"\"\"\n Recursively dereferences pointers starting at the specified address\n ($sp by default)\n \"\"\")\nparser.add_argument(\"address\", nargs=\"?\", default=None, type=int, help=\"The address to telescope at.\")\nparser.add_argument(\"count\", nargs=\"?\", default=telescope_lines, type=int, help=\"The number of lines to show.\")\[email protected](parser)\[email protected]\ndef telescope(address=None, count=telescope_lines, to_string=False):\n \"\"\"\n Recursively dereferences pointers starting at the specified address\n ($sp by default)\n \"\"\"\n ptrsize = pwndbg.typeinfo.ptrsize\n if telescope.repeat:\n address = telescope.last_address + ptrsize\n telescope.offset += 1\n else:\n telescope.offset = 0\n\n address = int(address if address else pwndbg.regs.sp) & pwndbg.arch.ptrmask\n count = max(int(count), 1) & pwndbg.arch.ptrmask\n delimiter = T.delimiter(offset_delimiter)\n separator = T.separator(offset_separator)\n\n # Allow invocation of \"telescope 20\" to dump 20 bytes at the stack pointer\n if address < pwndbg.memory.MMAP_MIN_ADDR and not pwndbg.memory.peek(address):\n count = address\n address = pwndbg.regs.sp\n\n # Allow invocation of \"telescope a b\" to dump all bytes from A to B\n if int(address) <= int(count):\n # adjust count if it is an address. 
use ceil division as count is number of\n # ptrsize values and we don't want to strip out a value if dest is unaligned\n count -= address\n count = max(math.ceil(count / ptrsize), 1)\n\n reg_values = collections.defaultdict(lambda: [])\n for reg in pwndbg.regs.common:\n reg_values[pwndbg.regs[reg]].append(reg)\n # address = pwndbg.memory.poi(pwndbg.typeinfo.ppvoid, address)\n\n start = address\n stop = address + (count*ptrsize)\n step = ptrsize\n\n # Find all registers which show up in the trace\n regs = {}\n for i in range(start, stop, step):\n values = list(reg_values[i])\n\n for width in range(1, pwndbg.arch.ptrsize):\n values.extend('%s-%i' % (r,width) for r in reg_values[i+width])\n\n regs[i] = ' '.join(values)\n\n # Find the longest set of register information\n if regs:\n longest_regs = max(map(len, regs.values()))\n else:\n longest_regs = 0\n\n # Print everything out\n result = []\n last = None\n collapse_buffer = []\n skipped_padding = 2 + len(offset_delimiter) + 4 + len(offset_separator) + 1 + longest_regs + 1 - len(repeating_marker)\n\n # Collapse repeating values exceeding minimum delta.\n def collapse_repeating_values():\n # The first line was already printed, hence increment by 1\n if collapse_buffer and len(collapse_buffer) + 1 >= skip_repeating_values_minimum:\n result.append(T.repeating_marker('%s%s%i skipped' % (repeating_marker, ' ' * skipped_padding, len(collapse_buffer))))\n else:\n result.extend(collapse_buffer)\n collapse_buffer.clear()\n\n for i, addr in enumerate(range(start, stop, step)):\n if not pwndbg.memory.peek(addr):\n collapse_repeating_values()\n result.append(\"<Could not read memory at %#x>\" % addr)\n break\n\n line = ' '.join((T.offset(\"%02x%s%04x%s\" % (i + telescope.offset, delimiter,\n addr - start + (telescope.offset * ptrsize), separator)),\n T.register(regs[addr].ljust(longest_regs)),\n pwndbg.chain.format(addr)))\n\n # Buffer repeating values.\n if skip_repeating_values:\n value = pwndbg.memory.pvoid(addr)\n if last == value:\n collapse_buffer.append(line)\n continue\n collapse_repeating_values()\n last = value\n\n result.append(line)\n\n collapse_repeating_values()\n telescope.offset += i\n telescope.last_address = addr\n\n if not to_string:\n print('\\n'.join(result))\n\n return result\n\n\nparser = argparse.ArgumentParser(description='dereferences on stack data with specified count and offset.')\nparser.add_argument('count', nargs='?', default=8, type=int,\n help='number of element to dump')\nparser.add_argument('offset', nargs='?', default=0, type=int,\n help='Element offset from $sp (support negative offset)')\n\n\[email protected](parser)\[email protected]\ndef stack(count, offset):\n ptrsize = pwndbg.typeinfo.ptrsize\n telescope.repeat = stack.repeat\n telescope(address=pwndbg.regs.sp + offset * ptrsize, count=count)\n\n\ntelescope.last_address = 0\ntelescope.offset = 0\n",
"path": "pwndbg/commands/telescope.py"
}
] | diff --git a/pwndbg/commands/telescope.py b/pwndbg/commands/telescope.py
index 1385590c4c1..c524705eb00 100644
--- a/pwndbg/commands/telescope.py
+++ b/pwndbg/commands/telescope.py
@@ -131,6 +131,7 @@ def collapse_repeating_values():
result.append(line)
+ collapse_repeating_values()
telescope.offset += i
telescope.last_address = addr
|
ipython__ipython-3013 | cython_pyximport reload broken in python3
Python 3.3 notebook, tested in 0.13.1, but the code looks the same in HEAD:

    %%cython_pyximport foo
    def f(x):
        return 4.0*x

Execute it twice and you get:
```
/usr/lib/python3/dist-packages/IPython/extensions/cythonmagic.py in cython_pyximport(self, line, cell)
99 if module_name in self._reloads:
100 module = self._reloads[module_name]
--> 101 reload(module)
102 else:
103 __import__(module_name)
NameError: global name 'reload' is not defined
```
imp.reload should be used here
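For reference, a 2/3-compatible shim along the lines of the patch shown later in this record: keep the Python 2 builtin when it exists and fall back to `imp.reload` on Python 3.
```python
try:
    reload  # Python 2: a builtin
except NameError:  # Python 3: the builtin is gone
    from imp import reload  # deprecated in later 3.x, but fine for the 3.3-era code here

import collections
reload(collections)  # behaves the same under both interpreters
```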
| [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\n=====================\nCython related magics\n=====================\n\nUsage\n=====\n\n``%%cython``\n\n{CYTHON_DOC}\n\n``%%cython_inline``\n\n{CYTHON_INLINE_DOC}\n\n``%%cython_pyximport``\n\n{CYTHON_PYXIMPORT_DOC}\n\nAuthor:\n* Brian Granger\n\nParts of this code were taken from Cython.inline.\n\"\"\"\n#-----------------------------------------------------------------------------\n# Copyright (C) 2010-2011, IPython Development Team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\nfrom __future__ import print_function\n\nimport imp\nimport io\nimport os\nimport re\nimport sys\nimport time\n\ntry:\n import hashlib\nexcept ImportError:\n import md5 as hashlib\n\nfrom distutils.core import Distribution, Extension\nfrom distutils.command.build_ext import build_ext\n\nfrom IPython.core import display\nfrom IPython.core import magic_arguments\nfrom IPython.core.magic import Magics, magics_class, cell_magic\nfrom IPython.testing.skipdoctest import skip_doctest\nfrom IPython.utils import py3compat\n\nimport Cython\nfrom Cython.Compiler.Errors import CompileError\nfrom Cython.Build.Dependencies import cythonize\n\n\n@magics_class\nclass CythonMagics(Magics):\n\n def __init__(self, shell):\n super(CythonMagics,self).__init__(shell)\n self._reloads = {}\n self._code_cache = {}\n\n def _import_all(self, module):\n for k,v in module.__dict__.items():\n if not k.startswith('__'):\n self.shell.push({k:v})\n\n @cell_magic\n def cython_inline(self, line, cell):\n \"\"\"Compile and run a Cython code cell using Cython.inline.\n\n This magic simply passes the body of the cell to Cython.inline\n and returns the result. If the variables `a` and `b` are defined\n in the user's namespace, here is a simple example that returns\n their sum::\n\n %%cython_inline\n return a+b\n\n For most purposes, we recommend the usage of the `%%cython` magic.\n \"\"\"\n locs = self.shell.user_global_ns\n globs = self.shell.user_ns\n return Cython.inline(cell, locals=locs, globals=globs)\n\n @cell_magic\n def cython_pyximport(self, line, cell):\n \"\"\"Compile and import a Cython code cell using pyximport.\n\n The contents of the cell are written to a `.pyx` file in the current\n working directory, which is then imported using `pyximport`. This\n magic requires a module name to be passed::\n\n %%cython_pyximport modulename\n def f(x):\n return 2.0*x\n\n The compiled module is then imported and all of its symbols are\n injected into the user's namespace. 
For most purposes, we recommend\n the usage of the `%%cython` magic.\n \"\"\"\n module_name = line.strip()\n if not module_name:\n raise ValueError('module name must be given')\n fname = module_name + '.pyx'\n with io.open(fname, 'w', encoding='utf-8') as f:\n f.write(cell)\n if 'pyximport' not in sys.modules:\n import pyximport\n pyximport.install(reload_support=True)\n if module_name in self._reloads:\n module = self._reloads[module_name]\n reload(module)\n else:\n __import__(module_name)\n module = sys.modules[module_name]\n self._reloads[module_name] = module\n self._import_all(module)\n\n @magic_arguments.magic_arguments()\n @magic_arguments.argument(\n '-c', '--compile-args', action='append', default=[],\n help=\"Extra flags to pass to compiler via the `extra_compile_args` \"\n \"Extension flag (can be specified multiple times).\"\n )\n @magic_arguments.argument(\n '--link-args', action='append', default=[],\n help=\"Extra flags to pass to linker via the `extra_link_args` \"\n \"Extension flag (can be specified multiple times).\"\n )\n @magic_arguments.argument(\n '-l', '--lib', action='append', default=[],\n help=\"Add a library to link the extension against (can be specified \"\n \"multiple times).\"\n )\n @magic_arguments.argument(\n '-L', dest='library_dirs', metavar='dir', action='append', default=[],\n help=\"Add a path to the list of libary directories (can be specified \"\n \"multiple times).\"\n )\n @magic_arguments.argument(\n '-I', '--include', action='append', default=[],\n help=\"Add a path to the list of include directories (can be specified \"\n \"multiple times).\"\n )\n @magic_arguments.argument(\n '-+', '--cplus', action='store_true', default=False,\n help=\"Output a C++ rather than C file.\"\n )\n @magic_arguments.argument(\n '-f', '--force', action='store_true', default=False,\n help=\"Force the compilation of a new module, even if the source has been \"\n \"previously compiled.\"\n )\n @magic_arguments.argument(\n '-a', '--annotate', action='store_true', default=False,\n help=\"Produce a colorized HTML version of the source.\"\n )\n @cell_magic\n def cython(self, line, cell):\n \"\"\"Compile and import everything from a Cython code cell.\n\n The contents of the cell are written to a `.pyx` file in the\n directory `IPYTHONDIR/cython` using a filename with the hash of the\n code. This file is then cythonized and compiled. The resulting module\n is imported and all of its symbols are injected into the user's\n namespace. The usage is similar to that of `%%cython_pyximport` but\n you don't have to pass a module name::\n\n %%cython\n def f(x):\n return 2.0*x\n\n To compile OpenMP codes, pass the required `--compile-args`\n and `--link-args`. 
For example with gcc::\n\n %%cython --compile-args=-fopenmp --link-args=-fopenmp\n ...\n \"\"\"\n args = magic_arguments.parse_argstring(self.cython, line)\n code = cell if cell.endswith('\\n') else cell+'\\n'\n lib_dir = os.path.join(self.shell.ipython_dir, 'cython')\n quiet = True\n key = code, sys.version_info, sys.executable, Cython.__version__\n\n if not os.path.exists(lib_dir):\n os.makedirs(lib_dir)\n\n if args.force:\n # Force a new module name by adding the current time to the\n # key which is hashed to determine the module name.\n key += time.time(),\n\n module_name = \"_cython_magic_\" + hashlib.md5(str(key).encode('utf-8')).hexdigest()\n module_path = os.path.join(lib_dir, module_name + self.so_ext)\n\n have_module = os.path.isfile(module_path)\n need_cythonize = not have_module\n\n if args.annotate:\n html_file = os.path.join(lib_dir, module_name + '.html')\n if not os.path.isfile(html_file):\n need_cythonize = True\n\n if need_cythonize:\n c_include_dirs = args.include\n if 'numpy' in code:\n import numpy\n c_include_dirs.append(numpy.get_include())\n pyx_file = os.path.join(lib_dir, module_name + '.pyx')\n pyx_file = py3compat.cast_bytes_py2(pyx_file, encoding=sys.getfilesystemencoding())\n with io.open(pyx_file, 'w', encoding='utf-8') as f:\n f.write(code)\n extension = Extension(\n name = module_name,\n sources = [pyx_file],\n include_dirs = c_include_dirs,\n library_dirs = args.library_dirs,\n extra_compile_args = args.compile_args,\n extra_link_args = args.link_args,\n libraries = args.lib,\n language = 'c++' if args.cplus else 'c',\n )\n build_extension = self._get_build_extension()\n try:\n opts = dict(\n quiet=quiet,\n annotate = args.annotate,\n force = True,\n )\n build_extension.extensions = cythonize([extension], **opts)\n except CompileError:\n return\n\n if not have_module:\n build_extension.build_temp = os.path.dirname(pyx_file)\n build_extension.build_lib = lib_dir\n build_extension.run()\n self._code_cache[key] = module_name\n\n module = imp.load_dynamic(module_name, module_path)\n self._import_all(module)\n\n if args.annotate:\n try:\n with io.open(html_file, encoding='utf-8') as f:\n annotated_html = f.read()\n except IOError as e:\n # File could not be opened. 
Most likely the user has a version\n # of Cython before 0.15.1 (when `cythonize` learned the\n # `force` keyword argument) and has already compiled this\n # exact source without annotation.\n print('Cython completed successfully but the annotated '\n 'source could not be read.', file=sys.stderr)\n print(e, file=sys.stderr)\n else:\n return display.HTML(self.clean_annotated_html(annotated_html))\n\n @property\n def so_ext(self):\n \"\"\"The extension suffix for compiled modules.\"\"\"\n try:\n return self._so_ext\n except AttributeError:\n self._so_ext = self._get_build_extension().get_ext_filename('')\n return self._so_ext\n\n def _get_build_extension(self):\n dist = Distribution()\n config_files = dist.find_config_files()\n try:\n config_files.remove('setup.cfg')\n except ValueError:\n pass\n dist.parse_config_files(config_files)\n build_extension = build_ext(dist)\n build_extension.finalize_options()\n return build_extension\n\n @staticmethod\n def clean_annotated_html(html):\n \"\"\"Clean up the annotated HTML source.\n\n Strips the link to the generated C or C++ file, which we do not\n present to the user.\n \"\"\"\n r = re.compile('<p>Raw output: <a href=\"(.*)\">(.*)</a>')\n html = '\\n'.join(l for l in html.splitlines() if not r.match(l))\n return html\n\n__doc__ = __doc__.format(\n CYTHON_DOC = ' '*8 + CythonMagics.cython.__doc__,\n CYTHON_INLINE_DOC = ' '*8 + CythonMagics.cython_inline.__doc__,\n CYTHON_PYXIMPORT_DOC = ' '*8 + CythonMagics.cython_pyximport.__doc__,\n)\n\ndef load_ipython_extension(ip):\n \"\"\"Load the extension in IPython.\"\"\"\n ip.register_magics(CythonMagics)\n",
"path": "IPython/extensions/cythonmagic.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\n=====================\nCython related magics\n=====================\n\nUsage\n=====\n\n``%%cython``\n\n{CYTHON_DOC}\n\n``%%cython_inline``\n\n{CYTHON_INLINE_DOC}\n\n``%%cython_pyximport``\n\n{CYTHON_PYXIMPORT_DOC}\n\nAuthor:\n* Brian Granger\n\nParts of this code were taken from Cython.inline.\n\"\"\"\n#-----------------------------------------------------------------------------\n# Copyright (C) 2010-2011, IPython Development Team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\nfrom __future__ import print_function\n\nimport imp\nimport io\nimport os\nimport re\nimport sys\nimport time\n\ntry:\n reload\nexcept NameError: # Python 3\n from imp import reload\n\ntry:\n import hashlib\nexcept ImportError:\n import md5 as hashlib\n\nfrom distutils.core import Distribution, Extension\nfrom distutils.command.build_ext import build_ext\n\nfrom IPython.core import display\nfrom IPython.core import magic_arguments\nfrom IPython.core.magic import Magics, magics_class, cell_magic\nfrom IPython.testing.skipdoctest import skip_doctest\nfrom IPython.utils import py3compat\n\nimport Cython\nfrom Cython.Compiler.Errors import CompileError\nfrom Cython.Build.Dependencies import cythonize\n\n\n@magics_class\nclass CythonMagics(Magics):\n\n def __init__(self, shell):\n super(CythonMagics,self).__init__(shell)\n self._reloads = {}\n self._code_cache = {}\n\n def _import_all(self, module):\n for k,v in module.__dict__.items():\n if not k.startswith('__'):\n self.shell.push({k:v})\n\n @cell_magic\n def cython_inline(self, line, cell):\n \"\"\"Compile and run a Cython code cell using Cython.inline.\n\n This magic simply passes the body of the cell to Cython.inline\n and returns the result. If the variables `a` and `b` are defined\n in the user's namespace, here is a simple example that returns\n their sum::\n\n %%cython_inline\n return a+b\n\n For most purposes, we recommend the usage of the `%%cython` magic.\n \"\"\"\n locs = self.shell.user_global_ns\n globs = self.shell.user_ns\n return Cython.inline(cell, locals=locs, globals=globs)\n\n @cell_magic\n def cython_pyximport(self, line, cell):\n \"\"\"Compile and import a Cython code cell using pyximport.\n\n The contents of the cell are written to a `.pyx` file in the current\n working directory, which is then imported using `pyximport`. This\n magic requires a module name to be passed::\n\n %%cython_pyximport modulename\n def f(x):\n return 2.0*x\n\n The compiled module is then imported and all of its symbols are\n injected into the user's namespace. 
For most purposes, we recommend\n the usage of the `%%cython` magic.\n \"\"\"\n module_name = line.strip()\n if not module_name:\n raise ValueError('module name must be given')\n fname = module_name + '.pyx'\n with io.open(fname, 'w', encoding='utf-8') as f:\n f.write(cell)\n if 'pyximport' not in sys.modules:\n import pyximport\n pyximport.install(reload_support=True)\n if module_name in self._reloads:\n module = self._reloads[module_name]\n reload(module)\n else:\n __import__(module_name)\n module = sys.modules[module_name]\n self._reloads[module_name] = module\n self._import_all(module)\n\n @magic_arguments.magic_arguments()\n @magic_arguments.argument(\n '-c', '--compile-args', action='append', default=[],\n help=\"Extra flags to pass to compiler via the `extra_compile_args` \"\n \"Extension flag (can be specified multiple times).\"\n )\n @magic_arguments.argument(\n '--link-args', action='append', default=[],\n help=\"Extra flags to pass to linker via the `extra_link_args` \"\n \"Extension flag (can be specified multiple times).\"\n )\n @magic_arguments.argument(\n '-l', '--lib', action='append', default=[],\n help=\"Add a library to link the extension against (can be specified \"\n \"multiple times).\"\n )\n @magic_arguments.argument(\n '-L', dest='library_dirs', metavar='dir', action='append', default=[],\n help=\"Add a path to the list of libary directories (can be specified \"\n \"multiple times).\"\n )\n @magic_arguments.argument(\n '-I', '--include', action='append', default=[],\n help=\"Add a path to the list of include directories (can be specified \"\n \"multiple times).\"\n )\n @magic_arguments.argument(\n '-+', '--cplus', action='store_true', default=False,\n help=\"Output a C++ rather than C file.\"\n )\n @magic_arguments.argument(\n '-f', '--force', action='store_true', default=False,\n help=\"Force the compilation of a new module, even if the source has been \"\n \"previously compiled.\"\n )\n @magic_arguments.argument(\n '-a', '--annotate', action='store_true', default=False,\n help=\"Produce a colorized HTML version of the source.\"\n )\n @cell_magic\n def cython(self, line, cell):\n \"\"\"Compile and import everything from a Cython code cell.\n\n The contents of the cell are written to a `.pyx` file in the\n directory `IPYTHONDIR/cython` using a filename with the hash of the\n code. This file is then cythonized and compiled. The resulting module\n is imported and all of its symbols are injected into the user's\n namespace. The usage is similar to that of `%%cython_pyximport` but\n you don't have to pass a module name::\n\n %%cython\n def f(x):\n return 2.0*x\n\n To compile OpenMP codes, pass the required `--compile-args`\n and `--link-args`. 
For example with gcc::\n\n %%cython --compile-args=-fopenmp --link-args=-fopenmp\n ...\n \"\"\"\n args = magic_arguments.parse_argstring(self.cython, line)\n code = cell if cell.endswith('\\n') else cell+'\\n'\n lib_dir = os.path.join(self.shell.ipython_dir, 'cython')\n quiet = True\n key = code, sys.version_info, sys.executable, Cython.__version__\n\n if not os.path.exists(lib_dir):\n os.makedirs(lib_dir)\n\n if args.force:\n # Force a new module name by adding the current time to the\n # key which is hashed to determine the module name.\n key += time.time(),\n\n module_name = \"_cython_magic_\" + hashlib.md5(str(key).encode('utf-8')).hexdigest()\n module_path = os.path.join(lib_dir, module_name + self.so_ext)\n\n have_module = os.path.isfile(module_path)\n need_cythonize = not have_module\n\n if args.annotate:\n html_file = os.path.join(lib_dir, module_name + '.html')\n if not os.path.isfile(html_file):\n need_cythonize = True\n\n if need_cythonize:\n c_include_dirs = args.include\n if 'numpy' in code:\n import numpy\n c_include_dirs.append(numpy.get_include())\n pyx_file = os.path.join(lib_dir, module_name + '.pyx')\n pyx_file = py3compat.cast_bytes_py2(pyx_file, encoding=sys.getfilesystemencoding())\n with io.open(pyx_file, 'w', encoding='utf-8') as f:\n f.write(code)\n extension = Extension(\n name = module_name,\n sources = [pyx_file],\n include_dirs = c_include_dirs,\n library_dirs = args.library_dirs,\n extra_compile_args = args.compile_args,\n extra_link_args = args.link_args,\n libraries = args.lib,\n language = 'c++' if args.cplus else 'c',\n )\n build_extension = self._get_build_extension()\n try:\n opts = dict(\n quiet=quiet,\n annotate = args.annotate,\n force = True,\n )\n build_extension.extensions = cythonize([extension], **opts)\n except CompileError:\n return\n\n if not have_module:\n build_extension.build_temp = os.path.dirname(pyx_file)\n build_extension.build_lib = lib_dir\n build_extension.run()\n self._code_cache[key] = module_name\n\n module = imp.load_dynamic(module_name, module_path)\n self._import_all(module)\n\n if args.annotate:\n try:\n with io.open(html_file, encoding='utf-8') as f:\n annotated_html = f.read()\n except IOError as e:\n # File could not be opened. 
Most likely the user has a version\n # of Cython before 0.15.1 (when `cythonize` learned the\n # `force` keyword argument) and has already compiled this\n # exact source without annotation.\n print('Cython completed successfully but the annotated '\n 'source could not be read.', file=sys.stderr)\n print(e, file=sys.stderr)\n else:\n return display.HTML(self.clean_annotated_html(annotated_html))\n\n @property\n def so_ext(self):\n \"\"\"The extension suffix for compiled modules.\"\"\"\n try:\n return self._so_ext\n except AttributeError:\n self._so_ext = self._get_build_extension().get_ext_filename('')\n return self._so_ext\n\n def _get_build_extension(self):\n dist = Distribution()\n config_files = dist.find_config_files()\n try:\n config_files.remove('setup.cfg')\n except ValueError:\n pass\n dist.parse_config_files(config_files)\n build_extension = build_ext(dist)\n build_extension.finalize_options()\n return build_extension\n\n @staticmethod\n def clean_annotated_html(html):\n \"\"\"Clean up the annotated HTML source.\n\n Strips the link to the generated C or C++ file, which we do not\n present to the user.\n \"\"\"\n r = re.compile('<p>Raw output: <a href=\"(.*)\">(.*)</a>')\n html = '\\n'.join(l for l in html.splitlines() if not r.match(l))\n return html\n\n__doc__ = __doc__.format(\n CYTHON_DOC = ' '*8 + CythonMagics.cython.__doc__,\n CYTHON_INLINE_DOC = ' '*8 + CythonMagics.cython_inline.__doc__,\n CYTHON_PYXIMPORT_DOC = ' '*8 + CythonMagics.cython_pyximport.__doc__,\n)\n\ndef load_ipython_extension(ip):\n \"\"\"Load the extension in IPython.\"\"\"\n ip.register_magics(CythonMagics)\n",
"path": "IPython/extensions/cythonmagic.py"
}
] | diff --git a/IPython/extensions/cythonmagic.py b/IPython/extensions/cythonmagic.py
index a4217943269..45a529bdcd5 100644
--- a/IPython/extensions/cythonmagic.py
+++ b/IPython/extensions/cythonmagic.py
@@ -41,6 +41,11 @@
import sys
import time
+try:
+ reload
+except NameError: # Python 3
+ from imp import reload
+
try:
import hashlib
except ImportError:
|
PyGithub__PyGithub-1807 | Adding new attribute fails in case new name is the last in the list
### Problem Statement
```bash
$ python scripts/add_attribute.py Permissions triage bool
Traceback (most recent call last):
File "<...>\PyGithub\scripts\add_attribute.py", line 124, in <module>
line = lines[i].rstrip()
IndexError: list index out of range
```
--> Adding a new attribute at the end of the existing list of attributes in class `Permissions` fails.
--> In this case the last attribute name was "push", so "triage" comes last.
https://github.com/PyGithub/PyGithub/blob/34d097ce473601624722b90fc5d0396011dd3acb/github/Permissions.py#L63-L72
### Solution Approach
When the new attribute name sorts last among the existing attributes, the first loop in the script (around https://github.com/PyGithub/PyGithub/blob/master/scripts/add_attribute.py#L89) has already consumed the source line containing the `_initAttributes` definition by the time it stops.
Subsequently, https://github.com/PyGithub/PyGithub/blob/master/scripts/add_attribute.py#L122 resets `inInit` to `False`, and the marker line is only checked again after the next line has been read. As a result the following block never notices the `_initAttributes` function again and runs off the end of the file.
The problem can be fixed by remembering whether we already reached the `_initAttributes` function, i.e. replace:
https://github.com/PyGithub/PyGithub/blob/34d097ce473601624722b90fc5d0396011dd3acb/scripts/add_attribute.py#L122
with
```python
inInit = True if line == " def _initAttributes(self):" else False
```
| [
{
"content": "#!/usr/bin/env python\n\n############################ Copyrights and license ############################\n# #\n# Copyright 2013 Vincent Jacques <[email protected]> #\n# Copyright 2014 Thialfihar <[email protected]> #\n# Copyright 2014 Vincent Jacques <[email protected]> #\n# Copyright 2016 Peter Buckley <[email protected]> #\n# Copyright 2018 sfdye <[email protected]> #\n# Copyright 2018 bbi-yggy <[email protected]> #\n# #\n# This file is part of PyGithub. #\n# http://pygithub.readthedocs.io/ #\n# #\n# PyGithub is free software: you can redistribute it and/or modify it under #\n# the terms of the GNU Lesser General Public License as published by the Free #\n# Software Foundation, either version 3 of the License, or (at your option) #\n# any later version. #\n# #\n# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #\n# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #\n# details. #\n# #\n# You should have received a copy of the GNU Lesser General Public License #\n# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #\n# #\n################################################################################\n\nimport os.path\nimport sys\n\nclassName, attributeName, attributeType = sys.argv[1:4]\nif len(sys.argv) > 4:\n attributeClassType = sys.argv[4]\nelse:\n attributeClassType = \"\"\n\n\ntypes = {\n \"string\": (\n \"string\",\n None,\n 'self._makeStringAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"int\": (\n \"integer\",\n None,\n 'self._makeIntAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"bool\": (\n \"bool\",\n None,\n 'self._makeBoolAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"datetime\": (\n \"datetime.datetime\",\n \"(str, unicode)\",\n 'self._makeDatetimeAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"class\": (\n \":class:`\" + attributeClassType + \"`\",\n None,\n \"self._makeClassAttribute(\"\n + attributeClassType\n + ', attributes[\"'\n + attributeName\n + '\"])',\n ),\n}\n\nattributeDocType, attributeAssertType, attributeValue = types[attributeType]\n\n\nfileName = os.path.join(\"github\", className + \".py\")\n\nwith open(fileName) as f:\n lines = list(f)\n\nnewLines = []\n\ni = 0\n\nadded = False\n\nisCompletable = True\nisProperty = False\nwhile not added:\n line = lines[i].rstrip()\n i += 1\n if line.startswith(\"class \"):\n if \"NonCompletableGithubObject\" in line:\n isCompletable = False\n elif line == \" @property\":\n isProperty = True\n elif line.startswith(\" def \"):\n attrName = line[8:-7]\n # Properties will be inserted after __repr__, but before any other function.\n if attrName != \"__repr__\" and (\n attrName == \"_identity\" or attrName > attributeName or not isProperty\n ):\n if not isProperty:\n newLines.append(\" @property\")\n newLines.append(\" def \" + attributeName + \"(self):\")\n newLines.append(' \"\"\"')\n newLines.append(\" :type: \" + attributeDocType)\n newLines.append(' \"\"\"')\n if isCompletable:\n newLines.append(\n \" self._completeIfNotSet(self._\" + attributeName + \")\"\n )\n newLines.append(\" return self._\" + attributeName + \".value\")\n newLines.append(\"\")\n if isProperty:\n newLines.append(\" @property\")\n added = True\n isProperty = False\n newLines.append(line)\n\nadded = False\n\ninInit = False\nwhile not added:\n line = lines[i].rstrip()\n i += 1\n if line == \" def _initAttributes(self):\":\n inInit = True\n if 
inInit:\n if not line or line.endswith(\" = github.GithubObject.NotSet\"):\n if line:\n attrName = line[14:-29]\n if not line or attrName > attributeName:\n newLines.append(\n \" self._\" + attributeName + \" = github.GithubObject.NotSet\"\n )\n added = True\n newLines.append(line)\n\nadded = False\n\ninUse = False\nwhile not added:\n try:\n line = lines[i].rstrip()\n except IndexError:\n line = \"\"\n i += 1\n if line == \" def _useAttributes(self, attributes):\":\n inUse = True\n if inUse:\n if not line or line.endswith(\" in attributes: # pragma no branch\"):\n if line:\n attrName = line[12:-36]\n if not line or attrName > attributeName:\n newLines.append(\n ' if \"'\n + attributeName\n + '\" in attributes: # pragma no branch'\n )\n if attributeAssertType:\n newLines.append(\n ' assert attributes[\"'\n + attributeName\n + '\"] is None or isinstance(attributes[\"'\n + attributeName\n + '\"], '\n + attributeAssertType\n + '), attributes[\"'\n + attributeName\n + '\"]'\n )\n newLines.append(\n \" self._\" + attributeName + \" = \" + attributeValue\n )\n added = True\n newLines.append(line)\n\n\nwhile i < len(lines):\n line = lines[i].rstrip()\n i += 1\n newLines.append(line)\n\nwith open(fileName, \"w\") as f:\n for line in newLines:\n f.write(line + \"\\n\")\n",
"path": "scripts/add_attribute.py"
}
] | [
{
"content": "#!/usr/bin/env python\n\n############################ Copyrights and license ############################\n# #\n# Copyright 2013 Vincent Jacques <[email protected]> #\n# Copyright 2014 Thialfihar <[email protected]> #\n# Copyright 2014 Vincent Jacques <[email protected]> #\n# Copyright 2016 Peter Buckley <[email protected]> #\n# Copyright 2018 sfdye <[email protected]> #\n# Copyright 2018 bbi-yggy <[email protected]> #\n# #\n# This file is part of PyGithub. #\n# http://pygithub.readthedocs.io/ #\n# #\n# PyGithub is free software: you can redistribute it and/or modify it under #\n# the terms of the GNU Lesser General Public License as published by the Free #\n# Software Foundation, either version 3 of the License, or (at your option) #\n# any later version. #\n# #\n# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #\n# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #\n# details. #\n# #\n# You should have received a copy of the GNU Lesser General Public License #\n# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #\n# #\n################################################################################\n\nimport os.path\nimport sys\n\nclassName, attributeName, attributeType = sys.argv[1:4]\nif len(sys.argv) > 4:\n attributeClassType = sys.argv[4]\nelse:\n attributeClassType = \"\"\n\n\ntypes = {\n \"string\": (\n \"string\",\n None,\n 'self._makeStringAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"int\": (\n \"integer\",\n None,\n 'self._makeIntAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"bool\": (\n \"bool\",\n None,\n 'self._makeBoolAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"datetime\": (\n \"datetime.datetime\",\n \"(str, unicode)\",\n 'self._makeDatetimeAttribute(attributes[\"' + attributeName + '\"])',\n ),\n \"class\": (\n \":class:`\" + attributeClassType + \"`\",\n None,\n \"self._makeClassAttribute(\"\n + attributeClassType\n + ', attributes[\"'\n + attributeName\n + '\"])',\n ),\n}\n\nattributeDocType, attributeAssertType, attributeValue = types[attributeType]\n\n\nfileName = os.path.join(\"github\", className + \".py\")\n\nwith open(fileName) as f:\n lines = list(f)\n\nnewLines = []\n\ni = 0\n\nadded = False\n\nisCompletable = True\nisProperty = False\nwhile not added:\n line = lines[i].rstrip()\n i += 1\n if line.startswith(\"class \"):\n if \"NonCompletableGithubObject\" in line:\n isCompletable = False\n elif line == \" @property\":\n isProperty = True\n elif line.startswith(\" def \"):\n attrName = line[8:-7]\n # Properties will be inserted after __repr__, but before any other function.\n if attrName != \"__repr__\" and (\n attrName == \"_identity\" or attrName > attributeName or not isProperty\n ):\n if not isProperty:\n newLines.append(\" @property\")\n newLines.append(\" def \" + attributeName + \"(self):\")\n newLines.append(' \"\"\"')\n newLines.append(\" :type: \" + attributeDocType)\n newLines.append(' \"\"\"')\n if isCompletable:\n newLines.append(\n \" self._completeIfNotSet(self._\" + attributeName + \")\"\n )\n newLines.append(\" return self._\" + attributeName + \".value\")\n newLines.append(\"\")\n if isProperty:\n newLines.append(\" @property\")\n added = True\n isProperty = False\n newLines.append(line)\n\nadded = False\n\ninInit = line.endswith(\"def _initAttributes(self):\")\nwhile not added:\n line = lines[i].rstrip()\n i += 1\n if line == \" def 
_initAttributes(self):\":\n inInit = True\n if inInit:\n if not line or line.endswith(\" = github.GithubObject.NotSet\"):\n if line:\n attrName = line[14:-29]\n if not line or attrName > attributeName:\n newLines.append(\n \" self._\" + attributeName + \" = github.GithubObject.NotSet\"\n )\n added = True\n newLines.append(line)\n\nadded = False\n\ninUse = False\nwhile not added:\n try:\n line = lines[i].rstrip()\n except IndexError:\n line = \"\"\n i += 1\n if line == \" def _useAttributes(self, attributes):\":\n inUse = True\n if inUse:\n if not line or line.endswith(\" in attributes: # pragma no branch\"):\n if line:\n attrName = line[12:-36]\n if not line or attrName > attributeName:\n newLines.append(\n ' if \"'\n + attributeName\n + '\" in attributes: # pragma no branch'\n )\n if attributeAssertType:\n newLines.append(\n ' assert attributes[\"'\n + attributeName\n + '\"] is None or isinstance(attributes[\"'\n + attributeName\n + '\"], '\n + attributeAssertType\n + '), attributes[\"'\n + attributeName\n + '\"]'\n )\n newLines.append(\n \" self._\" + attributeName + \" = \" + attributeValue\n )\n added = True\n newLines.append(line)\n\n\nwhile i < len(lines):\n line = lines[i].rstrip()\n i += 1\n newLines.append(line)\n\nwith open(fileName, \"w\") as f:\n for line in newLines:\n f.write(line + \"\\n\")\n",
"path": "scripts/add_attribute.py"
}
] | diff --git a/scripts/add_attribute.py b/scripts/add_attribute.py
index d8b29be2e0..73bc1a58c5 100644
--- a/scripts/add_attribute.py
+++ b/scripts/add_attribute.py
@@ -119,7 +119,7 @@
added = False
-inInit = False
+inInit = line.endswith("def _initAttributes(self):")
while not added:
line = lines[i].rstrip()
i += 1
|
sanic-org__sanic-1559 | 2 failed tests when tox is not used (missing fixture "benchmark")
`pytest-benchmark` is not present in `tests_require`, so there are 2 failed tests in `tests/benchmark/test_route_resolution_benchmark.py` when tox is not used.
This requirement is present in `tox.ini`, so tox and Travis CI work fine.
(I don't know which fix is better: disabling the benchmark tests or adding `pytest-benchmark` to `tests_require`, so I didn't create a PR.)
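For reference, a minimal sketch of the second option, adding `pytest-benchmark` to the `tests_require` list in `setup.py` (only that list is shown, with most existing entries elided):

```python
# setup.py (excerpt) -- sketch: declare the package that provides the
# `benchmark` fixture so the benchmark tests can also run outside tox
tests_require = [
    "pytest==4.1.0",
    # ... other existing test dependencies ...
    "pytest-sanic",
    "pytest-sugar",
    "pytest-benchmark",  # added: supplies the missing `benchmark` fixture
]
```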
| [
{
"content": "\"\"\"\nSanic\n\"\"\"\nimport codecs\nimport os\nimport re\nimport sys\nfrom distutils.util import strtobool\n\nfrom setuptools import setup\nfrom setuptools.command.test import test as TestCommand\n\n\nclass PyTest(TestCommand):\n \"\"\"\n Provide a Test runner to be used from setup.py to run unit tests\n \"\"\"\n\n user_options = [(\"pytest-args=\", \"a\", \"Arguments to pass to pytest\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n self.pytest_args = \"\"\n\n def run_tests(self):\n import shlex\n import pytest\n\n errno = pytest.main(shlex.split(self.pytest_args))\n sys.exit(errno)\n\n\ndef open_local(paths, mode=\"r\", encoding=\"utf8\"):\n path = os.path.join(os.path.abspath(os.path.dirname(__file__)), *paths)\n\n return codecs.open(path, mode, encoding)\n\n\nwith open_local([\"sanic\", \"__init__.py\"], encoding=\"latin1\") as fp:\n try:\n version = re.findall(\n r\"^__version__ = \\\"([^']+)\\\"\\r?$\", fp.read(), re.M\n )[0]\n except IndexError:\n raise RuntimeError(\"Unable to determine version.\")\n\nwith open_local([\"README.rst\"]) as rm:\n long_description = rm.read()\n\nsetup_kwargs = {\n \"name\": \"sanic\",\n \"version\": version,\n \"url\": \"http://github.com/channelcat/sanic/\",\n \"license\": \"MIT\",\n \"author\": \"Channel Cat\",\n \"author_email\": \"[email protected]\",\n \"description\": (\n \"A microframework based on uvloop, httptools, and learnings of flask\"\n ),\n \"long_description\": long_description,\n \"packages\": [\"sanic\"],\n \"platforms\": \"any\",\n \"classifiers\": [\n \"Development Status :: 4 - Beta\",\n \"Environment :: Web Environment\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n ],\n}\n\nenv_dependency = (\n '; sys_platform != \"win32\" ' 'and implementation_name == \"cpython\"'\n)\nujson = \"ujson>=1.35\" + env_dependency\nuvloop = \"uvloop>=0.5.3\" + env_dependency\n\nrequirements = [\n \"httptools>=0.0.10\",\n uvloop,\n ujson,\n \"aiofiles>=0.3.0\",\n \"websockets>=6.0,<7.0\",\n \"multidict>=4.0,<5.0\",\n]\n\ntests_require = [\n \"pytest==4.1.0\",\n \"multidict>=4.0,<5.0\",\n \"gunicorn\",\n \"pytest-cov\",\n \"aiohttp>=2.3.0,<=3.2.1\",\n \"beautifulsoup4\",\n uvloop,\n ujson,\n \"pytest-sanic\",\n \"pytest-sugar\",\n]\n\nif strtobool(os.environ.get(\"SANIC_NO_UJSON\", \"no\")):\n print(\"Installing without uJSON\")\n requirements.remove(ujson)\n tests_require.remove(ujson)\n\n# 'nt' means windows OS\nif strtobool(os.environ.get(\"SANIC_NO_UVLOOP\", \"no\")):\n print(\"Installing without uvLoop\")\n requirements.remove(uvloop)\n tests_require.remove(uvloop)\n\nextras_require = {\n \"test\": tests_require,\n \"dev\": tests_require + [\"aiofiles\", \"tox\", \"black\", \"flake8\"],\n \"docs\": [\n \"sphinx\",\n \"sphinx_rtd_theme\",\n \"recommonmark\",\n \"sphinxcontrib-asyncio\",\n \"docutils\",\n \"pygments\"\n ],\n}\n\nsetup_kwargs[\"install_requires\"] = requirements\nsetup_kwargs[\"tests_require\"] = tests_require\nsetup_kwargs[\"extras_require\"] = extras_require\nsetup_kwargs[\"cmdclass\"] = {\"test\": PyTest}\nsetup(**setup_kwargs)\n",
"path": "setup.py"
}
] | [
{
"content": "\"\"\"\nSanic\n\"\"\"\nimport codecs\nimport os\nimport re\nimport sys\nfrom distutils.util import strtobool\n\nfrom setuptools import setup\nfrom setuptools.command.test import test as TestCommand\n\n\nclass PyTest(TestCommand):\n \"\"\"\n Provide a Test runner to be used from setup.py to run unit tests\n \"\"\"\n\n user_options = [(\"pytest-args=\", \"a\", \"Arguments to pass to pytest\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n self.pytest_args = \"\"\n\n def run_tests(self):\n import shlex\n import pytest\n\n errno = pytest.main(shlex.split(self.pytest_args))\n sys.exit(errno)\n\n\ndef open_local(paths, mode=\"r\", encoding=\"utf8\"):\n path = os.path.join(os.path.abspath(os.path.dirname(__file__)), *paths)\n\n return codecs.open(path, mode, encoding)\n\n\nwith open_local([\"sanic\", \"__init__.py\"], encoding=\"latin1\") as fp:\n try:\n version = re.findall(\n r\"^__version__ = \\\"([^']+)\\\"\\r?$\", fp.read(), re.M\n )[0]\n except IndexError:\n raise RuntimeError(\"Unable to determine version.\")\n\nwith open_local([\"README.rst\"]) as rm:\n long_description = rm.read()\n\nsetup_kwargs = {\n \"name\": \"sanic\",\n \"version\": version,\n \"url\": \"http://github.com/channelcat/sanic/\",\n \"license\": \"MIT\",\n \"author\": \"Channel Cat\",\n \"author_email\": \"[email protected]\",\n \"description\": (\n \"A microframework based on uvloop, httptools, and learnings of flask\"\n ),\n \"long_description\": long_description,\n \"packages\": [\"sanic\"],\n \"platforms\": \"any\",\n \"classifiers\": [\n \"Development Status :: 4 - Beta\",\n \"Environment :: Web Environment\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n ],\n}\n\nenv_dependency = (\n '; sys_platform != \"win32\" ' 'and implementation_name == \"cpython\"'\n)\nujson = \"ujson>=1.35\" + env_dependency\nuvloop = \"uvloop>=0.5.3\" + env_dependency\n\nrequirements = [\n \"httptools>=0.0.10\",\n uvloop,\n ujson,\n \"aiofiles>=0.3.0\",\n \"websockets>=6.0,<7.0\",\n \"multidict>=4.0,<5.0\",\n]\n\ntests_require = [\n \"pytest==4.1.0\",\n \"multidict>=4.0,<5.0\",\n \"gunicorn\",\n \"pytest-cov\",\n \"aiohttp>=2.3.0,<=3.2.1\",\n \"beautifulsoup4\",\n uvloop,\n ujson,\n \"pytest-sanic\",\n \"pytest-sugar\",\n \"pytest-benchmark\",\n]\n\nif strtobool(os.environ.get(\"SANIC_NO_UJSON\", \"no\")):\n print(\"Installing without uJSON\")\n requirements.remove(ujson)\n tests_require.remove(ujson)\n\n# 'nt' means windows OS\nif strtobool(os.environ.get(\"SANIC_NO_UVLOOP\", \"no\")):\n print(\"Installing without uvLoop\")\n requirements.remove(uvloop)\n tests_require.remove(uvloop)\n\nextras_require = {\n \"test\": tests_require,\n \"dev\": tests_require + [\"aiofiles\", \"tox\", \"black\", \"flake8\"],\n \"docs\": [\n \"sphinx\",\n \"sphinx_rtd_theme\",\n \"recommonmark\",\n \"sphinxcontrib-asyncio\",\n \"docutils\",\n \"pygments\"\n ],\n}\n\nsetup_kwargs[\"install_requires\"] = requirements\nsetup_kwargs[\"tests_require\"] = tests_require\nsetup_kwargs[\"extras_require\"] = extras_require\nsetup_kwargs[\"cmdclass\"] = {\"test\": PyTest}\nsetup(**setup_kwargs)\n",
"path": "setup.py"
}
] | diff --git a/setup.py b/setup.py
index 05d1363729..4a682151de 100644
--- a/setup.py
+++ b/setup.py
@@ -96,6 +96,7 @@ def open_local(paths, mode="r", encoding="utf8"):
ujson,
"pytest-sanic",
"pytest-sugar",
+ "pytest-benchmark",
]
if strtobool(os.environ.get("SANIC_NO_UJSON", "no")):
|
SCons__scons-4475 | Pseudo() global function missing
Initiated from discord discussion https://discord.com/channels/571796279483564041/571796280146133047/1204494883369263154
The documentation indicates both `Pseudo` and `env.Pseudo` work; in practice, the global function form does not work, generating an `AttributeError`.
A quick examination shows that the table of environment methods in `SCons/Script/__init__.py` which should be made into global functions does not contain `Pseudo`; this looks like an oversight.
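A minimal sketch of the kind of change this implies, adding `'Pseudo'` to that table (the list is abridged here; the surrounding entries are taken from the existing file):

```python
# SCons/Script/__init__.py (excerpt) -- sketch: environment methods that get
# wrapped as global SConscript functions via DefaultEnvironmentCall()
GlobalDefaultEnvironmentFunctions = [
    # ... other method names elided ...
    'ParseDepends',
    'Precious',
    'Pseudo',        # the missing entry; without it only env.Pseudo() works
    'PyPackageDir',
    'Repository',
    # ... other method names elided ...
]
```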
| [
{
"content": "# MIT License\n#\n# Copyright The SCons Foundation\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\"\"\"The main() function used by the scons script.\n\nArchitecturally, this *is* the scons script, and will likely only be\ncalled from the external \"scons\" wrapper. Consequently, anything here\nshould not be, or be considered, part of the build engine. If it's\nsomething that we expect other software to want to use, it should go in\nsome other module. If it's specific to the \"scons\" script invocation,\nit goes here.\n\"\"\"\n\nimport time\nstart_time = time.time()\n\nimport collections\nimport os\nfrom io import StringIO\n\nimport sys\n\n# Special chicken-and-egg handling of the \"--debug=memoizer\" flag:\n#\n# SCons.Memoize contains a metaclass implementation that affects how\n# the other classes are instantiated. The Memoizer may add shim methods\n# to classes that have methods that cache computed values in order to\n# count and report the hits and misses.\n#\n# If we wait to enable the Memoization until after we've parsed the\n# command line options normally, it will be too late, because the Memoizer\n# will have already analyzed the classes that it's Memoizing and decided\n# to not add the shims. So we use a special-case, up-front check for\n# the \"--debug=memoizer\" flag and enable Memoizer before we import any\n# of the other modules that use it.\n\n_args = sys.argv + os.environ.get('SCONSFLAGS', '').split()\nif \"--debug=memoizer\" in _args:\n import SCons.Memoize\n import SCons.Warnings\n try:\n SCons.Memoize.EnableMemoization()\n except SCons.Warnings.SConsWarning:\n # Some warning was thrown. Arrange for it to be displayed\n # or not after warnings are configured.\n from . import Main\n exc_type, exc_value, tb = sys.exc_info()\n Main.delayed_warnings.append((exc_type, exc_value))\ndel _args\n\nimport SCons.Action\nimport SCons.Builder\nimport SCons.Environment\nimport SCons.Node.FS\nimport SCons.Platform\nimport SCons.Platform.virtualenv\nimport SCons.Scanner\nimport SCons.SConf\nimport SCons.Subst\nimport SCons.Tool\nimport SCons.Util\nimport SCons.Variables\nimport SCons.Defaults\n\nfrom . import Main\n\nmain = Main.main\n\n# The following are global class definitions and variables that used to\n# live directly in this module back before 0.96.90, when it contained\n# a lot of code. Some SConscript files in widely-distributed packages\n# (Blender is the specific example) actually reached into SCons.Script\n# directly to use some of these. 
Rather than break those SConscript\n# files, we're going to propagate these names into the SCons.Script\n# namespace here.\n#\n# Some of these are commented out because it's *really* unlikely anyone\n# used them, but we're going to leave the comment here to try to make\n# it obvious what to do if the situation arises.\nBuildTask = Main.BuildTask\nCleanTask = Main.CleanTask\nQuestionTask = Main.QuestionTask\n#SConscriptSettableOptions = Main.SConscriptSettableOptions\n\nAddOption = Main.AddOption\nPrintHelp = Main.PrintHelp\nGetOption = Main.GetOption\nSetOption = Main.SetOption\nValidateOptions = Main.ValidateOptions\nProgress = Main.Progress\nGetBuildFailures = Main.GetBuildFailures\nDebugOptions = Main.DebugOptions\n\n#keep_going_on_error = Main.keep_going_on_error\n#print_dtree = Main.print_dtree\n#print_explanations = Main.print_explanations\n#print_includes = Main.print_includes\n#print_objects = Main.print_objects\n#print_time = Main.print_time\n#print_tree = Main.print_tree\n#memory_stats = Main.memory_stats\n#ignore_errors = Main.ignore_errors\n#sconscript_time = Main.sconscript_time\n#command_time = Main.command_time\n#exit_status = Main.exit_status\n#profiling = Main.profiling\n#repositories = Main.repositories\n\nfrom . import SConscript as _SConscript\n\ncall_stack = _SConscript.call_stack\n\n#\nAction = SCons.Action.Action\nAddMethod = SCons.Util.AddMethod\nAllowSubstExceptions = SCons.Subst.SetAllowableExceptions\nBuilder = SCons.Builder.Builder\nConfigure = _SConscript.Configure\nEnvironment = SCons.Environment.Environment\n#OptParser = SCons.SConsOptions.OptParser\nFindPathDirs = SCons.Scanner.FindPathDirs\nPlatform = SCons.Platform.Platform\nVirtualenv = SCons.Platform.virtualenv.Virtualenv\nReturn = _SConscript.Return\nScanner = SCons.Scanner.ScannerBase\nTool = SCons.Tool.Tool\nWhereIs = SCons.Util.WhereIs\n\n#\nBoolVariable = SCons.Variables.BoolVariable\nEnumVariable = SCons.Variables.EnumVariable\nListVariable = SCons.Variables.ListVariable\nPackageVariable = SCons.Variables.PackageVariable\nPathVariable = SCons.Variables.PathVariable\n\n\n# Action factories.\nChmod = SCons.Defaults.Chmod\nCopy = SCons.Defaults.Copy\nDelete = SCons.Defaults.Delete\nMkdir = SCons.Defaults.Mkdir\nMove = SCons.Defaults.Move\nTouch = SCons.Defaults.Touch\n\n# Pre-made, public scanners.\nCScanner = SCons.Tool.CScanner\nDScanner = SCons.Tool.DScanner\nDirScanner = SCons.Defaults.DirScanner\nProgramScanner = SCons.Tool.ProgramScanner\nSourceFileScanner = SCons.Tool.SourceFileScanner\n\n# Functions we might still convert to Environment methods.\nCScan = SCons.Defaults.CScan\nDefaultEnvironment = SCons.Defaults.DefaultEnvironment\n\n# Other variables we provide.\nclass TargetList(collections.UserList):\n def _do_nothing(self, *args, **kw) -> None:\n pass\n def _add_Default(self, list) -> None:\n self.extend(list)\n def _clear(self) -> None:\n del self[:]\n\nARGUMENTS = {}\nARGLIST = []\nBUILD_TARGETS = TargetList()\nCOMMAND_LINE_TARGETS = []\nDEFAULT_TARGETS = []\n\n# BUILD_TARGETS can be modified in the SConscript files. If so, we\n# want to treat the modified BUILD_TARGETS list as if they specified\n# targets on the command line. To do that, though, we need to know if\n# BUILD_TARGETS was modified through \"official\" APIs or by hand. We do\n# this by updating two lists in parallel, the documented BUILD_TARGETS\n# list, above, and this internal _build_plus_default targets list which\n# should only have \"official\" API changes. 
Then Script/Main.py can\n# compare these two afterwards to figure out if the user added their\n# own targets to BUILD_TARGETS.\n_build_plus_default = TargetList()\n\ndef _Add_Arguments(alist) -> None:\n for arg in alist:\n a, b = arg.split('=', 1)\n ARGUMENTS[a] = b\n ARGLIST.append((a, b))\n\ndef _Add_Targets(tlist) -> None:\n if tlist:\n COMMAND_LINE_TARGETS.extend(tlist)\n BUILD_TARGETS.extend(tlist)\n BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing\n BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing\n _build_plus_default.extend(tlist)\n _build_plus_default._add_Default = _build_plus_default._do_nothing\n _build_plus_default._clear = _build_plus_default._do_nothing\n\ndef _Set_Default_Targets_Has_Been_Called(d, fs):\n return DEFAULT_TARGETS\n\ndef _Set_Default_Targets_Has_Not_Been_Called(d, fs):\n if d is None:\n d = [fs.Dir('.')]\n return d\n\n_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called\n\ndef _Set_Default_Targets(env, tlist) -> None:\n global DEFAULT_TARGETS\n global _Get_Default_Targets\n _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called\n for t in tlist:\n if t is None:\n # Delete the elements from the list in-place, don't\n # reassign an empty list to DEFAULT_TARGETS, so that the\n # variables will still point to the same object we point to.\n del DEFAULT_TARGETS[:]\n BUILD_TARGETS._clear()\n _build_plus_default._clear()\n elif isinstance(t, SCons.Node.Node):\n DEFAULT_TARGETS.append(t)\n BUILD_TARGETS._add_Default([t])\n _build_plus_default._add_Default([t])\n else:\n nodes = env.arg2nodes(t, env.fs.Entry)\n DEFAULT_TARGETS.extend(nodes)\n BUILD_TARGETS._add_Default(nodes)\n _build_plus_default._add_Default(nodes)\n\n\nhelp_text = None\n\n\ndef HelpFunction(text, append: bool = False, keep_local: bool = False) -> None:\n \"\"\"The implementaion of the the ``Help`` method.\n\n See :meth:`~SCons.Script.SConscript.Help`.\n\n .. 
versionchanged:: 4.6.0\n The *keep_local* parameter was added.\n \"\"\"\n global help_text\n if help_text is None:\n if append:\n with StringIO() as s:\n PrintHelp(s, local_only=keep_local)\n help_text = s.getvalue()\n else:\n help_text = \"\"\n\n help_text += text\n\n\n# Will be non-zero if we are reading an SConscript file.\nsconscript_reading: int = 0\n\n_no_missing_sconscript = True\n_warn_missing_sconscript_deprecated = False # TODO: now unused\n\ndef set_missing_sconscript_error(flag: bool = True) -> bool:\n \"\"\"Set behavior on missing file in SConscript() call.\n\n Returns:\n previous value\n \"\"\"\n global _no_missing_sconscript\n old = _no_missing_sconscript\n _no_missing_sconscript = flag\n return old\n\n\ndef Variables(files=None, args=ARGUMENTS):\n return SCons.Variables.Variables(files, args)\n\n\n# Adding global functions to the SConscript name space.\n#\n# Static functions that do not trigger initialization of\n# DefaultEnvironment() and don't use its state.\nEnsureSConsVersion = _SConscript.SConsEnvironment.EnsureSConsVersion\nEnsurePythonVersion = _SConscript.SConsEnvironment.EnsurePythonVersion\nExit = _SConscript.SConsEnvironment.Exit\nGetLaunchDir = _SConscript.SConsEnvironment.GetLaunchDir\nSConscriptChdir = _SConscript.SConsEnvironment.SConscriptChdir\n\n# Functions that end up calling methods or Builders in the\n# DefaultEnvironment().\nGlobalDefaultEnvironmentFunctions = [\n # Methods from the SConsEnvironment class, above.\n 'Default',\n 'Export',\n 'Help',\n 'Import',\n #'SConscript', is handled separately, below.\n\n # Methods from the Environment.Base class.\n 'AddPostAction',\n 'AddPreAction',\n 'Alias',\n 'AlwaysBuild',\n 'CacheDir',\n 'Clean',\n #The Command() method is handled separately, below.\n 'Decider',\n 'Depends',\n 'Dir',\n 'NoClean',\n 'NoCache',\n 'Entry',\n 'Execute',\n 'File',\n 'FindFile',\n 'FindInstalledFiles',\n 'FindSourceFiles',\n 'Flatten',\n 'GetBuildPath',\n 'Glob',\n 'Ignore',\n 'Install',\n 'InstallAs',\n 'InstallVersionedLib',\n 'Literal',\n 'Local',\n 'ParseDepends',\n 'Precious',\n 'PyPackageDir',\n 'Repository',\n 'Requires',\n 'SConsignFile',\n 'SideEffect',\n 'Split',\n 'Tag',\n 'Value',\n 'VariantDir',\n]\n\nGlobalDefaultBuilders = [\n # Supported builders.\n 'CFile',\n 'CXXFile',\n 'DVI',\n 'Jar',\n 'Java',\n 'JavaH',\n 'Library',\n 'LoadableModule',\n 'M4',\n 'MSVSProject',\n 'Object',\n 'PCH',\n 'PDF',\n 'PostScript',\n 'Program',\n 'RES',\n 'RMIC',\n 'SharedLibrary',\n 'SharedObject',\n 'StaticLibrary',\n 'StaticObject',\n 'Substfile',\n 'Tar',\n 'Textfile',\n 'TypeLibrary',\n 'Zip',\n 'Package',\n]\n\n# DefaultEnvironmentCall() initializes DefaultEnvironment() if it is not\n# created yet.\nfor name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders:\n exec (\"%s = _SConscript.DefaultEnvironmentCall(%s)\" % (name, repr(name)))\ndel name\n\n# There are a handful of variables that used to live in the\n# Script/SConscript.py module that some SConscript files out there were\n# accessing directly as SCons.Script.SConscript.*. The problem is that\n# \"SConscript\" in this namespace is no longer a module, it's a global\n# function call--or more precisely, an object that implements a global\n# function call through the default Environment. 
Nevertheless, we can\n# maintain backwards compatibility for SConscripts that were reaching in\n# this way by hanging some attributes off the \"SConscript\" object here.\nSConscript = _SConscript.DefaultEnvironmentCall('SConscript')\n\n# Make SConscript look enough like the module it used to be so\n# that pychecker doesn't barf.\nSConscript.__name__ = 'SConscript'\n\nSConscript.Arguments = ARGUMENTS\nSConscript.ArgList = ARGLIST\nSConscript.BuildTargets = BUILD_TARGETS\nSConscript.CommandLineTargets = COMMAND_LINE_TARGETS\nSConscript.DefaultTargets = DEFAULT_TARGETS\n\n# The global Command() function must be handled differently than the\n# global functions for other construction environment methods because\n# we want people to be able to use Actions that must expand $TARGET\n# and $SOURCE later, when (and if) the Action is invoked to build\n# the target(s). We do this with the subst=1 argument, which creates\n# a DefaultEnvironmentCall instance that wraps up a normal default\n# construction environment that performs variable substitution, not a\n# proxy that doesn't.\n#\n# There's a flaw here, though, because any other $-variables on a command\n# line will *also* be expanded, each to a null string, but that should\n# only be a problem in the unusual case where someone was passing a '$'\n# on a command line and *expected* the $ to get through to the shell\n# because they were calling Command() and not env.Command()... This is\n# unlikely enough that we're going to leave this as is and cross that\n# bridge if someone actually comes to it.\nCommand = _SConscript.DefaultEnvironmentCall('Command', subst=1)\n\n# Local Variables:\n# tab-width:4\n# indent-tabs-mode:nil\n# End:\n# vim: set expandtab tabstop=4 shiftwidth=4:\n",
"path": "SCons/Script/__init__.py"
}
] | [
{
"content": "# MIT License\n#\n# Copyright The SCons Foundation\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\"\"\"The main() function used by the scons script.\n\nArchitecturally, this *is* the scons script, and will likely only be\ncalled from the external \"scons\" wrapper. Consequently, anything here\nshould not be, or be considered, part of the build engine. If it's\nsomething that we expect other software to want to use, it should go in\nsome other module. If it's specific to the \"scons\" script invocation,\nit goes here.\n\"\"\"\n\nimport time\nstart_time = time.time()\n\nimport collections\nimport os\nfrom io import StringIO\n\nimport sys\n\n# Special chicken-and-egg handling of the \"--debug=memoizer\" flag:\n#\n# SCons.Memoize contains a metaclass implementation that affects how\n# the other classes are instantiated. The Memoizer may add shim methods\n# to classes that have methods that cache computed values in order to\n# count and report the hits and misses.\n#\n# If we wait to enable the Memoization until after we've parsed the\n# command line options normally, it will be too late, because the Memoizer\n# will have already analyzed the classes that it's Memoizing and decided\n# to not add the shims. So we use a special-case, up-front check for\n# the \"--debug=memoizer\" flag and enable Memoizer before we import any\n# of the other modules that use it.\n\n_args = sys.argv + os.environ.get('SCONSFLAGS', '').split()\nif \"--debug=memoizer\" in _args:\n import SCons.Memoize\n import SCons.Warnings\n try:\n SCons.Memoize.EnableMemoization()\n except SCons.Warnings.SConsWarning:\n # Some warning was thrown. Arrange for it to be displayed\n # or not after warnings are configured.\n from . import Main\n exc_type, exc_value, tb = sys.exc_info()\n Main.delayed_warnings.append((exc_type, exc_value))\ndel _args\n\nimport SCons.Action\nimport SCons.Builder\nimport SCons.Environment\nimport SCons.Node.FS\nimport SCons.Platform\nimport SCons.Platform.virtualenv\nimport SCons.Scanner\nimport SCons.SConf\nimport SCons.Subst\nimport SCons.Tool\nimport SCons.Util\nimport SCons.Variables\nimport SCons.Defaults\n\nfrom . import Main\n\nmain = Main.main\n\n# The following are global class definitions and variables that used to\n# live directly in this module back before 0.96.90, when it contained\n# a lot of code. Some SConscript files in widely-distributed packages\n# (Blender is the specific example) actually reached into SCons.Script\n# directly to use some of these. 
Rather than break those SConscript\n# files, we're going to propagate these names into the SCons.Script\n# namespace here.\n#\n# Some of these are commented out because it's *really* unlikely anyone\n# used them, but we're going to leave the comment here to try to make\n# it obvious what to do if the situation arises.\nBuildTask = Main.BuildTask\nCleanTask = Main.CleanTask\nQuestionTask = Main.QuestionTask\n#SConscriptSettableOptions = Main.SConscriptSettableOptions\n\nAddOption = Main.AddOption\nPrintHelp = Main.PrintHelp\nGetOption = Main.GetOption\nSetOption = Main.SetOption\nValidateOptions = Main.ValidateOptions\nProgress = Main.Progress\nGetBuildFailures = Main.GetBuildFailures\nDebugOptions = Main.DebugOptions\n\n#keep_going_on_error = Main.keep_going_on_error\n#print_dtree = Main.print_dtree\n#print_explanations = Main.print_explanations\n#print_includes = Main.print_includes\n#print_objects = Main.print_objects\n#print_time = Main.print_time\n#print_tree = Main.print_tree\n#memory_stats = Main.memory_stats\n#ignore_errors = Main.ignore_errors\n#sconscript_time = Main.sconscript_time\n#command_time = Main.command_time\n#exit_status = Main.exit_status\n#profiling = Main.profiling\n#repositories = Main.repositories\n\nfrom . import SConscript as _SConscript\n\ncall_stack = _SConscript.call_stack\n\n#\nAction = SCons.Action.Action\nAddMethod = SCons.Util.AddMethod\nAllowSubstExceptions = SCons.Subst.SetAllowableExceptions\nBuilder = SCons.Builder.Builder\nConfigure = _SConscript.Configure\nEnvironment = SCons.Environment.Environment\n#OptParser = SCons.SConsOptions.OptParser\nFindPathDirs = SCons.Scanner.FindPathDirs\nPlatform = SCons.Platform.Platform\nVirtualenv = SCons.Platform.virtualenv.Virtualenv\nReturn = _SConscript.Return\nScanner = SCons.Scanner.ScannerBase\nTool = SCons.Tool.Tool\nWhereIs = SCons.Util.WhereIs\n\n#\nBoolVariable = SCons.Variables.BoolVariable\nEnumVariable = SCons.Variables.EnumVariable\nListVariable = SCons.Variables.ListVariable\nPackageVariable = SCons.Variables.PackageVariable\nPathVariable = SCons.Variables.PathVariable\n\n\n# Action factories.\nChmod = SCons.Defaults.Chmod\nCopy = SCons.Defaults.Copy\nDelete = SCons.Defaults.Delete\nMkdir = SCons.Defaults.Mkdir\nMove = SCons.Defaults.Move\nTouch = SCons.Defaults.Touch\n\n# Pre-made, public scanners.\nCScanner = SCons.Tool.CScanner\nDScanner = SCons.Tool.DScanner\nDirScanner = SCons.Defaults.DirScanner\nProgramScanner = SCons.Tool.ProgramScanner\nSourceFileScanner = SCons.Tool.SourceFileScanner\n\n# Functions we might still convert to Environment methods.\nCScan = SCons.Defaults.CScan\nDefaultEnvironment = SCons.Defaults.DefaultEnvironment\n\n# Other variables we provide.\nclass TargetList(collections.UserList):\n def _do_nothing(self, *args, **kw) -> None:\n pass\n def _add_Default(self, list) -> None:\n self.extend(list)\n def _clear(self) -> None:\n del self[:]\n\nARGUMENTS = {}\nARGLIST = []\nBUILD_TARGETS = TargetList()\nCOMMAND_LINE_TARGETS = []\nDEFAULT_TARGETS = []\n\n# BUILD_TARGETS can be modified in the SConscript files. If so, we\n# want to treat the modified BUILD_TARGETS list as if they specified\n# targets on the command line. To do that, though, we need to know if\n# BUILD_TARGETS was modified through \"official\" APIs or by hand. We do\n# this by updating two lists in parallel, the documented BUILD_TARGETS\n# list, above, and this internal _build_plus_default targets list which\n# should only have \"official\" API changes. 
Then Script/Main.py can\n# compare these two afterwards to figure out if the user added their\n# own targets to BUILD_TARGETS.\n_build_plus_default = TargetList()\n\ndef _Add_Arguments(alist) -> None:\n for arg in alist:\n a, b = arg.split('=', 1)\n ARGUMENTS[a] = b\n ARGLIST.append((a, b))\n\ndef _Add_Targets(tlist) -> None:\n if tlist:\n COMMAND_LINE_TARGETS.extend(tlist)\n BUILD_TARGETS.extend(tlist)\n BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing\n BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing\n _build_plus_default.extend(tlist)\n _build_plus_default._add_Default = _build_plus_default._do_nothing\n _build_plus_default._clear = _build_plus_default._do_nothing\n\ndef _Set_Default_Targets_Has_Been_Called(d, fs):\n return DEFAULT_TARGETS\n\ndef _Set_Default_Targets_Has_Not_Been_Called(d, fs):\n if d is None:\n d = [fs.Dir('.')]\n return d\n\n_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called\n\ndef _Set_Default_Targets(env, tlist) -> None:\n global DEFAULT_TARGETS\n global _Get_Default_Targets\n _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called\n for t in tlist:\n if t is None:\n # Delete the elements from the list in-place, don't\n # reassign an empty list to DEFAULT_TARGETS, so that the\n # variables will still point to the same object we point to.\n del DEFAULT_TARGETS[:]\n BUILD_TARGETS._clear()\n _build_plus_default._clear()\n elif isinstance(t, SCons.Node.Node):\n DEFAULT_TARGETS.append(t)\n BUILD_TARGETS._add_Default([t])\n _build_plus_default._add_Default([t])\n else:\n nodes = env.arg2nodes(t, env.fs.Entry)\n DEFAULT_TARGETS.extend(nodes)\n BUILD_TARGETS._add_Default(nodes)\n _build_plus_default._add_Default(nodes)\n\n\nhelp_text = None\n\n\ndef HelpFunction(text, append: bool = False, keep_local: bool = False) -> None:\n \"\"\"The implementaion of the the ``Help`` method.\n\n See :meth:`~SCons.Script.SConscript.Help`.\n\n .. 
versionchanged:: 4.6.0\n The *keep_local* parameter was added.\n \"\"\"\n global help_text\n if help_text is None:\n if append:\n with StringIO() as s:\n PrintHelp(s, local_only=keep_local)\n help_text = s.getvalue()\n else:\n help_text = \"\"\n\n help_text += text\n\n\n# Will be non-zero if we are reading an SConscript file.\nsconscript_reading: int = 0\n\n_no_missing_sconscript = True\n_warn_missing_sconscript_deprecated = False # TODO: now unused\n\ndef set_missing_sconscript_error(flag: bool = True) -> bool:\n \"\"\"Set behavior on missing file in SConscript() call.\n\n Returns:\n previous value\n \"\"\"\n global _no_missing_sconscript\n old = _no_missing_sconscript\n _no_missing_sconscript = flag\n return old\n\n\ndef Variables(files=None, args=ARGUMENTS):\n return SCons.Variables.Variables(files, args)\n\n\n# Adding global functions to the SConscript name space.\n#\n# Static functions that do not trigger initialization of\n# DefaultEnvironment() and don't use its state.\nEnsureSConsVersion = _SConscript.SConsEnvironment.EnsureSConsVersion\nEnsurePythonVersion = _SConscript.SConsEnvironment.EnsurePythonVersion\nExit = _SConscript.SConsEnvironment.Exit\nGetLaunchDir = _SConscript.SConsEnvironment.GetLaunchDir\nSConscriptChdir = _SConscript.SConsEnvironment.SConscriptChdir\n\n# Functions that end up calling methods or Builders in the\n# DefaultEnvironment().\nGlobalDefaultEnvironmentFunctions = [\n # Methods from the SConsEnvironment class, above.\n 'Default',\n 'Export',\n 'Help',\n 'Import',\n #'SConscript', is handled separately, below.\n\n # Methods from the Environment.Base class.\n 'AddPostAction',\n 'AddPreAction',\n 'Alias',\n 'AlwaysBuild',\n 'CacheDir',\n 'Clean',\n #The Command() method is handled separately, below.\n 'Decider',\n 'Depends',\n 'Dir',\n 'NoClean',\n 'NoCache',\n 'Entry',\n 'Execute',\n 'File',\n 'FindFile',\n 'FindInstalledFiles',\n 'FindSourceFiles',\n 'Flatten',\n 'GetBuildPath',\n 'Glob',\n 'Ignore',\n 'Install',\n 'InstallAs',\n 'InstallVersionedLib',\n 'Literal',\n 'Local',\n 'ParseDepends',\n 'Precious',\n 'Pseudo',\n 'PyPackageDir',\n 'Repository',\n 'Requires',\n 'SConsignFile',\n 'SideEffect',\n 'Split',\n 'Tag',\n 'Value',\n 'VariantDir',\n]\n\nGlobalDefaultBuilders = [\n # Supported builders.\n 'CFile',\n 'CXXFile',\n 'DVI',\n 'Jar',\n 'Java',\n 'JavaH',\n 'Library',\n 'LoadableModule',\n 'M4',\n 'MSVSProject',\n 'Object',\n 'PCH',\n 'PDF',\n 'PostScript',\n 'Program',\n 'RES',\n 'RMIC',\n 'SharedLibrary',\n 'SharedObject',\n 'StaticLibrary',\n 'StaticObject',\n 'Substfile',\n 'Tar',\n 'Textfile',\n 'TypeLibrary',\n 'Zip',\n 'Package',\n]\n\n# DefaultEnvironmentCall() initializes DefaultEnvironment() if it is not\n# created yet.\nfor name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders:\n exec (\"%s = _SConscript.DefaultEnvironmentCall(%s)\" % (name, repr(name)))\ndel name\n\n# There are a handful of variables that used to live in the\n# Script/SConscript.py module that some SConscript files out there were\n# accessing directly as SCons.Script.SConscript.*. The problem is that\n# \"SConscript\" in this namespace is no longer a module, it's a global\n# function call--or more precisely, an object that implements a global\n# function call through the default Environment. 
Nevertheless, we can\n# maintain backwards compatibility for SConscripts that were reaching in\n# this way by hanging some attributes off the \"SConscript\" object here.\nSConscript = _SConscript.DefaultEnvironmentCall('SConscript')\n\n# Make SConscript look enough like the module it used to be so\n# that pychecker doesn't barf.\nSConscript.__name__ = 'SConscript'\n\nSConscript.Arguments = ARGUMENTS\nSConscript.ArgList = ARGLIST\nSConscript.BuildTargets = BUILD_TARGETS\nSConscript.CommandLineTargets = COMMAND_LINE_TARGETS\nSConscript.DefaultTargets = DEFAULT_TARGETS\n\n# The global Command() function must be handled differently than the\n# global functions for other construction environment methods because\n# we want people to be able to use Actions that must expand $TARGET\n# and $SOURCE later, when (and if) the Action is invoked to build\n# the target(s). We do this with the subst=1 argument, which creates\n# a DefaultEnvironmentCall instance that wraps up a normal default\n# construction environment that performs variable substitution, not a\n# proxy that doesn't.\n#\n# There's a flaw here, though, because any other $-variables on a command\n# line will *also* be expanded, each to a null string, but that should\n# only be a problem in the unusual case where someone was passing a '$'\n# on a command line and *expected* the $ to get through to the shell\n# because they were calling Command() and not env.Command()... This is\n# unlikely enough that we're going to leave this as is and cross that\n# bridge if someone actually comes to it.\nCommand = _SConscript.DefaultEnvironmentCall('Command', subst=1)\n\n# Local Variables:\n# tab-width:4\n# indent-tabs-mode:nil\n# End:\n# vim: set expandtab tabstop=4 shiftwidth=4:\n",
"path": "SCons/Script/__init__.py"
}
] | diff --git a/CHANGES.txt b/CHANGES.txt
index 44a458a8f7..b3a0d6ec4e 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -74,6 +74,7 @@ RELEASE VERSION/DATE TO BE FILLED IN LATER
Fixes #3529.
- Clarify/fix documentation of Scanners in User Guide and Manpage.
Fixes #4468.
+ - Add Pseudo() to global functions, had been omitted. Fixes #4474.
RELEASE 4.6.0 - Sun, 19 Nov 2023 17:22:20 -0700
diff --git a/RELEASE.txt b/RELEASE.txt
index 2952d4f396..28b6c6ac8b 100644
--- a/RELEASE.txt
+++ b/RELEASE.txt
@@ -52,6 +52,8 @@ FIXES
build of the project file fails.
- On Windows platform, when collecting command output (Configure checks),
make sure decoding of bytes doesn't fail.
+- Documentation indicated that both Pseudo() and env.Pseudo() were usable,
+ but Pseudo() did not work; is now enabled.
IMPROVEMENTS
------------
diff --git a/SCons/Script/__init__.py b/SCons/Script/__init__.py
index 0d2940c6ae..a62650f7f6 100644
--- a/SCons/Script/__init__.py
+++ b/SCons/Script/__init__.py
@@ -343,6 +343,7 @@ def Variables(files=None, args=ARGUMENTS):
'Local',
'ParseDepends',
'Precious',
+ 'Pseudo',
'PyPackageDir',
'Repository',
'Requires',
diff --git a/test/Pseudo.py b/test/Pseudo.py
index db3c30c05b..ec953f7b2a 100644
--- a/test/Pseudo.py
+++ b/test/Pseudo.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python
#
-# __COPYRIGHT__
+# MIT License
+#
+# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -20,41 +22,66 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
+"""
+Test the Pseudo method
+"""
import TestSCons
test = TestSCons.TestSCons()
-# Firstly, build a pseudo target and make sure we get no warnings it
-# doesn't exist under any circumstances
-test.write('SConstruct', """
+test.write('SConstruct', """\
env = Environment()
-env.Pseudo(env.Command('foo.out', [], '@echo boo'))
-""")
-
-test.run(arguments='-Q', stdout = 'boo\n')
+foo = env.Command('foo.out', [], '@echo boo')
+bar = env.Command('bar.out', [], Touch('$TARGET'))
+env.Pseudo(foo, bar)
-test.run(arguments='-Q --warning=target-not-built', stdout = "boo\n")
-
-# Now do the same thing again but create the target and check we get an
-# error if it exists after the build
-test.write('SConstruct', """
-env = Environment()
-env.Pseudo(env.Command('foo.out', [], Touch('$TARGET')))
+gfoo = Command('foo.glb', [], '@echo boo')
+gbar = Command('bar.glb', [], Touch('$TARGET'))
+Pseudo(gfoo, gbar)
""")
-test.run(arguments='-Q', stdout = 'Touch("foo.out")\n', stderr = None,
- status = 2)
-test.must_contain_all_lines(test.stderr(),
- 'scons: *** Pseudo target foo.out must not exist')
-test.run(arguments='-Q --warning=target-not-built',
- stdout = 'Touch("foo.out")\n',
- stderr = None, status = 2)
-test.must_contain_all_lines(test.stderr(),
- 'scons: *** Pseudo target foo.out must not exist')
+# foo.out build does not create file, should generate no errors
+test.run(arguments='-Q foo.out', stdout='boo\n')
+# missing target warning triggers if requested
+test.run(arguments='-Q foo.out --warning=target-not-built', stdout="boo\n")
+# bar.out build creates file, error if it exists after the build
+test.run(arguments='-Q bar.out', stdout='Touch("bar.out")\n', stderr=None, status=2)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.out must not exist',
+)
+# warning must not appear since target created
+test.run(
+ arguments='-Q bar.out --warning=target-not-built',
+ stdout='Touch("bar.out")\n',
+ stderr=None,
+ status=2,
+)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.out must not exist',
+)
+
+# repeat the process for the global function form (was missing initially)
+test.run(arguments='-Q foo.glb', stdout='boo\n')
+test.run(arguments='-Q foo.glb --warning=target-not-built', stdout="boo\n")
+test.run(arguments='-Q bar.glb', stdout='Touch("bar.glb")\n', stderr=None, status=2)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.glb must not exist',
+)
+test.run(
+ arguments='-Q bar.glb --warning=target-not-built',
+ stdout='Touch("bar.glb")\n',
+ stderr=None,
+ status=2,
+)
+test.must_contain_all_lines(
+ test.stderr(),
+ 'scons: *** Pseudo target bar.glb must not exist',
+)
test.pass_test()
|
wagtail__wagtail-8940 | __str__ method doesn't return a string.
This code sample:
```
>>> from wagtail.contrib.forms.models import FormSubmission
>>> FormSubmission.objects.count()
1
>>> FormSubmission.objects.first()
Traceback (most recent call last):
  File "<console>", line 1, in <module>
  File "lib64/python3.10/site-packages/django/db/models/base.py", line 580, in __repr__
    return "<%s: %s>" % (self.__class__.__name__, self)
TypeError: __str__ returned non-string (type dict)
```
This method:
https://github.com/wagtail/wagtail/blob/18ad15a18f8e533b858ccde7d060b9d4e85dcfd4/wagtail/contrib/forms/models.py#L61-L62
should be:
```python
    def __str__(self):
        return f"{self.form_data}"
```
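As a side note, a minimal standalone sketch of the failure mode and the fix (the class names below are invented for illustration; only the `__str__` behaviour matters): `%s` formatting calls `__str__`, which must return a `str`, so returning the raw dict breaks `__repr__`, while the f-string version always yields a string.

```python
# Hypothetical stand-ins illustrating why __repr__ fails: "%s" formatting
# calls __str__, and Python requires __str__ to return a str.
class BrokenSubmission:
    form_data = {"name": "value"}

    def __str__(self):
        return self.form_data  # returns a dict, not a str


class FixedSubmission(BrokenSubmission):
    def __str__(self):
        return f"{self.form_data}"  # f-string always produces a str


try:
    print("<%s>" % BrokenSubmission())
except TypeError as exc:
    print(exc)  # __str__ returned non-string (type dict)

print("<%s>" % FixedSubmission())  # <{'name': 'value'}>
```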
| [
{
"content": "import datetime\nimport os\n\nfrom django.conf import settings\nfrom django.core.serializers.json import DjangoJSONEncoder\nfrom django.core.validators import validate_email\nfrom django.db import models\nfrom django.template.response import TemplateResponse\nfrom django.utils.formats import date_format\nfrom django.utils.translation import gettext_lazy as _\n\nfrom wagtail.admin.mail import send_mail\nfrom wagtail.admin.panels import FieldPanel\nfrom wagtail.contrib.forms.utils import get_field_clean_name\nfrom wagtail.models import Orderable, Page\n\nfrom .forms import FormBuilder, WagtailAdminFormPageForm\n\nFORM_FIELD_CHOICES = (\n (\"singleline\", _(\"Single line text\")),\n (\"multiline\", _(\"Multi-line text\")),\n (\"email\", _(\"Email\")),\n (\"number\", _(\"Number\")),\n (\"url\", _(\"URL\")),\n (\"checkbox\", _(\"Checkbox\")),\n (\"checkboxes\", _(\"Checkboxes\")),\n (\"dropdown\", _(\"Drop down\")),\n (\"multiselect\", _(\"Multiple select\")),\n (\"radio\", _(\"Radio buttons\")),\n (\"date\", _(\"Date\")),\n (\"datetime\", _(\"Date/time\")),\n (\"hidden\", _(\"Hidden field\")),\n)\n\n\nclass AbstractFormSubmission(models.Model):\n \"\"\"\n Data for a form submission.\n\n You can create custom submission model based on this abstract model.\n For example, if you need to save additional data or a reference to a user.\n \"\"\"\n\n form_data = models.JSONField(encoder=DjangoJSONEncoder)\n page = models.ForeignKey(Page, on_delete=models.CASCADE)\n\n submit_time = models.DateTimeField(verbose_name=_(\"submit time\"), auto_now_add=True)\n\n def get_data(self):\n \"\"\"\n Returns dict with form data.\n\n You can override this method to add additional data.\n \"\"\"\n\n return {\n **self.form_data,\n \"submit_time\": self.submit_time,\n }\n\n def __str__(self):\n return self.form_data\n\n class Meta:\n abstract = True\n verbose_name = _(\"form submission\")\n verbose_name_plural = _(\"form submissions\")\n\n\nclass FormSubmission(AbstractFormSubmission):\n \"\"\"Data for a Form submission.\"\"\"\n\n\nclass AbstractFormField(Orderable):\n \"\"\"\n Database Fields required for building a Django Form field.\n \"\"\"\n\n clean_name = models.CharField(\n verbose_name=_(\"name\"),\n max_length=255,\n blank=True,\n default=\"\",\n help_text=_(\n \"Safe name of the form field, the label converted to ascii_snake_case\"\n ),\n )\n label = models.CharField(\n verbose_name=_(\"label\"),\n max_length=255,\n help_text=_(\"The label of the form field\"),\n )\n field_type = models.CharField(\n verbose_name=_(\"field type\"), max_length=16, choices=FORM_FIELD_CHOICES\n )\n required = models.BooleanField(verbose_name=_(\"required\"), default=True)\n choices = models.TextField(\n verbose_name=_(\"choices\"),\n blank=True,\n help_text=_(\n \"Comma or new line separated list of choices. Only applicable in checkboxes, radio and dropdown.\"\n ),\n )\n default_value = models.TextField(\n verbose_name=_(\"default value\"),\n blank=True,\n help_text=_(\n \"Default value. 
Comma or new line separated values supported for checkboxes.\"\n ),\n )\n help_text = models.CharField(\n verbose_name=_(\"help text\"), max_length=255, blank=True\n )\n\n panels = [\n FieldPanel(\"label\"),\n FieldPanel(\"help_text\"),\n FieldPanel(\"required\"),\n FieldPanel(\"field_type\", classname=\"formbuilder-type\"),\n FieldPanel(\"choices\", classname=\"formbuilder-choices\"),\n FieldPanel(\"default_value\", classname=\"formbuilder-default\"),\n ]\n\n def get_field_clean_name(self):\n \"\"\"\n Prepare an ascii safe lower_snake_case variant of the field name to use as the field key.\n This key is used to reference the field responses in the JSON store and as the field name in forms.\n Called for new field creation, validation of duplicate labels and form previews.\n When called, does not have access to the Page, nor its own id as the record is not yet created.\n \"\"\"\n\n return get_field_clean_name(self.label)\n\n def save(self, *args, **kwargs):\n \"\"\"\n When new fields are created, generate a template safe ascii name to use as the\n JSON storage reference for this field. Previously created fields will be updated\n to use the legacy unidecode method via checks & _migrate_legacy_clean_name.\n We do not want to update the clean name on any subsequent changes to the label\n as this would invalidate any previously submitted data.\n \"\"\"\n\n is_new = self.pk is None\n if is_new:\n clean_name = self.get_field_clean_name()\n self.clean_name = clean_name\n\n super().save(*args, **kwargs)\n\n class Meta:\n abstract = True\n ordering = [\"sort_order\"]\n\n\nclass AbstractForm(Page):\n \"\"\"\n A Form Page. Pages implementing a form should inherit from it\n \"\"\"\n\n base_form_class = WagtailAdminFormPageForm\n\n form_builder = FormBuilder\n\n submissions_list_view_class = None\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n if not hasattr(self, \"landing_page_template\"):\n name, ext = os.path.splitext(self.template)\n self.landing_page_template = name + \"_landing\" + ext\n\n class Meta:\n abstract = True\n\n def get_form_fields(self):\n \"\"\"\n Form page expects `form_fields` to be declared.\n If you want to change backwards relation name,\n you need to override this method.\n \"\"\"\n\n return self.form_fields.all()\n\n def get_data_fields(self):\n \"\"\"\n Returns a list of tuples with (field_name, field_label).\n \"\"\"\n\n data_fields = [\n (\"submit_time\", _(\"Submission date\")),\n ]\n data_fields += [\n (field.clean_name, field.label) for field in self.get_form_fields()\n ]\n\n return data_fields\n\n def get_form_class(self):\n fb = self.form_builder(self.get_form_fields())\n return fb.get_form_class()\n\n def get_form_parameters(self):\n return {}\n\n def get_form(self, *args, **kwargs):\n form_class = self.get_form_class()\n form_params = self.get_form_parameters()\n form_params.update(kwargs)\n\n return form_class(*args, **form_params)\n\n def get_landing_page_template(self, request, *args, **kwargs):\n return self.landing_page_template\n\n def get_submission_class(self):\n \"\"\"\n Returns submission class.\n\n You can override this method to provide custom submission class.\n Your class must be inherited from AbstractFormSubmission.\n \"\"\"\n\n return FormSubmission\n\n def get_submissions_list_view_class(self):\n from .views import SubmissionsListView\n\n return self.submissions_list_view_class or SubmissionsListView\n\n def process_form_submission(self, form):\n \"\"\"\n Accepts form instance with submitted data, user and page.\n 
Creates submission instance.\n\n You can override this method if you want to have custom creation logic.\n For example, if you want to save reference to a user.\n \"\"\"\n\n return self.get_submission_class().objects.create(\n form_data=form.cleaned_data,\n page=self,\n )\n\n def render_landing_page(self, request, form_submission=None, *args, **kwargs):\n \"\"\"\n Renders the landing page.\n\n You can override this method to return a different HttpResponse as\n landing page. E.g. you could return a redirect to a separate page.\n \"\"\"\n context = self.get_context(request)\n context[\"form_submission\"] = form_submission\n return TemplateResponse(\n request, self.get_landing_page_template(request), context\n )\n\n def serve_submissions_list_view(self, request, *args, **kwargs):\n \"\"\"\n Returns list submissions view for admin.\n\n `list_submissions_view_class` can bse set to provide custom view class.\n Your class must be inherited from SubmissionsListView.\n \"\"\"\n view = self.get_submissions_list_view_class().as_view()\n return view(request, form_page=self, *args, **kwargs)\n\n def serve(self, request, *args, **kwargs):\n if request.method == \"POST\":\n form = self.get_form(\n request.POST, request.FILES, page=self, user=request.user\n )\n\n if form.is_valid():\n form_submission = self.process_form_submission(form)\n return self.render_landing_page(\n request, form_submission, *args, **kwargs\n )\n else:\n form = self.get_form(page=self, user=request.user)\n\n context = self.get_context(request)\n context[\"form\"] = form\n return TemplateResponse(request, self.get_template(request), context)\n\n preview_modes = [\n (\"form\", _(\"Form\")),\n (\"landing\", _(\"Landing page\")),\n ]\n\n def serve_preview(self, request, mode_name):\n if mode_name == \"landing\":\n return self.render_landing_page(request)\n else:\n return super().serve_preview(request, mode_name)\n\n\ndef validate_to_address(value):\n for address in value.split(\",\"):\n validate_email(address.strip())\n\n\nclass AbstractEmailForm(AbstractForm):\n \"\"\"\n A Form Page that sends email. Pages implementing a form to be send to an email should inherit from it\n \"\"\"\n\n to_address = models.CharField(\n verbose_name=_(\"to address\"),\n max_length=255,\n blank=True,\n help_text=_(\n \"Optional - form submissions will be emailed to these addresses. 
Separate multiple addresses by comma.\"\n ),\n validators=[validate_to_address],\n )\n from_address = models.EmailField(\n verbose_name=_(\"from address\"), max_length=255, blank=True\n )\n subject = models.CharField(verbose_name=_(\"subject\"), max_length=255, blank=True)\n\n def process_form_submission(self, form):\n submission = super().process_form_submission(form)\n if self.to_address:\n self.send_mail(form)\n return submission\n\n def send_mail(self, form):\n addresses = [x.strip() for x in self.to_address.split(\",\")]\n send_mail(\n self.subject,\n self.render_email(form),\n addresses,\n self.from_address,\n )\n\n def render_email(self, form):\n content = []\n\n cleaned_data = form.cleaned_data\n for field in form:\n if field.name not in cleaned_data:\n continue\n\n value = cleaned_data.get(field.name)\n\n if isinstance(value, list):\n value = \", \".join(value)\n\n # Format dates and datetimes with SHORT_DATE(TIME)_FORMAT\n if isinstance(value, datetime.datetime):\n value = date_format(value, settings.SHORT_DATETIME_FORMAT)\n elif isinstance(value, datetime.date):\n value = date_format(value, settings.SHORT_DATE_FORMAT)\n\n content.append(\"{}: {}\".format(field.label, value))\n\n return \"\\n\".join(content)\n\n class Meta:\n abstract = True\n",
"path": "wagtail/contrib/forms/models.py"
}
] | [
{
"content": "import datetime\nimport os\n\nfrom django.conf import settings\nfrom django.core.serializers.json import DjangoJSONEncoder\nfrom django.core.validators import validate_email\nfrom django.db import models\nfrom django.template.response import TemplateResponse\nfrom django.utils.formats import date_format\nfrom django.utils.translation import gettext_lazy as _\n\nfrom wagtail.admin.mail import send_mail\nfrom wagtail.admin.panels import FieldPanel\nfrom wagtail.contrib.forms.utils import get_field_clean_name\nfrom wagtail.models import Orderable, Page\n\nfrom .forms import FormBuilder, WagtailAdminFormPageForm\n\nFORM_FIELD_CHOICES = (\n (\"singleline\", _(\"Single line text\")),\n (\"multiline\", _(\"Multi-line text\")),\n (\"email\", _(\"Email\")),\n (\"number\", _(\"Number\")),\n (\"url\", _(\"URL\")),\n (\"checkbox\", _(\"Checkbox\")),\n (\"checkboxes\", _(\"Checkboxes\")),\n (\"dropdown\", _(\"Drop down\")),\n (\"multiselect\", _(\"Multiple select\")),\n (\"radio\", _(\"Radio buttons\")),\n (\"date\", _(\"Date\")),\n (\"datetime\", _(\"Date/time\")),\n (\"hidden\", _(\"Hidden field\")),\n)\n\n\nclass AbstractFormSubmission(models.Model):\n \"\"\"\n Data for a form submission.\n\n You can create custom submission model based on this abstract model.\n For example, if you need to save additional data or a reference to a user.\n \"\"\"\n\n form_data = models.JSONField(encoder=DjangoJSONEncoder)\n page = models.ForeignKey(Page, on_delete=models.CASCADE)\n\n submit_time = models.DateTimeField(verbose_name=_(\"submit time\"), auto_now_add=True)\n\n def get_data(self):\n \"\"\"\n Returns dict with form data.\n\n You can override this method to add additional data.\n \"\"\"\n\n return {\n **self.form_data,\n \"submit_time\": self.submit_time,\n }\n\n def __str__(self):\n return f\"{self.form_data}\"\n\n class Meta:\n abstract = True\n verbose_name = _(\"form submission\")\n verbose_name_plural = _(\"form submissions\")\n\n\nclass FormSubmission(AbstractFormSubmission):\n \"\"\"Data for a Form submission.\"\"\"\n\n\nclass AbstractFormField(Orderable):\n \"\"\"\n Database Fields required for building a Django Form field.\n \"\"\"\n\n clean_name = models.CharField(\n verbose_name=_(\"name\"),\n max_length=255,\n blank=True,\n default=\"\",\n help_text=_(\n \"Safe name of the form field, the label converted to ascii_snake_case\"\n ),\n )\n label = models.CharField(\n verbose_name=_(\"label\"),\n max_length=255,\n help_text=_(\"The label of the form field\"),\n )\n field_type = models.CharField(\n verbose_name=_(\"field type\"), max_length=16, choices=FORM_FIELD_CHOICES\n )\n required = models.BooleanField(verbose_name=_(\"required\"), default=True)\n choices = models.TextField(\n verbose_name=_(\"choices\"),\n blank=True,\n help_text=_(\n \"Comma or new line separated list of choices. Only applicable in checkboxes, radio and dropdown.\"\n ),\n )\n default_value = models.TextField(\n verbose_name=_(\"default value\"),\n blank=True,\n help_text=_(\n \"Default value. 
Comma or new line separated values supported for checkboxes.\"\n ),\n )\n help_text = models.CharField(\n verbose_name=_(\"help text\"), max_length=255, blank=True\n )\n\n panels = [\n FieldPanel(\"label\"),\n FieldPanel(\"help_text\"),\n FieldPanel(\"required\"),\n FieldPanel(\"field_type\", classname=\"formbuilder-type\"),\n FieldPanel(\"choices\", classname=\"formbuilder-choices\"),\n FieldPanel(\"default_value\", classname=\"formbuilder-default\"),\n ]\n\n def get_field_clean_name(self):\n \"\"\"\n Prepare an ascii safe lower_snake_case variant of the field name to use as the field key.\n This key is used to reference the field responses in the JSON store and as the field name in forms.\n Called for new field creation, validation of duplicate labels and form previews.\n When called, does not have access to the Page, nor its own id as the record is not yet created.\n \"\"\"\n\n return get_field_clean_name(self.label)\n\n def save(self, *args, **kwargs):\n \"\"\"\n When new fields are created, generate a template safe ascii name to use as the\n JSON storage reference for this field. Previously created fields will be updated\n to use the legacy unidecode method via checks & _migrate_legacy_clean_name.\n We do not want to update the clean name on any subsequent changes to the label\n as this would invalidate any previously submitted data.\n \"\"\"\n\n is_new = self.pk is None\n if is_new:\n clean_name = self.get_field_clean_name()\n self.clean_name = clean_name\n\n super().save(*args, **kwargs)\n\n class Meta:\n abstract = True\n ordering = [\"sort_order\"]\n\n\nclass AbstractForm(Page):\n \"\"\"\n A Form Page. Pages implementing a form should inherit from it\n \"\"\"\n\n base_form_class = WagtailAdminFormPageForm\n\n form_builder = FormBuilder\n\n submissions_list_view_class = None\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n if not hasattr(self, \"landing_page_template\"):\n name, ext = os.path.splitext(self.template)\n self.landing_page_template = name + \"_landing\" + ext\n\n class Meta:\n abstract = True\n\n def get_form_fields(self):\n \"\"\"\n Form page expects `form_fields` to be declared.\n If you want to change backwards relation name,\n you need to override this method.\n \"\"\"\n\n return self.form_fields.all()\n\n def get_data_fields(self):\n \"\"\"\n Returns a list of tuples with (field_name, field_label).\n \"\"\"\n\n data_fields = [\n (\"submit_time\", _(\"Submission date\")),\n ]\n data_fields += [\n (field.clean_name, field.label) for field in self.get_form_fields()\n ]\n\n return data_fields\n\n def get_form_class(self):\n fb = self.form_builder(self.get_form_fields())\n return fb.get_form_class()\n\n def get_form_parameters(self):\n return {}\n\n def get_form(self, *args, **kwargs):\n form_class = self.get_form_class()\n form_params = self.get_form_parameters()\n form_params.update(kwargs)\n\n return form_class(*args, **form_params)\n\n def get_landing_page_template(self, request, *args, **kwargs):\n return self.landing_page_template\n\n def get_submission_class(self):\n \"\"\"\n Returns submission class.\n\n You can override this method to provide custom submission class.\n Your class must be inherited from AbstractFormSubmission.\n \"\"\"\n\n return FormSubmission\n\n def get_submissions_list_view_class(self):\n from .views import SubmissionsListView\n\n return self.submissions_list_view_class or SubmissionsListView\n\n def process_form_submission(self, form):\n \"\"\"\n Accepts form instance with submitted data, user and page.\n 
Creates submission instance.\n\n You can override this method if you want to have custom creation logic.\n For example, if you want to save reference to a user.\n \"\"\"\n\n return self.get_submission_class().objects.create(\n form_data=form.cleaned_data,\n page=self,\n )\n\n def render_landing_page(self, request, form_submission=None, *args, **kwargs):\n \"\"\"\n Renders the landing page.\n\n You can override this method to return a different HttpResponse as\n landing page. E.g. you could return a redirect to a separate page.\n \"\"\"\n context = self.get_context(request)\n context[\"form_submission\"] = form_submission\n return TemplateResponse(\n request, self.get_landing_page_template(request), context\n )\n\n def serve_submissions_list_view(self, request, *args, **kwargs):\n \"\"\"\n Returns list submissions view for admin.\n\n `list_submissions_view_class` can bse set to provide custom view class.\n Your class must be inherited from SubmissionsListView.\n \"\"\"\n view = self.get_submissions_list_view_class().as_view()\n return view(request, form_page=self, *args, **kwargs)\n\n def serve(self, request, *args, **kwargs):\n if request.method == \"POST\":\n form = self.get_form(\n request.POST, request.FILES, page=self, user=request.user\n )\n\n if form.is_valid():\n form_submission = self.process_form_submission(form)\n return self.render_landing_page(\n request, form_submission, *args, **kwargs\n )\n else:\n form = self.get_form(page=self, user=request.user)\n\n context = self.get_context(request)\n context[\"form\"] = form\n return TemplateResponse(request, self.get_template(request), context)\n\n preview_modes = [\n (\"form\", _(\"Form\")),\n (\"landing\", _(\"Landing page\")),\n ]\n\n def serve_preview(self, request, mode_name):\n if mode_name == \"landing\":\n return self.render_landing_page(request)\n else:\n return super().serve_preview(request, mode_name)\n\n\ndef validate_to_address(value):\n for address in value.split(\",\"):\n validate_email(address.strip())\n\n\nclass AbstractEmailForm(AbstractForm):\n \"\"\"\n A Form Page that sends email. Pages implementing a form to be send to an email should inherit from it\n \"\"\"\n\n to_address = models.CharField(\n verbose_name=_(\"to address\"),\n max_length=255,\n blank=True,\n help_text=_(\n \"Optional - form submissions will be emailed to these addresses. 
Separate multiple addresses by comma.\"\n ),\n validators=[validate_to_address],\n )\n from_address = models.EmailField(\n verbose_name=_(\"from address\"), max_length=255, blank=True\n )\n subject = models.CharField(verbose_name=_(\"subject\"), max_length=255, blank=True)\n\n def process_form_submission(self, form):\n submission = super().process_form_submission(form)\n if self.to_address:\n self.send_mail(form)\n return submission\n\n def send_mail(self, form):\n addresses = [x.strip() for x in self.to_address.split(\",\")]\n send_mail(\n self.subject,\n self.render_email(form),\n addresses,\n self.from_address,\n )\n\n def render_email(self, form):\n content = []\n\n cleaned_data = form.cleaned_data\n for field in form:\n if field.name not in cleaned_data:\n continue\n\n value = cleaned_data.get(field.name)\n\n if isinstance(value, list):\n value = \", \".join(value)\n\n # Format dates and datetimes with SHORT_DATE(TIME)_FORMAT\n if isinstance(value, datetime.datetime):\n value = date_format(value, settings.SHORT_DATETIME_FORMAT)\n elif isinstance(value, datetime.date):\n value = date_format(value, settings.SHORT_DATE_FORMAT)\n\n content.append(\"{}: {}\".format(field.label, value))\n\n return \"\\n\".join(content)\n\n class Meta:\n abstract = True\n",
"path": "wagtail/contrib/forms/models.py"
}
] | diff --git a/wagtail/contrib/forms/models.py b/wagtail/contrib/forms/models.py
index f864c5195b16..a56deedc5361 100644
--- a/wagtail/contrib/forms/models.py
+++ b/wagtail/contrib/forms/models.py
@@ -59,7 +59,7 @@ def get_data(self):
}
def __str__(self):
- return self.form_data
+ return f"{self.form_data}"
class Meta:
abstract = True
diff --git a/wagtail/contrib/forms/tests/test_models.py b/wagtail/contrib/forms/tests/test_models.py
index 4b395ab73413..11a565522f44 100644
--- a/wagtail/contrib/forms/tests/test_models.py
+++ b/wagtail/contrib/forms/tests/test_models.py
@@ -183,6 +183,26 @@ def test_invalid_from_address(self):
with self.assertRaises(ValidationError):
make_form_page(from_address="not an email")
+ def test_string_representation_form_submission(self):
+ """
+ Ensure that a form submission can be logged / printed without error.
+ Broke when converting field to JSON - see #8927
+ """
+
+ self.client.post(
+ "/contact-us/",
+ {
+ "your_email": "[email protected]",
+ "your_message": "hello world",
+ "your_choices": {},
+ },
+ )
+
+ self.assertGreaterEqual(FormSubmission.objects.count(), 1)
+
+ submission = FormSubmission.objects.first()
+ self.assertIn("hello world", str(submission))
+
class TestFormWithCustomSubmission(TestCase, WagtailTestUtils):
def setUp(self):
|
mlcommons__GaNDLF-809 | `gdown` does not seem to be working
**Describe the bug**
Current CI seems to be broken.
**To Reproduce**
Steps to reproduce the behavior:
1. Run any CI test
2. See error:
```python-traceback
[SNIP!]
if gdrive_file_id and is_gdrive_download_link:
content_disposition = six.moves.urllib_parse.unquote(
res.headers["Content-Disposition"]
)
m = re.search(r"filename\*=UTF-8''(.*)", content_disposition)
> filename_from_url = m.groups()[0]
E AttributeError: 'NoneType' object has no attribute 'groups'
```
Example: https://github.com/mlcommons/GaNDLF/actions/runs/7489779631/job/20387346791?pr=764#step:9:219
**Expected behavior**
The sample data file download should work.
**Screenshots**
N.A.
**GaNDLF Version**
Current master
**Desktop (please complete the following information):**
N.A.
**Additional context**
Basically, it is this error: https://github.com/wkentaro/gdown/issues/291
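For context, the upstream failure comes down to gdown assuming that Google Drive always returns a `filename*=UTF-8''...` portion in the `Content-Disposition` header. A minimal sketch of that failure mode is below (the header value is hypothetical, and this only illustrates the gdown bug; the fix adopted here simply pins a newer gdown in `setup.py`):
```python
import re

# Hypothetical Content-Disposition value that lacks the UTF-8 filename part.
content_disposition = "attachment; filename=sample_data.zip"

m = re.search(r"filename\*=UTF-8''(.*)", content_disposition)
# Affected gdown releases called m.groups() unconditionally, so a missing match
# raised AttributeError: 'NoneType' object has no attribute 'groups'.
filename_from_url = m.groups()[0] if m else None
print(filename_from_url)  # -> None instead of a crash
```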
| [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error)\n )\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (filepath, error)\n )\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [\n os.path.join(\"../\", item) for item in all_extra_files\n]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==2.1.2\",\n \"black==23.11.0\",\n \"numpy==1.25.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.19.5\",\n \"pandas>=2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown==4.6.3\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==1.1.2\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.3\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n \"monai==1.3.0\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">3.8, <3.12\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n 
\"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n \"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | [
{
"content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport sys, re, os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\ntry:\n with open(\"README.md\") as readme_file:\n readme = readme_file.read()\nexcept Exception as error:\n readme = \"No README information found.\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (\"README.md\", error)\n )\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\n \"Warning: Could not open '%s' due %s\\n\" % (filepath, error)\n )\n\n# Handle cases where specific files need to be bundled into the final package as installed via PyPI\ndockerfiles = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"Dockerfile-\"))\n]\nentrypoint_files = [\n item\n for item in os.listdir(os.path.dirname(os.path.abspath(__file__)))\n if (os.path.isfile(item) and item.startswith(\"gandlf_\"))\n]\nsetup_files = [\"setup.py\", \".dockerignore\", \"pyproject.toml\", \"MANIFEST.in\"]\nall_extra_files = dockerfiles + entrypoint_files + setup_files\nall_extra_files_pathcorrected = [\n os.path.join(\"../\", item) for item in all_extra_files\n]\n# find_packages should only ever find these as subpackages of gandlf, not as top-level packages\n# generate this dynamically?\n# GANDLF.GANDLF is needed to prevent recursion madness in deployments\ntoplevel_package_excludes = [\n \"GANDLF.GANDLF\",\n \"anonymize\",\n \"cli\",\n \"compute\",\n \"data\",\n \"grad_clipping\",\n \"losses\",\n \"metrics\",\n \"models\",\n \"optimizers\",\n \"schedulers\",\n \"utils\",\n]\n\n\nrequirements = [\n \"torch==2.1.2\",\n \"black==23.11.0\",\n \"numpy==1.25.0\",\n \"scipy\",\n \"SimpleITK!=2.0.*\",\n \"SimpleITK!=2.2.1\", # https://github.com/mlcommons/GaNDLF/issues/536\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.19.5\",\n \"pandas>=2.0.0\",\n \"scikit-learn>=0.23.2\",\n \"scikit-image>=0.19.1\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"tiffslide\",\n \"matplotlib\",\n \"gdown==5.1.0\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics==1.1.2\",\n \"zarr==2.10.3\",\n \"pydicom\",\n \"onnx\",\n \"torchinfo==1.7.0\",\n \"segmentation-models-pytorch==0.3.3\",\n \"ACSConv==0.1.1\",\n \"docker\",\n \"dicom-anonymizer\",\n \"twine\",\n \"zarr\",\n \"keyring\",\n \"monai==1.3.0\",\n]\n\nif __name__ == \"__main__\":\n setup(\n name=\"GANDLF\",\n version=__version__,\n author=\"MLCommons\",\n author_email=\"[email protected]\",\n python_requires=\">3.8, <3.12\",\n packages=find_packages(\n where=os.path.dirname(os.path.abspath(__file__)),\n exclude=toplevel_package_excludes,\n ),\n cmdclass={\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n 
\"gandlf_verifyInstall\",\n \"gandlf_configGenerator\",\n \"gandlf_recoverConfig\",\n \"gandlf_deploy\",\n \"gandlf_optimizeModel\",\n \"gandlf_generateMetrics\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps.\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"Apache-2.0\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n package_data={\"GANDLF\": all_extra_files_pathcorrected},\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging, clinical-workflows, deep-learning, pytorch\",\n zip_safe=False,\n )\n",
"path": "setup.py"
}
] | diff --git a/setup.py b/setup.py
index 3d9b06820..93222d5f4 100644
--- a/setup.py
+++ b/setup.py
@@ -98,7 +98,7 @@ def run(self):
"pyyaml",
"tiffslide",
"matplotlib",
- "gdown==4.6.3",
+ "gdown==5.1.0",
"pytest",
"coverage",
"pytest-cov",
|
numpy__numpy-3245 | 2to3 run `standarderror` fixer
| [
{
"content": "#!/usr/bin/env python3\n# -*- python -*-\n\"\"\"\n%prog SUBMODULE...\n\nHack to pipe submodules of Numpy through 2to3 and build them in-place\none-by-one.\n\nExample usage:\n\n python3 tools/py3tool.py testing distutils core\n\nThis will copy files to _py3k/numpy, add a dummy __init__.py and\nversion.py on the top level, and copy and 2to3 the files of the three\nsubmodules.\n\nWhen running py3tool again, only changed files are re-processed, which\nmakes the test-bugfix cycle faster.\n\n\"\"\"\nfrom __future__ import division, absolute_import, print_function\n\nfrom optparse import OptionParser\nimport shutil\nimport os\nimport sys\nimport re\nimport subprocess\nimport fnmatch\n\nif os.environ.get('USE_2TO3CACHE'):\n import lib2to3cache\n\nBASE = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))\nTEMP = os.path.normpath(os.path.join(BASE, '_py3k'))\n\nSCRIPT_2TO3 = os.path.join(BASE, 'tools', '2to3.py')\n\nEXTRA_2TO3_FLAGS = {\n 'numpy/core/defchararray.py': '-x unicode',\n 'numpy/compat/py3k.py': '-x unicode',\n 'numpy/ma/timer_comparison.py': 'skip',\n}\n\n# Names of fixers to skip when running 2to3. This is a complete list of\n# available fixers, with fixers not currently skipped commented out.\nFIXES_TO_SKIP = [\n 'apply',\n 'basestring',\n 'buffer',\n 'callable',\n 'dict',\n 'exec',\n 'execfile',\n 'exitfunc',\n 'filter',\n 'funcattrs',\n 'future',\n 'getcwdu',\n 'has_key',\n# 'idioms',\n 'import',\n 'imports',\n 'imports2',\n 'input',\n 'intern',\n# 'isinstance',\n 'itertools',\n 'itertools_imports',\n 'long',\n 'map',\n 'metaclass',\n 'methodattrs',\n 'ne',\n# 'next',\n 'nonzero',\n 'numliterals',\n 'operator',\n 'paren',\n 'print',\n 'raise',\n 'raw_input',\n 'reduce',\n 'renames',\n 'repr',\n 'setliteral',\n 'standarderror',\n 'sys_exc',\n 'throw',\n 'tuple_params',\n# 'types',\n# 'unicode',\n# 'urllib',\n# 'ws_comma',\n 'xrange',\n 'xreadlines',\n 'zip',\n]\n\nskip_fixes= []\nfor _t in FIXES_TO_SKIP:\n skip_fixes.append('-x')\n skip_fixes.append(_t)\n\n\ndef main():\n p = OptionParser(usage=__doc__.strip())\n p.add_option(\"--clean\", \"-c\", action=\"store_true\",\n help=\"clean source directory\")\n options, args = p.parse_args()\n\n if not args:\n p.error('no submodules given')\n else:\n dirs = ['numpy/%s' % x for x in map(os.path.basename, args)]\n\n # Prepare\n if not os.path.isdir(TEMP):\n os.makedirs(TEMP)\n\n # Set up dummy files (for building only submodules)\n dummy_files = {\n '__init__.py': 'from numpy.version import version as __version__',\n 'version.py': 'version = \"1.4.0.dev\"'\n }\n\n for fn, content in dummy_files.items():\n fn = os.path.join(TEMP, 'numpy', fn)\n if not os.path.isfile(fn):\n try:\n os.makedirs(os.path.dirname(fn))\n except OSError:\n pass\n f = open(fn, 'wb+')\n f.write(content.encode('ascii'))\n f.close()\n\n # Environment\n pp = [os.path.abspath(TEMP)]\n def getenv():\n env = dict(os.environ)\n env.update({'PYTHONPATH': ':'.join(pp)})\n return env\n\n # Copy\n for d in dirs:\n src = os.path.join(BASE, d)\n dst = os.path.join(TEMP, d)\n\n # Run 2to3\n sync_2to3(dst=dst,\n src=src,\n patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),\n clean=options.clean)\n\n # Run setup.py, falling back to Pdb post-mortem on exceptions\n setup_py = os.path.join(dst, 'setup.py')\n if os.path.isfile(setup_py):\n code = \"\"\"\\\nimport pdb, sys, traceback\np = pdb.Pdb()\ntry:\n import __main__\n __main__.__dict__.update({\n \"__name__\": \"__main__\", \"__file__\": \"setup.py\",\n \"__builtins__\": __builtins__})\n fp = 
open(\"setup.py\", \"rb\")\n try:\n exec(compile(fp.read(), \"setup.py\", 'exec'))\n finally:\n fp.close()\nexcept SystemExit:\n raise\nexcept:\n traceback.print_exc()\n t = sys.exc_info()[2]\n p.interaction(None, t)\n\"\"\"\n ret = subprocess.call([sys.executable, '-c', code,\n 'build_ext', '-i'],\n cwd=dst,\n env=getenv())\n if ret != 0:\n raise RuntimeError(\"Build failed.\")\n\n # Run nosetests\n subprocess.call(['nosetests3', '-v', d], cwd=TEMP)\n\n\ndef walk_sync(dir1, dir2, _seen=None):\n if _seen is None:\n seen = {}\n else:\n seen = _seen\n\n if not dir1.endswith(os.path.sep):\n dir1 = dir1 + os.path.sep\n\n # Walk through stuff (which we haven't yet gone through) in dir1\n for root, dirs, files in os.walk(dir1):\n sub = root[len(dir1):]\n if sub in seen:\n dirs = [x for x in dirs if x not in seen[sub][0]]\n files = [x for x in files if x not in seen[sub][1]]\n seen[sub][0].extend(dirs)\n seen[sub][1].extend(files)\n else:\n seen[sub] = (dirs, files)\n if not dirs and not files:\n continue\n yield os.path.join(dir1, sub), os.path.join(dir2, sub), dirs, files\n\n if _seen is None:\n # Walk through stuff (which we haven't yet gone through) in dir2\n for root2, root1, dirs, files in walk_sync(dir2, dir1, _seen=seen):\n yield root1, root2, dirs, files\n\ndef sync_2to3(src, dst, patchfile=None, clean=False):\n import lib2to3.main\n from io import StringIO\n\n to_convert = []\n\n for src_dir, dst_dir, dirs, files in walk_sync(src, dst):\n for fn in dirs + files:\n src_fn = os.path.join(src_dir, fn)\n dst_fn = os.path.join(dst_dir, fn)\n\n # skip temporary etc. files\n if fn.startswith('.#') or fn.endswith('~'):\n continue\n\n # remove non-existing\n if os.path.exists(dst_fn) and not os.path.exists(src_fn):\n if clean:\n if os.path.isdir(dst_fn):\n shutil.rmtree(dst_fn)\n else:\n os.unlink(dst_fn)\n continue\n\n # make directories\n if os.path.isdir(src_fn):\n if not os.path.isdir(dst_fn):\n os.makedirs(dst_fn)\n continue\n\n dst_dir = os.path.dirname(dst_fn)\n if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):\n os.makedirs(dst_dir)\n\n # don't replace up-to-date files\n try:\n if os.path.isfile(dst_fn) and \\\n os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:\n continue\n except OSError:\n pass\n\n # copy file\n shutil.copyfile(src_fn, dst_fn)\n\n # add .py files to 2to3 list\n if dst_fn.endswith('.py'):\n to_convert.append((src_fn, dst_fn))\n\n # run 2to3\n flag_sets = {}\n for fn, dst_fn in to_convert:\n flag = ''\n for pat, opt in EXTRA_2TO3_FLAGS.items():\n if fnmatch.fnmatch(fn, pat):\n flag = opt\n break\n flag_sets.setdefault(flag, []).append(dst_fn)\n\n if patchfile:\n p = open(patchfile, 'wb+')\n else:\n p = open(os.devnull, 'wb')\n\n for flags, filenames in flag_sets.items():\n if flags == 'skip':\n continue\n\n _old_stdout = sys.stdout\n try:\n sys.stdout = StringIO()\n opt = []\n opt.extend(['-w', '-n'])\n opt.extend(skip_fixes)\n opt.extend(flags.split())\n opt.extend(filenames)\n lib2to3.main.main(\"lib2to3.fixes\", opt)\n finally:\n sys.stdout = _old_stdout\n\n p.close()\n\nif __name__ == \"__main__\":\n main()\n",
"path": "tools/py3tool.py"
}
] | [
{
"content": "#!/usr/bin/env python3\n# -*- python -*-\n\"\"\"\n%prog SUBMODULE...\n\nHack to pipe submodules of Numpy through 2to3 and build them in-place\none-by-one.\n\nExample usage:\n\n python3 tools/py3tool.py testing distutils core\n\nThis will copy files to _py3k/numpy, add a dummy __init__.py and\nversion.py on the top level, and copy and 2to3 the files of the three\nsubmodules.\n\nWhen running py3tool again, only changed files are re-processed, which\nmakes the test-bugfix cycle faster.\n\n\"\"\"\nfrom __future__ import division, absolute_import, print_function\n\nfrom optparse import OptionParser\nimport shutil\nimport os\nimport sys\nimport re\nimport subprocess\nimport fnmatch\n\nif os.environ.get('USE_2TO3CACHE'):\n import lib2to3cache\n\nBASE = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))\nTEMP = os.path.normpath(os.path.join(BASE, '_py3k'))\n\nSCRIPT_2TO3 = os.path.join(BASE, 'tools', '2to3.py')\n\nEXTRA_2TO3_FLAGS = {\n 'numpy/core/defchararray.py': '-x unicode',\n 'numpy/compat/py3k.py': '-x unicode',\n 'numpy/ma/timer_comparison.py': 'skip',\n}\n\n# Names of fixers to skip when running 2to3. This is a complete list of\n# available fixers, with fixers not currently skipped commented out.\nFIXES_TO_SKIP = [\n 'apply',\n 'basestring',\n 'buffer',\n 'callable',\n 'dict',\n 'exec',\n 'execfile',\n 'exitfunc',\n 'filter',\n 'funcattrs',\n 'future',\n 'getcwdu',\n 'has_key',\n# 'idioms',\n 'import',\n 'imports',\n 'imports2',\n 'input',\n 'intern',\n 'isinstance',\n 'itertools',\n 'itertools_imports',\n 'long',\n 'map',\n 'metaclass',\n 'methodattrs',\n 'ne',\n# 'next',\n 'nonzero',\n 'numliterals',\n 'operator',\n 'paren',\n 'print',\n 'raise',\n 'raw_input',\n 'reduce',\n 'renames',\n 'repr',\n 'setliteral',\n 'standarderror',\n 'sys_exc',\n 'throw',\n 'tuple_params',\n# 'types',\n# 'unicode',\n# 'urllib',\n# 'ws_comma',\n 'xrange',\n 'xreadlines',\n 'zip',\n]\n\nskip_fixes= []\nfor _t in FIXES_TO_SKIP:\n skip_fixes.append('-x')\n skip_fixes.append(_t)\n\n\ndef main():\n p = OptionParser(usage=__doc__.strip())\n p.add_option(\"--clean\", \"-c\", action=\"store_true\",\n help=\"clean source directory\")\n options, args = p.parse_args()\n\n if not args:\n p.error('no submodules given')\n else:\n dirs = ['numpy/%s' % x for x in map(os.path.basename, args)]\n\n # Prepare\n if not os.path.isdir(TEMP):\n os.makedirs(TEMP)\n\n # Set up dummy files (for building only submodules)\n dummy_files = {\n '__init__.py': 'from numpy.version import version as __version__',\n 'version.py': 'version = \"1.4.0.dev\"'\n }\n\n for fn, content in dummy_files.items():\n fn = os.path.join(TEMP, 'numpy', fn)\n if not os.path.isfile(fn):\n try:\n os.makedirs(os.path.dirname(fn))\n except OSError:\n pass\n f = open(fn, 'wb+')\n f.write(content.encode('ascii'))\n f.close()\n\n # Environment\n pp = [os.path.abspath(TEMP)]\n def getenv():\n env = dict(os.environ)\n env.update({'PYTHONPATH': ':'.join(pp)})\n return env\n\n # Copy\n for d in dirs:\n src = os.path.join(BASE, d)\n dst = os.path.join(TEMP, d)\n\n # Run 2to3\n sync_2to3(dst=dst,\n src=src,\n patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),\n clean=options.clean)\n\n # Run setup.py, falling back to Pdb post-mortem on exceptions\n setup_py = os.path.join(dst, 'setup.py')\n if os.path.isfile(setup_py):\n code = \"\"\"\\\nimport pdb, sys, traceback\np = pdb.Pdb()\ntry:\n import __main__\n __main__.__dict__.update({\n \"__name__\": \"__main__\", \"__file__\": \"setup.py\",\n \"__builtins__\": __builtins__})\n fp = 
open(\"setup.py\", \"rb\")\n try:\n exec(compile(fp.read(), \"setup.py\", 'exec'))\n finally:\n fp.close()\nexcept SystemExit:\n raise\nexcept:\n traceback.print_exc()\n t = sys.exc_info()[2]\n p.interaction(None, t)\n\"\"\"\n ret = subprocess.call([sys.executable, '-c', code,\n 'build_ext', '-i'],\n cwd=dst,\n env=getenv())\n if ret != 0:\n raise RuntimeError(\"Build failed.\")\n\n # Run nosetests\n subprocess.call(['nosetests3', '-v', d], cwd=TEMP)\n\n\ndef walk_sync(dir1, dir2, _seen=None):\n if _seen is None:\n seen = {}\n else:\n seen = _seen\n\n if not dir1.endswith(os.path.sep):\n dir1 = dir1 + os.path.sep\n\n # Walk through stuff (which we haven't yet gone through) in dir1\n for root, dirs, files in os.walk(dir1):\n sub = root[len(dir1):]\n if sub in seen:\n dirs = [x for x in dirs if x not in seen[sub][0]]\n files = [x for x in files if x not in seen[sub][1]]\n seen[sub][0].extend(dirs)\n seen[sub][1].extend(files)\n else:\n seen[sub] = (dirs, files)\n if not dirs and not files:\n continue\n yield os.path.join(dir1, sub), os.path.join(dir2, sub), dirs, files\n\n if _seen is None:\n # Walk through stuff (which we haven't yet gone through) in dir2\n for root2, root1, dirs, files in walk_sync(dir2, dir1, _seen=seen):\n yield root1, root2, dirs, files\n\ndef sync_2to3(src, dst, patchfile=None, clean=False):\n import lib2to3.main\n from io import StringIO\n\n to_convert = []\n\n for src_dir, dst_dir, dirs, files in walk_sync(src, dst):\n for fn in dirs + files:\n src_fn = os.path.join(src_dir, fn)\n dst_fn = os.path.join(dst_dir, fn)\n\n # skip temporary etc. files\n if fn.startswith('.#') or fn.endswith('~'):\n continue\n\n # remove non-existing\n if os.path.exists(dst_fn) and not os.path.exists(src_fn):\n if clean:\n if os.path.isdir(dst_fn):\n shutil.rmtree(dst_fn)\n else:\n os.unlink(dst_fn)\n continue\n\n # make directories\n if os.path.isdir(src_fn):\n if not os.path.isdir(dst_fn):\n os.makedirs(dst_fn)\n continue\n\n dst_dir = os.path.dirname(dst_fn)\n if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):\n os.makedirs(dst_dir)\n\n # don't replace up-to-date files\n try:\n if os.path.isfile(dst_fn) and \\\n os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:\n continue\n except OSError:\n pass\n\n # copy file\n shutil.copyfile(src_fn, dst_fn)\n\n # add .py files to 2to3 list\n if dst_fn.endswith('.py'):\n to_convert.append((src_fn, dst_fn))\n\n # run 2to3\n flag_sets = {}\n for fn, dst_fn in to_convert:\n flag = ''\n for pat, opt in EXTRA_2TO3_FLAGS.items():\n if fnmatch.fnmatch(fn, pat):\n flag = opt\n break\n flag_sets.setdefault(flag, []).append(dst_fn)\n\n if patchfile:\n p = open(patchfile, 'wb+')\n else:\n p = open(os.devnull, 'wb')\n\n for flags, filenames in flag_sets.items():\n if flags == 'skip':\n continue\n\n _old_stdout = sys.stdout\n try:\n sys.stdout = StringIO()\n opt = []\n opt.extend(['-w', '-n'])\n opt.extend(skip_fixes)\n opt.extend(flags.split())\n opt.extend(filenames)\n lib2to3.main.main(\"lib2to3.fixes\", opt)\n finally:\n sys.stdout = _old_stdout\n\n p.close()\n\nif __name__ == \"__main__\":\n main()\n",
"path": "tools/py3tool.py"
}
] | diff --git a/tools/py3tool.py b/tools/py3tool.py
index 48d5ba2f75c5..a6fd5b3f4b4f 100755
--- a/tools/py3tool.py
+++ b/tools/py3tool.py
@@ -64,7 +64,7 @@
'imports2',
'input',
'intern',
-# 'isinstance',
+ 'isinstance',
'itertools',
'itertools_imports',
'long',
|
fail2ban__fail2ban-249 | weak regex'es for apache
See email on fail2ban-users
| [
{
"content": "# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-\n# vi: set ft=python sts=4 ts=4 sw=4 noet :\n\n# This file is part of Fail2Ban.\n#\n# Fail2Ban is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# Fail2Ban is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Fail2Ban; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n\n# Author: Cyril Jaquier\n#\n\n__author__ = \"Cyril Jaquier, Yaroslav Halchenko\"\n__copyright__ = \"Copyright (c) 2004 Cyril Jaquier, 2011-2013 Yaroslav Halchenko\"\n__license__ = \"GPL\"\n\nversion = \"0.8.9.dev\"\n",
"path": "common/version.py"
}
] | [
{
"content": "# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-\n# vi: set ft=python sts=4 ts=4 sw=4 noet :\n\n# This file is part of Fail2Ban.\n#\n# Fail2Ban is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# Fail2Ban is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Fail2Ban; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n\n# Author: Cyril Jaquier\n#\n\n__author__ = \"Cyril Jaquier, Yaroslav Halchenko\"\n__copyright__ = \"Copyright (c) 2004 Cyril Jaquier, 2011-2013 Yaroslav Halchenko\"\n__license__ = \"GPL\"\n\nversion = \"0.8.10\"\n",
"path": "common/version.py"
}
] | diff --git a/ChangeLog b/ChangeLog
index e58bce068d..230ee10d45 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -4,17 +4,23 @@
|_| \__,_|_|_/___|_.__/\__,_|_||_|
================================================================================
-Fail2Ban (version 0.8.9.dev) 2013/??/??
+Fail2Ban (version 0.8.10) 2013/06/12
================================================================================
-ver. 0.8.10 (2013/XX/XXX) - NOT-YET-RELEASED
+ver. 0.8.10 (2013/06/12) - wanna-be-secure
-----------
-- Fixes:
- Yaroslav Halchenko
+Primarily bugfix and enhancements release, triggered by "bugs" in
+apache- filters. If you are relying on listed below apache- filters,
+upgrade asap and seek your distributions to patch their fail2ban
+distribution with [6ccd5781].
+
+- Fixes: Yaroslav Halchenko
+ * [6ccd5781] filter.d/apache-{auth,nohome,noscript,overflows} - anchor
+ failregex at the beginning (and where applicable at the end).
+ Addresses a possible DoS. Closes gh-248
* action.d/{route,shorewall}.conf - blocktype must be defined
within [Init]. Closes gh-232
-- New Features
- Enhancements
Yaroslav Halchenko
* jail.conf -- assure all jails have actions and remove unused
@@ -23,10 +29,10 @@ ver. 0.8.10 (2013/XX/XXX) - NOT-YET-RELEASED
* config/filter.d/roundcube-auth.conf -- support roundcube 0.9+
Daniel Black
* files/suse-initd -- update to the copy from stock SUSE
- silviogarbes
- * Updates to asterisk filter closes gh-227/gh-230.
- Carlos Alberto Lopez Perez
- * Updates to asterisk to include AUTH_UNKNOWN_DOMAIN - gh-244.
+ silviogarbes & Daniel Black
+ * Updates to asterisk filter. Closes gh-227/gh-230.
+ Carlos Alberto Lopez Perez
+ * Updates to asterisk to include AUTH_UNKNOWN_DOMAIN. Closes gh-244.
ver. 0.8.9 (2013/05/13) - wanna-be-stable
----------
diff --git a/README.md b/README.md
index 91deaf195a..05e92fd64b 100644
--- a/README.md
+++ b/README.md
@@ -2,9 +2,9 @@
/ _|__ _(_) |_ ) |__ __ _ _ _
| _/ _` | | |/ /| '_ \/ _` | ' \
|_| \__,_|_|_/___|_.__/\__,_|_||_|
- v0.8.9 2013/05/13
+ v0.8.10 2013/06/12
-## Fail2Ban: ban hosts that cause multiple authentication errors
+## Fail2Ban: ban hosts that cause multiple authentication errors
Fail2Ban scans log files like /var/log/pwdfail and bans IP that makes too many
password failures. It updates firewall rules to reject the IP address. These
@@ -30,8 +30,8 @@ Optional:
To install, just do:
- tar xvfj fail2ban-0.8.9.tar.bz2
- cd fail2ban-0.8.9
+ tar xvfj fail2ban-0.8.10.tar.bz2
+ cd fail2ban-0.8.10
python setup.py install
This will install Fail2Ban into /usr/share/fail2ban. The executable scripts are
@@ -63,9 +63,14 @@ Code status:
Contact:
--------
+### You found a severe security vulnerability in Fail2Ban?
+email details to fail2ban-vulnerabilities at lists dot sourceforge dot net .
+
### You need some new features, you found bugs?
visit [Issues](https://github.com/fail2ban/fail2ban/issues)
-and if your issue is not yet known -- file a bug report.
+and if your issue is not yet known -- file a bug report. See
+[Fail2Ban wiki](http://www.fail2ban.org/wiki/index.php/HOWTO_Seek_Help)
+on further instructions.
### You would like to troubleshoot or discuss?
join the [mailing list](https://lists.sourceforge.net/lists/listinfo/fail2ban-users)
diff --git a/common/version.py b/common/version.py
index 86c45760f9..fe99f95e60 100644
--- a/common/version.py
+++ b/common/version.py
@@ -24,4 +24,4 @@
__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2011-2013 Yaroslav Halchenko"
__license__ = "GPL"
-version = "0.8.9.dev"
+version = "0.8.10"
diff --git a/config/filter.d/apache-auth.conf b/config/filter.d/apache-auth.conf
index 66f6a1d620..ae3232f246 100644
--- a/config/filter.d/apache-auth.conf
+++ b/config/filter.d/apache-auth.conf
@@ -4,6 +4,12 @@
#
#
+[INCLUDES]
+
+# Read common prefixes. If any customizations available -- read them from
+# common.local
+before = apache-common.conf
+
[Definition]
# Option: failregex
@@ -13,9 +19,7 @@
# (?:::f{4,6}:)?(?P<host>[\w\-.^_]+)
# Values: TEXT
#
-failregex = [[]client <HOST>[]] user .* authentication failure
- [[]client <HOST>[]] user .* not found
- [[]client <HOST>[]] user .* password mismatch
+failregex = ^%(_apache_error_client)s user .* (authentication failure|not found|password mismatch)\s*$
# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
diff --git a/config/filter.d/apache-common.conf b/config/filter.d/apache-common.conf
new file mode 100644
index 0000000000..c3829e2fb0
--- /dev/null
+++ b/config/filter.d/apache-common.conf
@@ -0,0 +1,17 @@
+# Generic configuration items (to be used as interpolations) in other
+# apache filters
+#
+# Author: Yaroslav Halchenko
+#
+#
+
+[INCLUDES]
+
+# Load customizations if any available
+after = apache-common.local
+
+
+[DEFAULT]
+
+# Common prefix for [error] apache messages which also would include <HOST>
+_apache_error_client = \[[^]]+\] \[error\] \[client <HOST>\]
diff --git a/config/filter.d/apache-nohome.conf b/config/filter.d/apache-nohome.conf
index 6e738c6850..1347b10d62 100644
--- a/config/filter.d/apache-nohome.conf
+++ b/config/filter.d/apache-nohome.conf
@@ -4,6 +4,12 @@
#
#
+[INCLUDES]
+
+# Read common prefixes. If any customizations available -- read them from
+# common.local
+before = apache-common.conf
+
[Definition]
# Option: failregex
@@ -13,7 +19,7 @@
# per-domain log files.
# Values: TEXT
#
-failregex = [[]client <HOST>[]] File does not exist: .*/~.*
+failregex = ^%(_apache_error_client)s File does not exist: .*/~.*
# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
diff --git a/config/filter.d/apache-noscript.conf b/config/filter.d/apache-noscript.conf
index 5b48cb32b3..295e1b9fc6 100644
--- a/config/filter.d/apache-noscript.conf
+++ b/config/filter.d/apache-noscript.conf
@@ -4,6 +4,12 @@
#
#
+[INCLUDES]
+
+# Read common prefixes. If any customizations available -- read them from
+# common.local
+before = apache-common.conf
+
[Definition]
# Option: failregex
@@ -13,8 +19,8 @@
# (?:::f{4,6}:)?(?P<host>[\w\-.^_]+)
# Values: TEXT
#
-failregex = [[]client <HOST>[]] (File does not exist|script not found or unable to stat): /\S*(\.php|\.asp|\.exe|\.pl)
- [[]client <HOST>[]] script '/\S*(\.php|\.asp|\.exe|\.pl)\S*' not found or unable to stat *$
+failregex = ^%(_apache_error_client)s (File does not exist|script not found or unable to stat): /\S*(\.php|\.asp|\.exe|\.pl)\s*$
+ ^%(_apache_error_client)s script '/\S*(\.php|\.asp|\.exe|\.pl)\S*' not found or unable to stat\s*$
# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
diff --git a/config/filter.d/apache-overflows.conf b/config/filter.d/apache-overflows.conf
index e25b79a4e7..1cf08db736 100644
--- a/config/filter.d/apache-overflows.conf
+++ b/config/filter.d/apache-overflows.conf
@@ -4,13 +4,19 @@
#
#
+[INCLUDES]
+
+# Read common prefixes. If any customizations available -- read them from
+# common.local
+before = apache-common.conf
+
[Definition]
# Option: failregex
# Notes.: Regexp to catch Apache overflow attempts.
# Values: TEXT
#
-failregex = [[]client <HOST>[]] (Invalid (method|URI) in request|request failed: URI too long|erroneous characters after protocol string)
+failregex = ^%(_apache_error_client)s (Invalid (method|URI) in request|request failed: URI too long|erroneous characters after protocol string)
# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
diff --git a/man/fail2ban-client.1 b/man/fail2ban-client.1
index d7d620bc07..a6eb461e18 100644
--- a/man/fail2ban-client.1
+++ b/man/fail2ban-client.1
@@ -1,12 +1,12 @@
-.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.10.
-.TH FAIL2BAN-CLIENT "1" "May 2013" "fail2ban-client v0.8.9" "User Commands"
+.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.41.2.
+.TH FAIL2BAN-CLIENT "1" "June 2013" "fail2ban-client v0.8.10" "User Commands"
.SH NAME
fail2ban-client \- configure and control the server
.SH SYNOPSIS
.B fail2ban-client
[\fIOPTIONS\fR] \fI<COMMAND>\fR
.SH DESCRIPTION
-Fail2Ban v0.8.9 reads log file that contains password failure report
+Fail2Ban v0.8.10 reads log file that contains password failure report
and bans the corresponding IP addresses using firewall rules.
.SH OPTIONS
.TP
diff --git a/man/fail2ban-regex.1 b/man/fail2ban-regex.1
index a42d96d558..379cd76171 100644
--- a/man/fail2ban-regex.1
+++ b/man/fail2ban-regex.1
@@ -1,12 +1,12 @@
-.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.10.
-.TH FAIL2BAN-REGEX "1" "May 2013" "fail2ban-regex v0.8.9" "User Commands"
+.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.41.2.
+.TH FAIL2BAN-REGEX "1" "June 2013" "fail2ban-regex v0.8.10" "User Commands"
.SH NAME
fail2ban-regex \- test Fail2ban "failregex" option
.SH SYNOPSIS
.B fail2ban-regex
[\fIOPTIONS\fR] \fI<LOG> <REGEX> \fR[\fIIGNOREREGEX\fR]
.SH DESCRIPTION
-Fail2Ban v0.8.9 reads log file that contains password failure report
+Fail2Ban v0.8.10 reads log file that contains password failure report
and bans the corresponding IP addresses using firewall rules.
.PP
This tools can test regular expressions for "fail2ban".
@@ -26,7 +26,7 @@ verbose output
a string representing a log line
.TP
\fBfilename\fR
-path to a log file (/var/log/auth.log)
+path to a log file (\fI/var/log/auth.log\fP)
.SH REGEX
.TP
\fBstring\fR
diff --git a/man/fail2ban-server.1 b/man/fail2ban-server.1
index 43e9d6d405..3851db9130 100644
--- a/man/fail2ban-server.1
+++ b/man/fail2ban-server.1
@@ -1,12 +1,12 @@
-.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.10.
-.TH FAIL2BAN-SERVER "1" "May 2013" "fail2ban-server v0.8.9" "User Commands"
+.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.41.2.
+.TH FAIL2BAN-SERVER "1" "June 2013" "fail2ban-server v0.8.10" "User Commands"
.SH NAME
fail2ban-server \- start the server
.SH SYNOPSIS
.B fail2ban-server
[\fIOPTIONS\fR]
.SH DESCRIPTION
-Fail2Ban v0.8.9 reads log file that contains password failure report
+Fail2Ban v0.8.10 reads log file that contains password failure report
and bans the corresponding IP addresses using firewall rules.
.PP
Only use this command for debugging purpose. Start the server with
diff --git a/testcases/files/logs/apache-auth b/testcases/files/logs/apache-auth
new file mode 100644
index 0000000000..cf0f6d3025
--- /dev/null
+++ b/testcases/files/logs/apache-auth
@@ -0,0 +1,5 @@
+# Should not match -- DoS vector https://vndh.net/note:fail2ban-089-denial-service
+[Sat Jun 01 02:17:42 2013] [error] [client 192.168.33.1] File does not exist: /srv/http/site/[client 192.168.0.1] user root not found
+
+# should match
+[Sat Jun 01 02:17:42 2013] [error] [client 192.168.0.2] user root not found
diff --git a/testcases/files/logs/apache-noscript b/testcases/files/logs/apache-noscript
new file mode 100644
index 0000000000..5d5d35ff58
--- /dev/null
+++ b/testcases/files/logs/apache-noscript
@@ -0,0 +1 @@
+[Sun Jun 09 07:57:47 2013] [error] [client 192.0.43.10] script '/usr/lib/cgi-bin/gitweb.cgiwp-login.php' not found or unable to stat
|
statsmodels__statsmodels-3976 | The compat modules should use absolute imports
The [statsmodels.compat.collections](https://github.com/statsmodels/statsmodels/blob/a88830efc3a99cfbe0ebc9fbfd77820fe748fc59/statsmodels/compat/collections.py#L7) module imports the namesake standard library module without requesting absolute imports. While it seems to work in many cases, it causes a problem for packages that override `__import__`. See enlnt/pyq#18.
Please consider adding
```python
from __future__ import absolute_import
```
to the compat modules.
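A minimal sketch of why the flag matters (an illustration of Python 2 import semantics, not code from the statsmodels tree): without it, the import inside `statsmodels/compat/collections.py` is first attempted as an implicit relative import of its own namesake module, and custom `__import__` hooks can then resolve it incorrectly.
```python
# statsmodels/compat/collections.py (sketch)
from __future__ import absolute_import  # make "collections" mean the stdlib

# Without the flag, Python 2 first tries the implicit relative import
# statsmodels.compat.collections (this very module); with it, the name below
# unambiguously refers to the standard library module.
from collections import Counter, OrderedDict
```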
| [
{
"content": "'''backported compatibility functions for Python's collections\n\n'''\n\ntry:\n #python >= 2.7\n from collections import OrderedDict\nexcept ImportError:\n #http://code.activestate.com/recipes/576693/\n #author: Raymond Hettinger\n from .ordereddict import OrderedDict\n\ntry:\n #python >= 2.7\n from collections import Counter\nexcept ImportError:\n #http://code.activestate.com/recipes/576611/\n #author: Raymond Hettinger\n from .counter import Counter\n",
"path": "statsmodels/compat/collections.py"
}
] | [
{
"content": "'''backported compatibility functions for Python's collections\n\n'''\nfrom __future__ import absolute_import\n\ntry:\n #python >= 2.7\n from collections import OrderedDict\nexcept ImportError:\n #http://code.activestate.com/recipes/576693/\n #author: Raymond Hettinger\n from .ordereddict import OrderedDict\n\ntry:\n #python >= 2.7\n from collections import Counter\nexcept ImportError:\n #http://code.activestate.com/recipes/576611/\n #author: Raymond Hettinger\n from .counter import Counter\n",
"path": "statsmodels/compat/collections.py"
}
] | diff --git a/statsmodels/compat/collections.py b/statsmodels/compat/collections.py
index c6366b5810d..f796b340ad2 100644
--- a/statsmodels/compat/collections.py
+++ b/statsmodels/compat/collections.py
@@ -1,6 +1,7 @@
'''backported compatibility functions for Python's collections
'''
+from __future__ import absolute_import
try:
#python >= 2.7
|
graphql-python__graphene-django-701 | Found different types with the same name in the schema: ErrorType, ErrorType.
After updating from 2.1.3 to 2.1.6 this error shows up.
It seems that importing `ErrorType` from `graphene_django/forms/types.py` is obsolete now, as `ErrorType` now lives in `graphene_django/types.py`.
Should this module be removed, or should it simply import `ErrorType` from the new location for backwards compatibility?
| [
{
"content": "import graphene\n\n\nclass ErrorType(graphene.ObjectType):\n field = graphene.String()\n messages = graphene.List(graphene.String)\n",
"path": "graphene_django/forms/types.py"
}
] | [
{
"content": "import graphene\n\nfrom ..types import ErrorType # noqa Import ErrorType for backwards compatability\n",
"path": "graphene_django/forms/types.py"
}
] | diff --git a/graphene_django/forms/types.py b/graphene_django/forms/types.py
index 1fe33f38e..5005040f6 100644
--- a/graphene_django/forms/types.py
+++ b/graphene_django/forms/types.py
@@ -1,6 +1,3 @@
import graphene
-
-class ErrorType(graphene.ObjectType):
- field = graphene.String()
- messages = graphene.List(graphene.String)
+from ..types import ErrorType # noqa Import ErrorType for backwards compatability
|
aws__aws-cli-3790 | The aws-cli bundle package uses an insecure version of PyYAML
### awscli version:<br>
`aws-cli/1.16.52 Python/2.7.15 Linux/4.14.77-69.57.amzn1.x86_64 exec-env/AWS_ECS_EC2 botocore/1.12.42`
[NVD entry](https://nvd.nist.gov/vuln/detail/CVE-2017-18342)
This issue was found when vulnerability alerts started appearing in Twistlock in response to scans of Docker images that we are using in several applications. The generic finding reported by these scans is as follows:<br>
```
Impacted versions: <=3.13
In PyYAML before 4.1, the yaml.load() API could execute arbitrary code. In other words, yaml.safe_load is not used.
```
These images do not use PyYAML natively, so this led us to a Docker `RUN` line in a Dockerfile that executes a script which installs the `aws-cli` bundle from the following URL:<br>
`https://s3.amazonaws.com/aws-cli/awscli-bundle.zip`
Unpacking this archive shows a list of package dependencies that includes the vulnerable version of PyYAML:<br>
`awscli-bundle/packages/PyYAML-3.13.tar.gz`
The latest (and actually secure) version of PyYAML appears to be 4.1 according to the developer via the [GitHub repo](https://github.com/yaml/pyyaml).
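To make the risk concrete, here is a small illustration of the difference between `yaml.load` and `yaml.safe_load` (the payload is hypothetical and unrelated to the aws-cli code in the patch below): on PyYAML <= 3.13 the default `yaml.load` constructs arbitrary Python objects from tagged YAML, which is exactly what the advisory describes.
```python
import yaml

payload = "!!python/object/apply:os.getcwd []"

print(yaml.safe_load("key: value"))  # plain data parses fine: {'key': 'value'}

try:
    yaml.safe_load(payload)  # safe_load refuses python/object tags
except yaml.YAMLError as exc:
    print("rejected:", exc)

# On PyYAML <= 3.13, yaml.load(payload) would instead call os.getcwd();
# swap in os.system and the YAML document runs arbitrary commands.
```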
### Request
Is it possible to have the patched version of PyYAML added to this bundle to avoid this vulnerability?
Thank you!
| [
{
"content": "# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\nimport json\nimport yaml\n\nfrom awscli.customizations.ecs import exceptions\n\nMAX_CHAR_LENGTH = 46\nAPP_PREFIX = 'AppECS-'\nDGP_PREFIX = 'DgpECS-'\n\n\ndef find_required_key(resource_name, obj, key):\n\n if obj is None:\n raise exceptions.MissingPropertyError(\n resource=resource_name, prop_name=key)\n\n result = _get_case_insensitive_key(obj, key)\n\n if result is None:\n raise exceptions.MissingPropertyError(\n resource=resource_name, prop_name=key)\n else:\n return result\n\n\ndef _get_case_insensitive_key(target_obj, target_key):\n key_to_match = target_key.lower()\n key_list = target_obj.keys()\n\n for key in key_list:\n if key.lower() == key_to_match:\n return key\n\n\ndef get_app_name(service, cluster, app_value):\n if app_value is not None:\n return app_value\n else:\n suffix = _get_ecs_suffix(service, cluster)\n return APP_PREFIX + suffix\n\n\ndef get_cluster_name_from_arn(arn):\n return arn.split('/')[1]\n\n\ndef get_deploy_group_name(service, cluster, dg_value):\n if dg_value is not None:\n return dg_value\n else:\n suffix = _get_ecs_suffix(service, cluster)\n return DGP_PREFIX + suffix\n\n\ndef _get_ecs_suffix(service, cluster):\n if cluster is None:\n cluster_name = 'default'\n else:\n cluster_name = cluster[:MAX_CHAR_LENGTH]\n\n return cluster_name + '-' + service[:MAX_CHAR_LENGTH]\n\n\ndef parse_appspec(appspec_str):\n try:\n return json.loads(appspec_str)\n except ValueError:\n return yaml.load(appspec_str)\n",
"path": "awscli/customizations/ecs/filehelpers.py"
}
] | [
{
"content": "# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\nimport json\nimport yaml\n\nfrom awscli.customizations.ecs import exceptions\n\nMAX_CHAR_LENGTH = 46\nAPP_PREFIX = 'AppECS-'\nDGP_PREFIX = 'DgpECS-'\n\n\ndef find_required_key(resource_name, obj, key):\n\n if obj is None:\n raise exceptions.MissingPropertyError(\n resource=resource_name, prop_name=key)\n\n result = _get_case_insensitive_key(obj, key)\n\n if result is None:\n raise exceptions.MissingPropertyError(\n resource=resource_name, prop_name=key)\n else:\n return result\n\n\ndef _get_case_insensitive_key(target_obj, target_key):\n key_to_match = target_key.lower()\n key_list = target_obj.keys()\n\n for key in key_list:\n if key.lower() == key_to_match:\n return key\n\n\ndef get_app_name(service, cluster, app_value):\n if app_value is not None:\n return app_value\n else:\n suffix = _get_ecs_suffix(service, cluster)\n return APP_PREFIX + suffix\n\n\ndef get_cluster_name_from_arn(arn):\n return arn.split('/')[1]\n\n\ndef get_deploy_group_name(service, cluster, dg_value):\n if dg_value is not None:\n return dg_value\n else:\n suffix = _get_ecs_suffix(service, cluster)\n return DGP_PREFIX + suffix\n\n\ndef _get_ecs_suffix(service, cluster):\n if cluster is None:\n cluster_name = 'default'\n else:\n cluster_name = cluster[:MAX_CHAR_LENGTH]\n\n return cluster_name + '-' + service[:MAX_CHAR_LENGTH]\n\n\ndef parse_appspec(appspec_str):\n try:\n return json.loads(appspec_str)\n except ValueError:\n return yaml.safe_load(appspec_str)\n",
"path": "awscli/customizations/ecs/filehelpers.py"
}
] | diff --git a/awscli/customizations/ecs/filehelpers.py b/awscli/customizations/ecs/filehelpers.py
index c3b023589791..4001af70cf6a 100644
--- a/awscli/customizations/ecs/filehelpers.py
+++ b/awscli/customizations/ecs/filehelpers.py
@@ -78,4 +78,4 @@ def parse_appspec(appspec_str):
try:
return json.loads(appspec_str)
except ValueError:
- return yaml.load(appspec_str)
+ return yaml.safe_load(appspec_str)
|
hpcaitech__ColossalAI-3944 | [tensor] fix some unittests
[elixir] make README consistent in style
The README for the `Elixir` module is rather a draft; we should polish it to make it consistent with the README files found in other modules.
| [
{
"content": "from .wrapper import ElixirModule, ElixirOptimizer\n",
"path": "colossalai/elixir/__init__.py"
}
] | [
{
"content": "from .search import minimum_waste_search, optimal_search\nfrom .wrapper import ElixirModule, ElixirOptimizer\n\n__all__ = ['ElixirModule', 'ElixirOptimizer', 'minimum_waste_search', 'optimal_search']\n",
"path": "colossalai/elixir/__init__.py"
}
] | diff --git a/colossalai/elixir/README.md b/colossalai/elixir/README.md
index 8adce38dc822..40ac7f79493e 100644
--- a/colossalai/elixir/README.md
+++ b/colossalai/elixir/README.md
@@ -1,47 +1,96 @@
-# Elixir (Gemini2.0)
-Elixir, also known as Gemini, is a technology designed to facilitate the training of large models on a small GPU cluster.
+# ⚡️ Elixir (Gemini2.0)
+
+## 📚 Table of Contents
+
+- [⚡️ Elixir (Gemini2.0)](#️-elixir-gemini20)
+ - [📚 Table of Contents](#-table-of-contents)
+ - [🔗 Introduction](#-introduction)
+ - [💡 Design and Implementation](#-design-and-implementation)
+ - [🔨 API Usage](#-api-usage)
+ - [General Usage](#general-usage)
+ - [Advanced Usage](#advanced-usage)
+
+## 🔗 Introduction
+
+Elixir, also known as Gemini 2.0, is a distributed training technique designed to facilitate large-scale model training on a small GPU cluster.
Its goal is to eliminate data redundancy and leverage CPU memory to accommodate really large models.
-In addition, Elixir automatically profiles each training step prior to execution and selects the optimal configuration for the ratio of redundancy and the device for each parameter.
-This repository is used to benchmark the performance of Elixir.
-Elixir will be integrated into ColossalAI for usability.
+Elixir automatically profiles each training step before execution and selects the optimal configuration for the ratio of memory redundancy (tensor sharding) and the device placement for each parameter (tensor offloading).
+
+Please note the following before you try this feature:
+
+- **This feature is still in its experimental stage and the API is subject to future changes.**
+- **We have only tested this feature with PyTorch 1.13**
+
+
+## 💡 Design and Implementation
+
+Existing methods such as DeepSpeed and FSDP often lead to suboptimal efficiency due to the large combination of hyperparameters to tune and only experienced experts can unleash the full potential of hardware by carefully tuning the distributed configuration.
+Thus, we present a novel solution, Elixir, which automates efficient large model training based on pre-runtime model profiling.
+Elixir aims to identify the optimal combination of partitioning and offloading techniques to maximize training throughput.
+
+Some contributions of Elixir are listed below:
+- We build a pre-runtime profiler designed for large models. It is capable of obtaining the computation
+graph and the memory usage of the model before training. We bring this powerful tool to support
+large model profiling.
+- We introduce rCache to control the degree of memory redundancy. Moreover, we build a search
+engine to find the optimal configuration, maximizing training efficiency automatically. Different
+from previous works, our optimal configuration considers both memory partitioning and memory
+offloading.
+- We conduct evaluations on a large scale by testing various model sizes, GPU capacities, numbers of
+GPUs, and batch sizes. When compared to current SOTA solutions, we observe that Elixir achieves
+up to 3.4× acceleration without manual tuning.
+
+You can find more details about this system in our paper [Elixir: Train a Large Language Model on a Small GPU Cluster](https://arxiv.org/abs/2212.05339).
-## Environment
-This version is a beta release, so the running environment is somewhat restrictive.
-We are only demonstrating our running environment here, as we have not yet tested its compatibility.
-We have set the CUDA version to `11.6` and the PyTorch version to `1.13.1+cu11.6`.
+## 🔨 API Usage
-## Examples
+Below is the API for the Elixir module, these APIs are experimental and subject to future changes.
+
+### General Usage
-Here is a simple example to wrap your model and optimizer for [fine-tuning](https://github.com/hpcaitech/Elixir/tree/main/example/fine-tune).
```python
-from elixir.search import minimum_waste_search
-from elixir.wrapper import ElixirModule, ElixirOptimizer
+import torch
+import transformers
+
+import torch.distributed as dist
+
+from colossalai.elixir import ElixirModule, ElixirOptimizer, minimum_waste_search
-model = BertForSequenceClassification.from_pretrained('bert-base-uncased')
+# initialize your distributed backend
+...
+
+# create your model and optimizer
+model = transformers.BertForSequenceClassification.from_pretrained('bert-base-uncased')
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4, eps=1e-8)
-sr = minimum_waste_search(model, world_size)
-model = ElixirModule(model, sr, world_group)
+# search for configuration
+world_size = dist.get_world_size()
+search_result = minimum_waste_search(model, world_size)
+
+# wrap the model and optimizer
+model = ElixirModule(model, search_result, world_group)
optimizer = ElixirOptimizer(model, optimizer)
```
-Here is an advanced example for performance, which is used in our [benchmark](https://github.com/hpcaitech/Elixir/blob/main/example/common/elx.py).
+### Advanced Usage
```python
import torch
import torch.distributed as dist
from colossalai.nn.optimizer import HybridAdam
-from elixir.wrapper import ElixirModule, ElixirOptimizer
+from colossalai.elixir import ElixirModule, ElixirOptimizer
+
+# initialize your distributed backend
+...
-# get the world communication group
-global_group = dist.GroupMember.WORLD
# get the communication world size
global_size = dist.get_world_size()
# initialize the model in CPU
model = get_model(model_name)
+
# HybridAdam allows a part of parameters updated on CPU and a part updated on GPU
optimizer = HybridAdam(model.parameters(), lr=1e-3)
@@ -54,6 +103,8 @@ sr = optimal_search(
inp=data, # proivde an example input data in dictionary format
step_fn=train_step # provide an example step function
)
+
+# wrap your model with ElixirModule and optimizer with ElixirOptimizer
model = ElixirModule(
model,
sr,
@@ -65,7 +116,7 @@ model = ElixirModule(
optimizer = ElixirOptimizer(
model,
optimizer,
- initial_scale=64, # loss scale used in AMP
+ initial_scale=1024, # loss scale used in AMP
init_step=True # enable for the stability of training
)
```
diff --git a/colossalai/elixir/__init__.py b/colossalai/elixir/__init__.py
index b7fd76a5da7d..0ccc045550af 100644
--- a/colossalai/elixir/__init__.py
+++ b/colossalai/elixir/__init__.py
@@ -1 +1,4 @@
+from .search import minimum_waste_search, optimal_search
from .wrapper import ElixirModule, ElixirOptimizer
+
+__all__ = ['ElixirModule', 'ElixirOptimizer', 'minimum_waste_search', 'optimal_search']
|
django-cms__django-filer-1408 | Field verbose_name should use gettext_lazy
Hi,
Model field `verbose_name` values should use `gettext_lazy`; otherwise the generated migrations depend on the language settings of whoever runs `makemigrations`.
https://github.com/django-cms/django-filer/blob/master/filer/models/foldermodels.py#L9
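For reference, the fix pattern is simply to wrap each verbose name in `gettext_lazy` so translation is deferred until render time. A minimal sketch (the field names here are illustrative, not the exact filer model):

```python
from django.db import models
from django.utils.translation import gettext_lazy as _


class Folder(models.Model):
    # Lazy strings are only resolved when displayed, so `makemigrations`
    # produces the same output regardless of the active user language.
    name = models.CharField(_("name"), max_length=255)
    created_at = models.DateTimeField(_("created at"), auto_now_add=True)
```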
This is the migration that was generated after upgrading to django-filer 3.0:

Thanks.
| [
{
"content": "\"\"\"\nSee PEP 386 (https://www.python.org/dev/peps/pep-0386/)\n\nRelease logic:\n 1. Increase version number (change __version__ below).\n 2. Check that all changes have been documented in CHANGELOG.rst.\n 3. git add filer/__init__.py CHANGELOG.rst\n 4. git commit -m 'Bump to {new version}'\n 5. git push\n 6. Assure that all tests pass on https://github.com/django-cms/django-filer/actions\n 7. Create a new release on github. Create the new tag against the latest master commit and auto generate\n the release notes https://github.com/django-cms/django-filer/releases/new\n 8. Publish the release and it will automatically release to pypi\n\"\"\"\n\n__version__ = '3.0.3'\n\ndefault_app_config = 'filer.apps.FilerConfig'\n",
"path": "filer/__init__.py"
}
] | [
{
"content": "\"\"\"\nSee PEP 386 (https://www.python.org/dev/peps/pep-0386/)\n\nRelease logic:\n 1. Increase version number (change __version__ below).\n 2. Check that all changes have been documented in CHANGELOG.rst.\n 3. git add filer/__init__.py CHANGELOG.rst\n 4. git commit -m 'Bump to {new version}'\n 5. git push\n 6. Assure that all tests pass on https://github.com/django-cms/django-filer/actions\n 7. Create a new release on github. Create the new tag against the latest master commit and auto generate\n the release notes https://github.com/django-cms/django-filer/releases/new\n 8. Publish the release and it will automatically release to pypi\n\"\"\"\n\n__version__ = '3.0.4'\n",
"path": "filer/__init__.py"
}
] | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index d8a599f1f..3f9c0ccf8 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,13 +2,13 @@
CHANGELOG
=========
-unreleased
-==========
+3.0.4 (2023-08-04)
+==================
* Fix bug when submitting permission admin form
* Fix folder select field css of permission admin form
* Fix requirements (Django>=3.2) in setup.py and docs
-* Update Dutch and French locale
+* Update Dutch, Spanish and French locale
3.0.3 (2023-07-21)
==================
diff --git a/filer/__init__.py b/filer/__init__.py
index 5adab2331..d70df812f 100644
--- a/filer/__init__.py
+++ b/filer/__init__.py
@@ -13,6 +13,4 @@
8. Publish the release and it will automatically release to pypi
"""
-__version__ = '3.0.3'
-
-default_app_config = 'filer.apps.FilerConfig'
+__version__ = '3.0.4'
diff --git a/filer/locale/es/LC_MESSAGES/django.mo b/filer/locale/es/LC_MESSAGES/django.mo
index e1c2c9c5e..58062ee45 100644
Binary files a/filer/locale/es/LC_MESSAGES/django.mo and b/filer/locale/es/LC_MESSAGES/django.mo differ
diff --git a/filer/locale/es/LC_MESSAGES/django.po b/filer/locale/es/LC_MESSAGES/django.po
index bdfb39280..3adb0ec9b 100644
--- a/filer/locale/es/LC_MESSAGES/django.po
+++ b/filer/locale/es/LC_MESSAGES/django.po
@@ -1,10 +1,11 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
-#
+#
# Translators:
# Translators:
# Translators:
+# Biel Frontera, 2023
# Cristian Acevedo <[email protected]>, 2016
# David <[email protected]>, 2015
# Jason Gass Martinez <[email protected]>, 2016
@@ -19,27 +20,28 @@ msgstr ""
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-07-31 22:21+0200\n"
"PO-Revision-Date: 2012-07-13 15:50+0000\n"
-"Last-Translator: Luis Zárate <[email protected]>, 2019\n"
-"Language-Team: Spanish (http://app.transifex.com/divio/django-filer/language/"
-"es/)\n"
-"Language: es\n"
+"Last-Translator: Biel Frontera, 2023\n"
+"Language-Team: Spanish (http://app.transifex.com/divio/django-filer/language/es/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=3; plural=n == 1 ? 0 : n != 0 && n % 1000000 == 0 ? "
-"1 : 2;\n"
+"Language: es\n"
+"Plural-Forms: nplurals=3; plural=n == 1 ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2;\n"
#: admin/clipboardadmin.py:16
+#| msgid ""
+#| "ccount doesn't have permissions to rename all of the selected files."
msgid "You do not have permission to upload files."
-msgstr ""
+msgstr "No tienes autorización para subir ficheros. "
#: admin/clipboardadmin.py:17
msgid "Can't find folder to upload. Please refresh and try again"
-msgstr ""
+msgstr "No se ha encontrado la carpeta donde guardar el fichero. Por favor, refresca la página y vuelve a probar."
#: admin/clipboardadmin.py:19
-msgid "Can't use this folder, Permission Denied. Please select another folder."
-msgstr ""
+msgid ""
+"Can't use this folder, Permission Denied. Please select another folder."
+msgstr "No se puede utilizar esta carpeta: permiso denegado. Por favor, selecciona otra carpeta."
#: admin/fileadmin.py:47
msgid "Advanced"
@@ -53,9 +55,7 @@ msgstr "URL canónica"
msgid ""
"Items must be selected in order to perform actions on them. No items have "
"been changed."
-msgstr ""
-"Los elementos deben estar seleccionados para efectuar acciones sobre ellos. "
-"Ningún elemento ha sido modificado."
+msgstr "Los elementos deben estar seleccionados para efectuar acciones sobre ellos. Ningún elemento ha sido modificado."
#: admin/folderadmin.py:422
#, python-format
@@ -137,9 +137,7 @@ msgstr "Las carpetas con los nombres %s ya existen en el destino seleccionado"
msgid ""
"Successfully moved %(count)d files and/or folders to folder "
"'%(destination)s'."
-msgstr ""
-"Movidos con éxito %(count)d ficheros y/o directorios al directorio "
-"'%(destination)s'."
+msgstr "Movidos con éxito %(count)d ficheros y/o directorios al directorio '%(destination)s'."
#: admin/folderadmin.py:930 admin/folderadmin.py:932
msgid "Move files and/or folders"
@@ -164,9 +162,7 @@ msgstr "Cambiar el nombre de los archivos"
msgid ""
"Successfully copied %(count)d files and/or folders to folder "
"'%(destination)s'."
-msgstr ""
-"Copiados con éxito %(count)d ficheros y/o directorios al directorio "
-"'%(destination)s'."
+msgstr "Copiados con éxito %(count)d ficheros y/o directorios al directorio '%(destination)s'."
#: admin/folderadmin.py:1143 admin/folderadmin.py:1145
msgid "Copy files and/or folders"
@@ -191,23 +187,19 @@ msgstr "Cambiar el tamaño de imágenes seleccionadas."
#: admin/forms.py:24
msgid "Suffix which will be appended to filenames of copied files."
-msgstr ""
-"Sufijo que se añadirá al nombre de los archivos de los archivos copiados."
+msgstr "Sufijo que se añadirá al nombre de los archivos de los archivos copiados."
#: admin/forms.py:31
#, python-format
msgid ""
"Suffix should be a valid, simple and lowercase filename part, like "
"\"%(valid)s\"."
-msgstr ""
-"El sufijo debe ser una parte válida de un nombre de fichero, simple y en "
-"minúsculas, como \"%(valid)s\"."
+msgstr "El sufijo debe ser una parte válida de un nombre de fichero, simple y en minúsculas, como \"%(valid)s\"."
#: admin/forms.py:52
#, python-format
msgid "Unknown rename format value key \"%(key)s\"."
-msgstr ""
-"Formato de cambio de nombre con valor de clave \"%(key)s\" desconocido."
+msgstr "Formato de cambio de nombre con valor de clave \"%(key)s\" desconocido."
#: admin/forms.py:54
#, python-format
@@ -236,9 +228,7 @@ msgstr "ampliar"
#: admin/forms.py:75
msgid "Thumbnail option or resize parameters must be choosen."
-msgstr ""
-"Se debe elegir una opción de miniatura o unos parámetros para el cambio de "
-"tamaño."
+msgstr "Se debe elegir una opción de miniatura o unos parámetros para el cambio de tamaño."
#: admin/forms.py:77
msgid "Resize parameters must be choosen."
@@ -267,11 +257,11 @@ msgstr "Tu entrada: \"{subject_location}\"."
#: admin/permissionadmin.py:10 models/foldermodels.py:380
msgid "Who"
-msgstr ""
+msgstr "Quién"
#: admin/permissionadmin.py:11 models/foldermodels.py:401
msgid "What"
-msgstr ""
+msgstr "Qué"
#: admin/views.py:55
msgid "Folder with this name already exists."
@@ -389,13 +379,11 @@ msgstr "Permisos desactivados"
msgid ""
"Disable any permission checking for this file. File will be publicly "
"accessible to anyone."
-msgstr ""
-"Desactiva cualquier comprobación de permiso para este archivo. El archivo "
-"será accesible públicamente para todos."
+msgstr "Desactiva cualquier comprobación de permiso para este archivo. El archivo será accesible públicamente para todos."
#: models/foldermodels.py:94
msgid "parent"
-msgstr ""
+msgstr "Padre"
#: models/foldermodels.py:121
msgid "created at"
@@ -425,7 +413,7 @@ msgstr "este elemento y todos los hijos"
#: models/foldermodels.py:266
msgid "inherit"
-msgstr ""
+msgstr "hereda"
#: models/foldermodels.py:267
msgid "allow"
@@ -469,53 +457,56 @@ msgstr "permisos de la carpeta"
#: models/foldermodels.py:348
msgid "Folder cannot be selected with type \"all items\"."
-msgstr ""
+msgstr "La carpeta no se puede seleccionar con el tipo \"todos los elementos\"."
#: models/foldermodels.py:350
msgid "Folder has to be selected when type is not \"all items\"."
-msgstr ""
+msgstr "La carpeta se tiene que seleccionar cuando el tipo no es \"todos los elementos\"."
#: models/foldermodels.py:352
msgid "User or group cannot be selected together with \"everybody\"."
-msgstr ""
+msgstr "Usuario y grupo no se pueden seleccionar a la vez con \"todos\"."
#: models/foldermodels.py:354
msgid "At least one of user, group, or \"everybody\" has to be selected."
-msgstr ""
+msgstr "Al menos se debe seleccionar un usuario, un grupo o \"todos\"."
#: models/foldermodels.py:360
+#| msgid "Folders"
msgid "All Folders"
-msgstr ""
+msgstr "Todas las carpetas"
#: models/foldermodels.py:362
msgid "Logical Path"
-msgstr ""
+msgstr "Path lógico"
#: models/foldermodels.py:371
#, python-brace-format
msgid "User: {user}"
-msgstr ""
+msgstr "Usuario: {user}"
#: models/foldermodels.py:373
#, python-brace-format
msgid "Group: {group}"
-msgstr ""
+msgstr "Grupo: {group}"
#: models/foldermodels.py:375
+#| msgid "everybody"
msgid "Everybody"
-msgstr ""
+msgstr "Todos"
#: models/foldermodels.py:388 templates/admin/filer/widgets/admin_file.html:45
msgid "Edit"
-msgstr ""
+msgstr "Editar"
#: models/foldermodels.py:389
msgid "Read"
-msgstr ""
+msgstr "Leer"
#: models/foldermodels.py:390
+#| msgid "can add children"
msgid "Add children"
-msgstr ""
+msgstr "Añadir hijos"
#: models/imagemodels.py:18
msgid "date taken"
@@ -565,11 +556,12 @@ msgstr "raíz"
#: settings.py:273
msgid "Show table view"
-msgstr ""
+msgstr "Muestra la vista de tabla"
#: settings.py:278
+#| msgid "thumbnail option"
msgid "Show thumbnail view"
-msgstr ""
+msgstr "Muestra la vista de miniaturas"
#: templates/admin/filer/actions.html:5
msgid "Run the selected action"
@@ -583,8 +575,7 @@ msgstr "Continuar"
#: templates/admin/filer/folder/directory_table_list.html:232
#: templates/admin/filer/folder/directory_thumbnail_list.html:210
msgid "Click here to select the objects across all pages"
-msgstr ""
-"Haz clic aquí para seleccionar los objetos a través de todas las páginas"
+msgstr "Haz clic aquí para seleccionar los objetos a través de todas las páginas"
#: templates/admin/filer/actions.html:14
#, python-format
@@ -636,26 +627,19 @@ msgid ""
"Deleting the selected files and/or folders would result in deleting related "
"objects, but your account doesn't have permission to delete the following "
"types of objects:"
-msgstr ""
-"Borrar los archivos y/o carpetas seleccionados borraría los objetos "
-"seleccionados, pero tu cuenta no tiene permiso para borrar los siguientes "
-"tipos de objetos:"
+msgstr "Borrar los archivos y/o carpetas seleccionados borraría los objetos seleccionados, pero tu cuenta no tiene permiso para borrar los siguientes tipos de objetos:"
#: templates/admin/filer/delete_selected_files_confirmation.html:19
msgid ""
"Deleting the selected files and/or folders would require deleting the "
"following protected related objects:"
-msgstr ""
-"Borrar los archivos y/o carpetas requeriría borrar los siguientes objetos "
-"relacionados protegidos:"
+msgstr "Borrar los archivos y/o carpetas requeriría borrar los siguientes objetos relacionados protegidos:"
#: templates/admin/filer/delete_selected_files_confirmation.html:27
msgid ""
"Are you sure you want to delete the selected files and/or folders? All of "
"the following objects and their related items will be deleted:"
-msgstr ""
-"¿Estás seguro de que quieres borrar los archivos y/o carpetas seleccionados? "
-"Los siguientes objetos y sus elementos relacionados serán borrados:"
+msgstr "¿Estás seguro de que quieres borrar los archivos y/o carpetas seleccionados? Los siguientes objetos y sus elementos relacionados serán borrados:"
#: templates/admin/filer/delete_selected_files_confirmation.html:46
#: templates/admin/filer/folder/choose_copy_destination.html:64
@@ -704,11 +688,9 @@ msgstr "Icono de la Carpeta"
#: templates/admin/filer/folder/choose_copy_destination.html:23
msgid ""
-"Your account doesn't have permissions to copy all of the selected files and/"
-"or folders."
-msgstr ""
-"Tu cuenta no tiene permisos para copiar todos los archivos y/o carpetas "
-"seleccionados."
+"Your account doesn't have permissions to copy all of the selected files "
+"and/or folders."
+msgstr "Tu cuenta no tiene permisos para copiar todos los archivos y/o carpetas seleccionados."
#: templates/admin/filer/folder/choose_copy_destination.html:25
#: templates/admin/filer/folder/choose_copy_destination.html:31
@@ -732,9 +714,7 @@ msgstr "No hay archivos y/o carpetas disponibles para copiar."
msgid ""
"The following files and/or folders will be copied to a destination folder "
"(retaining their tree structure):"
-msgstr ""
-"Los siguientes archivos y/o carpetas serán copiados a una carpeta de destino "
-"(manteniendo su estructura en árbol):"
+msgstr "Los siguientes archivos y/o carpetas serán copiados a una carpeta de destino (manteniendo su estructura en árbol):"
#: templates/admin/filer/folder/choose_copy_destination.html:54
#: templates/admin/filer/folder/choose_move_destination.html:64
@@ -754,9 +734,7 @@ msgstr "No está permitido copiar los archivos dentro de la misma carpeta"
#: templates/admin/filer/folder/choose_images_resize_options.html:15
msgid ""
"Your account doesn't have permissions to resize all of the selected images."
-msgstr ""
-"Tu cuenta no tiene permisos para cambiar el tamaño de todas las imágenes "
-"seleccionadas."
+msgstr "Tu cuenta no tiene permisos para cambiar el tamaño de todas las imágenes seleccionadas."
#: templates/admin/filer/folder/choose_images_resize_options.html:18
msgid "There are no images available to resize."
@@ -768,9 +746,7 @@ msgstr "Se les cambiará el tamaño a las siguientes imágenes:"
#: templates/admin/filer/folder/choose_images_resize_options.html:33
msgid "Choose an existing thumbnail option or enter resize parameters:"
-msgstr ""
-"Elige una opción de miniatura existente o introduce parámetros para el "
-"cambio de tamaño:"
+msgstr "Elige una opción de miniatura existente o introduce parámetros para el cambio de tamaño:"
#: templates/admin/filer/folder/choose_images_resize_options.html:35
msgid "Choose resize parameters:"
@@ -780,10 +756,7 @@ msgstr "Elegir parámetros para el cambio de tamaño:"
msgid ""
"Warning: Images will be resized in-place and originals will be lost. Maybe "
"first make a copy of them to retain the originals."
-msgstr ""
-"Aviso: se cambiará el tamaño de las imágenes en el mismo sitio y los "
-"originales se perderán. Considera realizar una copia de aquellas para "
-"conservar los originales."
+msgstr "Aviso: se cambiará el tamaño de las imágenes en el mismo sitio y los originales se perderán. Considera realizar una copia de aquellas para conservar los originales."
#: templates/admin/filer/folder/choose_images_resize_options.html:41
msgid "Resize"
@@ -791,11 +764,9 @@ msgstr "Cambiar de tamaño"
#: templates/admin/filer/folder/choose_move_destination.html:35
msgid ""
-"Your account doesn't have permissions to move all of the selected files and/"
-"or folders."
-msgstr ""
-"Tu cuenta no tiene permisos para mover todos los archivos y/o carpetas "
-"seleccionados."
+"Your account doesn't have permissions to move all of the selected files "
+"and/or folders."
+msgstr "Tu cuenta no tiene permisos para mover todos los archivos y/o carpetas seleccionados."
#: templates/admin/filer/folder/choose_move_destination.html:47
msgid "There are no files and/or folders available to move."
@@ -805,9 +776,7 @@ msgstr "No hay archivos y/o carpetas disponibles para mover."
msgid ""
"The following files and/or folders will be moved to a destination folder "
"(retaining their tree structure):"
-msgstr ""
-"Los siguientes archivos y/o directorios serán movidos a una carpeta de "
-"destino (manteniendo su estructura de árbol):"
+msgstr "Los siguientes archivos y/o directorios serán movidos a una carpeta de destino (manteniendo su estructura de árbol):"
#: templates/admin/filer/folder/choose_move_destination.html:73
#: templates/admin/filer/folder/choose_move_destination.html:76
@@ -822,8 +791,7 @@ msgstr "No está permitido mover los archivos dentro de la misma carpeta"
#: templates/admin/filer/folder/choose_rename_format.html:15
msgid ""
"Your account doesn't have permissions to rename all of the selected files."
-msgstr ""
-"Tu cuenta no tiene permisos para renombrar todos los objetos seleccionados."
+msgstr "Tu cuenta no tiene permisos para renombrar todos los objetos seleccionados."
#: templates/admin/filer/folder/choose_rename_format.html:18
msgid "There are no files available to rename."
@@ -833,9 +801,7 @@ msgstr "No hay archivos disponibles a los que cambiar el nombre."
msgid ""
"The following files will be renamed (they will stay in their folders and "
"keep original filename, only displayed filename will be changed):"
-msgstr ""
-"Los siguientes archivos serán renombrados (se quedarán en sus carpetas y "
-"mantendrán su nombre original, solo los nombres mostrados serán cambiados):"
+msgstr "Los siguientes archivos serán renombrados (se quedarán en sus carpetas y mantendrán su nombre original, solo los nombres mostrados serán cambiados):"
#: templates/admin/filer/folder/choose_rename_format.html:59
msgid "Rename"
@@ -992,11 +958,13 @@ msgstr "activado"
#: templates/admin/filer/folder/directory_table_list.html:144
#, python-format
+#| msgid "Change '%(item_label)s' details"
msgid "Canonical url '%(item_label)s'"
msgstr "Url canónica '%(item_label)s'"
#: templates/admin/filer/folder/directory_table_list.html:148
#, python-format
+#| msgid "Change '%(item_label)s' details"
msgid "Download '%(item_label)s'"
msgstr "Descargar '%(item_label)s'"
@@ -1058,12 +1026,14 @@ msgstr "Seleccionar todo %(total_count)s"
#: templates/admin/filer/folder/directory_thumbnail_list.html:15
#: templates/admin/filer/folder/directory_thumbnail_list.html:80
+#| msgid "Select this file"
msgid "Select all"
-msgstr ""
+msgstr "Selecciona todas"
#: templates/admin/filer/folder/directory_thumbnail_list.html:77
+#| msgid "Filer"
msgid "Files"
-msgstr ""
+msgstr "Ficheros"
#: templates/admin/filer/folder/new_folder_form.html:4
#: templates/admin/filer/folder/new_folder_form.html:7
@@ -1087,11 +1057,11 @@ msgstr "Guardar"
#: templates/admin/filer/templatetags/file_icon.html:9
msgid "Your browser does not support audio."
-msgstr ""
+msgstr "El navegador no soporta audio."
#: templates/admin/filer/templatetags/file_icon.html:14
msgid "Your browser does not support video."
-msgstr ""
+msgstr "El navegador no soporta vídeo."
#: templates/admin/filer/tools/clipboard/clipboard.html:9
msgid "Clipboard"
@@ -1119,17 +1089,18 @@ msgstr "fallo en la subida"
#: templates/admin/filer/tools/detail_info.html:11
msgid "Download"
-msgstr ""
+msgstr "Descarga"
#: templates/admin/filer/tools/detail_info.html:15
msgid "Expand"
-msgstr ""
+msgstr "Expande"
#: templates/admin/filer/tools/detail_info.html:20
#: templates/admin/filer/widgets/admin_file.html:32
#: templatetags/filer_admin_tags.py:107
+#| msgid "file missing"
msgid "File is missing"
-msgstr ""
+msgstr "Fichero no encontrado"
#: templates/admin/filer/tools/detail_info.html:29
msgid "Type"
@@ -1183,30 +1154,31 @@ msgid "Choose File"
msgstr "Selecciona el archivo"
#: templates/admin/filer/widgets/admin_folder.html:16
+#| msgid "Choose File"
msgid "Choose Folder"
-msgstr ""
+msgstr "Escoge una carpeta"
#: validation.py:19
#, python-brace-format
msgid "File \"{file_name}\": Upload denied by site security policy"
-msgstr ""
+msgstr "Fichero \"{file_name}\": carga denegada por políticas de seguridad del sitio web"
#: validation.py:22
#, python-brace-format
msgid "File \"{file_name}\": {file_type} upload denied by site security policy"
-msgstr ""
+msgstr "Fichero \"{file_name}\": carga del tipo {file_type} denegada por políticas de seguridad del sitio web"
#: validation.py:33
#, python-brace-format
msgid "File \"{file_name}\": HTML upload denied by site security policy"
-msgstr ""
+msgstr "Fichero \"{file_name}\": carga de HTML denegada por políticas de seguridad del sitio web"
#: validation.py:71
#, python-brace-format
msgid ""
"File \"{file_name}\": Rejected due to potential cross site scripting "
"vulnerability"
-msgstr ""
+msgstr "Fichero \"{file_name}\": rechazado por posible vulnerabilidad XSS (Cross-site scripting)"
#~ msgid "Open file"
#~ msgstr "Open file"
|
pyjanitor-devs__pyjanitor-497 | [DOC] Clarify Python version requirements
# Brief Description of Fix
I was looking through the documentation (for users and contributors), and it was unclear to me which Python versions we actually support. It seems that we support Python 3.6 and 3.7. This arose while I was updating the `pyproject.toml` file to avoid the warning:
```
--py36 is deprecated and will be removed in a future version. Use --target-version py36 instead.
```
Our current locations of explicit python versions are in:
- `pyproject.toml`
  - `py36 = true`
- `environment-dev.yml`
  - `- python >= 3.6`
- `.azure-pipelines/pipeline-master.yml`
  - `python.version: "3.7"`
# Proposed Fix
If `pyjanitor` is in fact meant to function on 3.6+, we should
- Explicitly inform contributors that their code should be 3.6+ compatible
- Inform users which Python versions the package requires, on the documentation site, PyPI, etc.
- Add `python_requires=">=3.6"` to `setup.py`
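
A minimal sketch of that `setup.py` change (metadata abbreviated for illustration):

```python
from setuptools import setup

setup(
    name="pyjanitor",
    packages=["janitor"],
    # Declare the supported interpreter range so pip refuses to install
    # the package on Python < 3.6 instead of failing later at runtime.
    python_requires=">=3.6",
)
```

PyPI also displays this requirement on the project page, which covers part of the user-facing side of the fix.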
| [
{
"content": "from setuptools import setup\n\n\ndef requirements():\n with open(\"requirements.txt\", \"r+\") as f:\n return f.read()\n\n\nsetup(\n name=\"pyjanitor\",\n version=\"0.18.0\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. Ma\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ericmjl/pyjanitor\",\n packages=[\"janitor\"],\n install_requires=requirements(),\n)\n",
"path": "setup.py"
}
] | [
{
"content": "from setuptools import setup\n\n\ndef requirements():\n with open(\"requirements.txt\", \"r+\") as f:\n return f.read()\n\n\nsetup(\n name=\"pyjanitor\",\n version=\"0.18.0\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. Ma\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ericmjl/pyjanitor\",\n packages=[\"janitor\"],\n install_requires=requirements(),\n python_requires=\">=3.6\",\n)\n",
"path": "setup.py"
}
] | diff --git a/AUTHORS.rst b/AUTHORS.rst
index 1090d2f03..cf1883228 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -20,7 +20,7 @@ Leads
- `@szuckerman <https://github.com/szuckerman>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Aclosed+mentions%3Aszuckerman>`_
- `@zbarry <https://github.com/zbarry>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Aclosed+mentions%3Azbarry>`_
- Co-led sprint at SciPy 2019.
-- `@HectorM14 <https://github.com/HectorM14>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Aclosed+mentions%3AHectorM14>`_
+- `@hectormz <https://github.com/hectormz>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Aclosed+mentions%3Ahectormz>`_
- `@jk3587 <https://github.com/jk3587>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Aclosed+mentions%3Ajk3587>`_
- Tagged issues at SciPy 2019.
- `@sallyhong <https://github.com/sallyhong>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Aclosed+mentions%3Asallyhong>`_
@@ -76,4 +76,3 @@ Contributors
- `@puruckertom <https://github.com/puruckertom>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pulls?utf8=%E2%9C%93&q=is%3Apr+author%3Apuruckertom>`_
- `@thomasjpfan <https://github.com/thomasjpfan>`_ | `contributions <https://github.com/ericmjl/pyjanitor/issues?q=is%3Aclosed+mentions%3Athomasjpfan>`_
- `@jiafengkevinchen <https://github.com/jiafengkevinchen>`_ | `contributions <https://github.com/ericmjl/pyjanitor/pull/480#issue-298730562>`_
-
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c358a4755..f9facd531 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -228,6 +228,11 @@ and append bin/python to the end of the path.
Click OK and you should be good to go!
+Code Compatibility
+------------------
+
+pyjanitor supports Python 3.6+, so all contributed code must maintain this compatibility.
+
Pull Request Guidelines
-----------------------
diff --git a/README.rst b/README.rst
index 6f75e1f91..368490aea 100644
--- a/README.rst
+++ b/README.rst
@@ -143,6 +143,8 @@ Installation
conda install pyjanitor -c conda-forge
+``pyjanitor`` requires Python 3.6+.
+
Functionality
-------------
diff --git a/pyproject.toml b/pyproject.toml
index f21de5254..7ef69bef7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.black]
line-length = 79
-py36 = true
+target-version = ['py36', 'py37']
include = '\.pyi?$'
exclude = '''
/(
diff --git a/setup.py b/setup.py
index eec6e8012..e7f2c1e84 100644
--- a/setup.py
+++ b/setup.py
@@ -15,4 +15,5 @@ def requirements():
url="https://github.com/ericmjl/pyjanitor",
packages=["janitor"],
install_requires=requirements(),
+ python_requires=">=3.6",
)
|
typeddjango__django-stubs-871 | Mypy version 0.940 released, causes tests to fail
On [11th March 2022, Mypy version 0.940](https://pypi.org/project/mypy/#history) was released, causing the django-stubs tests run by pytest in GitHub Actions to fail for any run started on or after 11th March.
We either need to pin mypy to version 0.931 for local testing and GitHub Actions, or find a way around the error `Cannot determine type of "Any"` that mypy 0.940 raises when the pytest command is run.
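
One straightforward way to apply the first option is to cap the mypy requirement in the package's dependency list; a sketch of the idea (the exact bounds are a judgement call, not a confirmed decision):

```python
# setup.py excerpt: keep mypy below 0.940 until the 0.940 regression is handled.
dependencies = [
    "mypy>=0.930,<0.940",
    "django",
    "django-stubs-ext>=0.3.0",
]
```

The same pin would need to be mirrored in the requirements file used by CI so that local runs and GitHub Actions resolve the same mypy release.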
| [
{
"content": "import os\nfrom distutils.core import setup\nfrom typing import List\n\nfrom setuptools import find_packages\n\n\ndef find_stub_files(name: str) -> List[str]:\n result = []\n for root, dirs, files in os.walk(name):\n for file in files:\n if file.endswith(\".pyi\"):\n if os.path.sep in root:\n sub_root = root.split(os.path.sep, 1)[-1]\n file = os.path.join(sub_root, file)\n result.append(file)\n return result\n\n\nwith open(\"README.md\") as f:\n readme = f.read()\n\ndependencies = [\n \"mypy>=0.931\",\n \"django\",\n \"django-stubs-ext>=0.3.0\",\n \"tomli\",\n # Types:\n \"typing-extensions\",\n \"types-pytz\",\n \"types-PyYAML\",\n]\n\nsetup(\n name=\"django-stubs\",\n version=\"1.9.0\",\n description=\"Mypy stubs for Django\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n license=\"MIT\",\n url=\"https://github.com/typeddjango/django-stubs\",\n author=\"Maksim Kurnikov\",\n author_email=\"[email protected]\",\n py_modules=[],\n python_requires=\">=3.7\",\n install_requires=dependencies,\n packages=[\"django-stubs\", *find_packages(exclude=[\"scripts\"])],\n package_data={\n \"django-stubs\": find_stub_files(\"django-stubs\"),\n \"mypy_django_plugin\": [\"py.typed\"],\n },\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Typing :: Typed\",\n \"Framework :: Django\",\n \"Framework :: Django :: 2.2\",\n \"Framework :: Django :: 3.0\",\n \"Framework :: Django :: 3.1\",\n \"Framework :: Django :: 3.2\",\n ],\n project_urls={\n \"Release notes\": \"https://github.com/typeddjango/django-stubs/releases\",\n },\n)\n",
"path": "setup.py"
}
] | [
{
"content": "import os\nfrom distutils.core import setup\nfrom typing import List\n\nfrom setuptools import find_packages\n\n\ndef find_stub_files(name: str) -> List[str]:\n result = []\n for root, dirs, files in os.walk(name):\n for file in files:\n if file.endswith(\".pyi\"):\n if os.path.sep in root:\n sub_root = root.split(os.path.sep, 1)[-1]\n file = os.path.join(sub_root, file)\n result.append(file)\n return result\n\n\nwith open(\"README.md\") as f:\n readme = f.read()\n\ndependencies = [\n \"mypy>=0.930,<0.940\",\n \"django\",\n \"django-stubs-ext>=0.3.0\",\n \"tomli\",\n # Types:\n \"typing-extensions\",\n \"types-pytz\",\n \"types-PyYAML\",\n]\n\nsetup(\n name=\"django-stubs\",\n version=\"1.9.0\",\n description=\"Mypy stubs for Django\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n license=\"MIT\",\n url=\"https://github.com/typeddjango/django-stubs\",\n author=\"Maksim Kurnikov\",\n author_email=\"[email protected]\",\n py_modules=[],\n python_requires=\">=3.7\",\n install_requires=dependencies,\n packages=[\"django-stubs\", *find_packages(exclude=[\"scripts\"])],\n package_data={\n \"django-stubs\": find_stub_files(\"django-stubs\"),\n \"mypy_django_plugin\": [\"py.typed\"],\n },\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Typing :: Typed\",\n \"Framework :: Django\",\n \"Framework :: Django :: 2.2\",\n \"Framework :: Django :: 3.0\",\n \"Framework :: Django :: 3.1\",\n \"Framework :: Django :: 3.2\",\n ],\n project_urls={\n \"Release notes\": \"https://github.com/typeddjango/django-stubs/releases\",\n },\n)\n",
"path": "setup.py"
}
] | diff --git a/requirements.txt b/requirements.txt
index 485644f5f..d6bb5025c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,3 +9,4 @@ pytest-mypy-plugins==1.9.3
psycopg2-binary
-e ./django_stubs_ext
-e .
+mypy==0.931
diff --git a/setup.py b/setup.py
index 91a770c8b..75fd48357 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def find_stub_files(name: str) -> List[str]:
readme = f.read()
dependencies = [
- "mypy>=0.931",
+ "mypy>=0.930,<0.940",
"django",
"django-stubs-ext>=0.3.0",
"tomli",
|
google-deepmind__dm-haiku-48 | Jax version upgrade (AttributeError: CallPrimitive)
Using the current master of Haiku (66f9c69), I am getting the following error on Colab:
```
AttributeError Traceback (most recent call last)
<ipython-input-3-3a9e6adbfff5> in <module>()
----> 1 import haiku as hk
/usr/local/lib/python3.6/dist-packages/haiku/__init__.py in <module>()
17
18 from haiku import data_structures
---> 19 from haiku import experimental
20 from haiku import initializers
21 from haiku import nets
/usr/local/lib/python3.6/dist-packages/haiku/experimental.py in <module>()
22 from haiku._src.base import custom_getter
23 from haiku._src.base import ParamContext
---> 24 from haiku._src.dot import to_dot
25 from haiku._src.lift import lift
26 from haiku._src.module import profiler_name_scopes
/usr/local/lib/python3.6/dist-packages/haiku/_src/dot.py in <module>()
23
24 from haiku._src import data_structures
---> 25 from haiku._src import module
26 from haiku._src import utils
27 import jax
/usr/local/lib/python3.6/dist-packages/haiku/_src/module.py in <module>()
26 from haiku._src import base
27 from haiku._src import data_structures
---> 28 from haiku._src import named_call
29 from haiku._src import utils
30 import jax.numpy as jnp
/usr/local/lib/python3.6/dist-packages/haiku/_src/named_call.py in <module>()
29
30 # Registering named call as a primitive
---> 31 named_call_p = core.CallPrimitive('named_call')
32 # named_call is implemented as a plain core.call and only diverges
33 # under compilation (see named_call_translation_rule)
AttributeError: module 'jax.core' has no attribute 'CallPrimitive'
```
I believe that's because Haiku now requires `jax>=0.1.71`, while the default version on Colab is `jax==0.1.69`; `CallPrimitive` was only introduced in jax 0.1.71.
https://github.com/google/jax/blob/1545a29e6d69a7b3c7fdf9a49b38004759a9fbfa/jax/core.py#L1106-L1115
To reproduce (inside a Colab):
```python
import jax
print(jax.__version__) # 0.1.69
!pip install -q git+https://github.com/deepmind/dm-haiku
import haiku as hk
```
Run `!pip install -q --upgrade jax jaxlib` first in your Colab to fix this issue.
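
As a side note, a small version guard along these lines can surface the mismatch with a clearer message before the import blows up (just a sketch for a Colab cell; it assumes the `packaging` module is available in the runtime):

```python
import jax
from packaging import version

# Fail early with an actionable message instead of the opaque AttributeError.
if version.parse(jax.__version__) < version.parse("0.1.71"):
    raise RuntimeError(
        f"haiku needs jax>=0.1.71 but found {jax.__version__}; "
        "run `!pip install -q --upgrade jax jaxlib` and restart the runtime."
    )
```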
| [
{
"content": "# Lint as: python3\n# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Setup for pip package.\"\"\"\n\nfrom setuptools import find_namespace_packages\nfrom setuptools import setup\n\n\ndef _get_version():\n with open('haiku/__init__.py') as fp:\n for line in fp:\n if line.startswith('__version__'):\n g = {}\n exec(line, g) # pylint: disable=exec-used\n return g['__version__']\n raise ValueError('`__version__` not defined in `haiku/__init__.py`')\n\n\ndef _parse_requirements(requirements_txt_path):\n with open(requirements_txt_path) as fp:\n return fp.read().splitlines()\n\n\n_VERSION = _get_version()\n\nEXTRA_PACKAGES = {\n 'jax': ['jax>=0.1.55'],\n 'jaxlib': ['jaxlib>=0.1.37'],\n}\n\nsetup(\n name='dm-haiku',\n version=_VERSION,\n url='https://github.com/deepmind/dm-haiku',\n license='Apache 2.0',\n author='DeepMind',\n description='Haiku is a library for building neural networks in JAX.',\n long_description=open('README.md').read(),\n long_description_content_type='text/markdown',\n author_email='[email protected]',\n # Contained modules and scripts.\n packages=find_namespace_packages(exclude=['*_test.py']),\n install_requires=_parse_requirements('requirements.txt'),\n extras_require=EXTRA_PACKAGES,\n tests_require=_parse_requirements('requirements-test.txt'),\n requires_python='>=3.6',\n include_package_data=True,\n zip_safe=False,\n # PyPI package information.\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Software Development :: Libraries',\n ],\n)\n",
"path": "setup.py"
}
] | [
{
"content": "# Lint as: python3\n# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Setup for pip package.\"\"\"\n\nfrom setuptools import find_namespace_packages\nfrom setuptools import setup\n\n\ndef _get_version():\n with open('haiku/__init__.py') as fp:\n for line in fp:\n if line.startswith('__version__'):\n g = {}\n exec(line, g) # pylint: disable=exec-used\n return g['__version__']\n raise ValueError('`__version__` not defined in `haiku/__init__.py`')\n\n\ndef _parse_requirements(requirements_txt_path):\n with open(requirements_txt_path) as fp:\n return fp.read().splitlines()\n\n\n_VERSION = _get_version()\n\nEXTRA_PACKAGES = {\n 'jax': ['jax>=0.1.71'],\n 'jaxlib': ['jaxlib>=0.1.49'],\n}\n\nsetup(\n name='dm-haiku',\n version=_VERSION,\n url='https://github.com/deepmind/dm-haiku',\n license='Apache 2.0',\n author='DeepMind',\n description='Haiku is a library for building neural networks in JAX.',\n long_description=open('README.md').read(),\n long_description_content_type='text/markdown',\n author_email='[email protected]',\n # Contained modules and scripts.\n packages=find_namespace_packages(exclude=['*_test.py']),\n install_requires=_parse_requirements('requirements.txt'),\n extras_require=EXTRA_PACKAGES,\n tests_require=_parse_requirements('requirements-test.txt'),\n requires_python='>=3.6',\n include_package_data=True,\n zip_safe=False,\n # PyPI package information.\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Software Development :: Libraries',\n ],\n)\n",
"path": "setup.py"
}
] | diff --git a/setup.py b/setup.py
index 0734734ec..c91e06295 100644
--- a/setup.py
+++ b/setup.py
@@ -37,8 +37,8 @@ def _parse_requirements(requirements_txt_path):
_VERSION = _get_version()
EXTRA_PACKAGES = {
- 'jax': ['jax>=0.1.55'],
- 'jaxlib': ['jaxlib>=0.1.37'],
+ 'jax': ['jax>=0.1.71'],
+ 'jaxlib': ['jaxlib>=0.1.49'],
}
setup(
|
kserve__kserve-2726 | Knative installation keeps failing in e2e tests
/kind bug
**What steps did you take and what happened:**
The e2e tests fail intermittently during the Knative installation step, more specifically while patching the configmap. The installation script needs some kind of retry mechanism so that it completes successfully.
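
To illustrate the retry idea, here is a rough sketch in Python (the real fix would live in the shell setup script; the attempt count and delay are assumptions):

```python
import subprocess
import time

# The exact patch that intermittently fails during Knative installation.
PATCH_CMD = [
    "kubectl", "patch", "cm", "config-deployment", "-n", "knative-serving",
    "--patch",
    '{"data":{"registries-skipping-tag-resolving":"nvcr.io,index.docker.io"}}',
]

def patch_with_retry(attempts: int = 3, delay: float = 15.0) -> None:
    """Retry the configmap patch to ride out transient webhook timeouts."""
    for attempt in range(1, attempts + 1):
        if subprocess.run(PATCH_CMD).returncode == 0:
            return
        print(f"patch attempt {attempt} failed, retrying in {delay}s")
        time.sleep(delay)
    raise RuntimeError("configmap patch still failing after all retries")
```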
**What did you expect to happen:**
All e2e tests should run without any issues.
**Environment:**
e2e environment
| [
{
"content": "# Copyright 2021 The KServe Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport pathlib\n\nimport setuptools\n\nTESTS_REQUIRES = [\n 'pytest',\n 'pytest-xdist',\n 'pytest-cov',\n 'pytest-asyncio',\n 'mypy',\n 'portforward',\n]\n\nwith open('requirements.txt') as f:\n REQUIRES = f.readlines()\n\nwith open(pathlib.Path(__file__).parent.parent / 'VERSION') as version_file:\n version = version_file.read().strip()\n\nsetuptools.setup(\n name='kserve',\n version=version,\n author=\"The KServe Authors\",\n author_email='[email protected], [email protected], [email protected]',\n license=\"Apache License Version 2.0\",\n url=\"https://github.com/kserve/kserve/tree/master/python/kserve\",\n description=\"KServe Python SDK\",\n long_description=\"Python SDK for KServe Server and Client.\",\n python_requires='>=3.7',\n packages=[\n 'kserve',\n 'kserve.api',\n 'kserve.constants',\n 'kserve.models',\n 'kserve.utils',\n 'kserve.protocol',\n 'kserve.protocol.rest',\n 'kserve.protocol.grpc',\n ],\n package_data={'': ['requirements.txt']},\n include_package_data=True,\n zip_safe=False,\n classifiers=[\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n install_requires=REQUIRES,\n tests_require=TESTS_REQUIRES,\n extras_require={'test': TESTS_REQUIRES}\n)\n",
"path": "python/kserve/setup.py"
}
] | [
{
"content": "# Copyright 2021 The KServe Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport pathlib\n\nimport setuptools\n\nTESTS_REQUIRES = [\n 'pytest',\n 'pytest-xdist',\n 'pytest-cov',\n 'pytest-asyncio',\n 'mypy',\n 'portforward==0.4.0',\n]\n\nwith open('requirements.txt') as f:\n REQUIRES = f.readlines()\n\nwith open(pathlib.Path(__file__).parent.parent / 'VERSION') as version_file:\n version = version_file.read().strip()\n\nsetuptools.setup(\n name='kserve',\n version=version,\n author=\"The KServe Authors\",\n author_email='[email protected], [email protected], [email protected]',\n license=\"Apache License Version 2.0\",\n url=\"https://github.com/kserve/kserve/tree/master/python/kserve\",\n description=\"KServe Python SDK\",\n long_description=\"Python SDK for KServe Server and Client.\",\n python_requires='>=3.7',\n packages=[\n 'kserve',\n 'kserve.api',\n 'kserve.constants',\n 'kserve.models',\n 'kserve.utils',\n 'kserve.protocol',\n 'kserve.protocol.rest',\n 'kserve.protocol.grpc',\n ],\n package_data={'': ['requirements.txt']},\n include_package_data=True,\n zip_safe=False,\n classifiers=[\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n install_requires=REQUIRES,\n tests_require=TESTS_REQUIRES,\n extras_require={'test': TESTS_REQUIRES}\n)\n",
"path": "python/kserve/setup.py"
}
] | diff --git a/.github/workflows/e2e-test.yml b/.github/workflows/e2e-test.yml
index e1c455b8ee5..9234cee89b7 100644
--- a/.github/workflows/e2e-test.yml
+++ b/.github/workflows/e2e-test.yml
@@ -324,6 +324,9 @@ jobs:
- uses: actions/setup-go@v2
with:
go-version: '1.17.3'
+ - uses: actions/setup-python@v4
+ with:
+ python-version: '3.9'
- uses: ./.github/actions/minikube-setup
- uses: ./.github/actions/base-download
- name: Build queue proxy extension image
diff --git a/python/kserve/setup.py b/python/kserve/setup.py
index b654c9993a8..b26ace45ddd 100644
--- a/python/kserve/setup.py
+++ b/python/kserve/setup.py
@@ -21,7 +21,7 @@
'pytest-cov',
'pytest-asyncio',
'mypy',
- 'portforward',
+ 'portforward==0.4.0',
]
with open('requirements.txt') as f:
diff --git a/test/scripts/gh-actions/setup-deps.sh b/test/scripts/gh-actions/setup-deps.sh
index bce763bcfa8..645c258abfa 100755
--- a/test/scripts/gh-actions/setup-deps.sh
+++ b/test/scripts/gh-actions/setup-deps.sh
@@ -71,11 +71,16 @@ for i in 1 2 3 ; do kubectl apply -k test/overlays/knative && break || sleep 15;
echo "Waiting for Knative to be ready ..."
kubectl wait --for=condition=Ready pods --all --timeout=300s -n knative-serving -l 'app in (webhook, activator,autoscaler,autoscaler-hpa,controller,net-istio-controller,net-istio-webhook)'
-echo "Add knative hpa..."
+# echo "Add knative hpa..."
# kubectl apply -f https://github.com/knative/serving/releases/download/knative-v1.0.0/serving-hpa.yaml
# Skip tag resolution for certain domains
-kubectl patch cm config-deployment --patch '{"data":{"registries-skipping-tag-resolving":"nvcr.io,index.docker.io"}}' -n knative-serving
+# sleep to avoid knative webhook timeout error
+sleep 5
+# Retry if configmap patch fails
+for i in 1 2 3; do
+ kubectl patch cm config-deployment --patch '{"data":{"registries-skipping-tag-resolving":"nvcr.io,index.docker.io"}}' -n knative-serving && break || sleep 15
+done
echo "Installing cert-manager ..."
kubectl create namespace cert-manager
|
mindee__doctr-123 | [docs] Enable documentation of multiple versions at once
As of now, only the latest version of the documentation would be deployed publicly. The better alternative would be:
- having the latest version by default
- having the documentation of each release accessible as well using a displayed selector
Hugging Face's transformers did the following: https://github.com/huggingface/transformers/blob/master/.circleci/deploy.sh
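
A rough sketch of that approach, building each selected git ref into its own folder of the published site (the refs, paths, and folder names below are placeholders, not the project's actual script):

```python
import subprocess

VERSIONS = {"main": "latest", "v0.1.0": "v0.1.0"}  # git ref -> output folder

for ref, folder in VERSIONS.items():
    subprocess.run(["git", "checkout", ref], check=True)
    subprocess.run(["pip", "install", "-U", "."], check=True)
    subprocess.run(
        ["sphinx-build", "docs/source", f"docs/build/{folder}", "-a"], check=True
    )
```

The landing page would then point at the stable build by default, while a small selector in the theme links to the per-version folders.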
| [
{
"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\nimport sphinx_rtd_theme\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\nsys.path.insert(0, os.path.abspath('../..'))\nimport doctr\n\n# -- Project information -----------------------------------------------------\n\nmaster_doc = 'index'\nproject = 'doctr'\ncopyright = '2021, Mindee'\nauthor = 'François-Guillaume Fernandez, Charles Gaillard, Mohamed Biaz'\n\n# The full version, including alpha/beta/rc tags\nversion = doctr.__version__\nrelease = doctr.__version__ + '-git'\n\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n\t'sphinx.ext.autodoc',\n\t'sphinx.ext.napoleon',\n\t'sphinx.ext.viewcode',\n 'sphinx.ext.coverage',\n 'sphinx.ext.mathjax',\n 'sphinxemoji.sphinxemoji', # cf. https://sphinxemojicodes.readthedocs.io/en/stable/\n 'sphinx_copybutton',\n]\n\nnapoleon_use_ivar = True\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']\n\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\nhighlight_language = 'python3'\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\nhtml_theme_options = {\n 'collapse_navigation': False,\n 'display_version': True,\n 'logo_only': False,\n}\n\n# html_logo = '_static/images/logo.png'\n\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# A list of files that should not be packed into the epub file.\nepub_exclude_files = ['search.html']\n\ndef setup(app):\n app.add_css_file('css/mindee.css')\n app.add_js_file('js/custom.js')\n",
"path": "docs/source/conf.py"
}
] | [
{
"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\nimport sphinx_rtd_theme\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\nsys.path.insert(0, os.path.abspath('../..'))\nimport doctr\n\n# -- Project information -----------------------------------------------------\n\nmaster_doc = 'index'\nproject = 'doctr'\ncopyright = '2021, Mindee'\nauthor = 'François-Guillaume Fernandez, Charles Gaillard, Mohamed Biaz'\n\n# The full version, including alpha/beta/rc tags\nversion = doctr.__version__\nrelease = doctr.__version__ + '-git'\n\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n\t'sphinx.ext.autodoc',\n\t'sphinx.ext.napoleon',\n\t'sphinx.ext.viewcode',\n 'sphinx.ext.coverage',\n 'sphinx.ext.mathjax',\n 'sphinxemoji.sphinxemoji', # cf. https://sphinxemojicodes.readthedocs.io/en/stable/\n 'sphinx_copybutton',\n]\n\nnapoleon_use_ivar = True\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']\n\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\nhighlight_language = 'python3'\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\nhtml_theme_options = {\n 'collapse_navigation': False,\n 'display_version': False,\n 'logo_only': False,\n}\n\n# html_logo = '_static/images/logo.png'\n\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# A list of files that should not be packed into the epub file.\nepub_exclude_files = ['search.html']\n\ndef setup(app):\n app.add_css_file('css/mindee.css')\n app.add_js_file('js/custom.js')\n",
"path": "docs/source/conf.py"
}
] | diff --git a/.github/workflows/doc-deploy.yaml b/.github/workflows/doc-deploy.yaml
index 6cccc30b57..498972adf1 100644
--- a/.github/workflows/doc-deploy.yaml
+++ b/.github/workflows/doc-deploy.yaml
@@ -31,8 +31,7 @@ jobs:
pip install -r docs/requirements.txt
- name: Build documentation
- run: |
- sphinx-build docs/source docs/_build -a
+ run: cd docs && bash build.sh
- name: Install SSH Client 🔑
uses: webfactory/[email protected]
@@ -43,7 +42,7 @@ jobs:
uses: JamesIves/[email protected]
with:
BRANCH: gh-pages
- FOLDER: 'docs/_build'
+ FOLDER: 'docs/build'
COMMIT_MESSAGE: '[skip ci] Documentation updates'
CLEAN: true
SSH: true
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d079c99e47..fbedf2c635 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -147,5 +147,4 @@ jobs:
pip install -r docs/requirements.txt
- name: Build documentation
- run: |
- sphinx-build docs/source docs/_build -a
+ run: cd docs && bash build.sh
diff --git a/docs/build.sh b/docs/build.sh
new file mode 100644
index 0000000000..2472f7171b
--- /dev/null
+++ b/docs/build.sh
@@ -0,0 +1,34 @@
+function deploy_doc(){
+ if [ ! -z "$1" ]
+ then
+ git checkout $1
+ fi
+ COMMIT=$(git rev-parse --short HEAD)
+ echo "Creating doc at commit" $COMMIT "and pushing to folder $2"
+ pip install -U ..
+ if [ ! -z "$2" ]
+ then
+ if [ "$2" == "latest" ]; then
+ echo "Pushing main"
+ sphinx-build source _build -a && mkdir build && mkdir build/$2 && cp -a _build/* build/$2/
+ elif ssh -oStrictHostKeyChecking=no $doc "[ -d build/$2 ]"; then
+ echo "Directory" $2 "already exists"
+ else
+ echo "Pushing version" $2
+ cp -r _static source/
+ sphinx-build source _build -a
+ mkdir build/$2 && cp -a _build/* build/$2/
+ fi
+ else
+ echo "Pushing stable"
+ cp -r _static source/
+ sphinx-build source build -a
+ fi
+}
+
+# You can find the commit for each tag on https://github.com/mindee/doctr/tags
+if [ -d build ]; then rm -Rf build; fi
+cp -r source/_static .
+deploy_doc "" latest
+deploy_doc "571af3dc" # v0.1.0 Latest stable release
+rm -rf _build _static
diff --git a/docs/source/_static/css/mindee.css b/docs/source/_static/css/mindee.css
index ff25fad37a..f3df619aa2 100644
--- a/docs/source/_static/css/mindee.css
+++ b/docs/source/_static/css/mindee.css
@@ -9,27 +9,27 @@
}
.version-button:hover, .version-button:focus {
- background-color: #bdbdbd;
+ background-color: #5eb2e6;
}
-
+
.version-dropdown {
display: none;
min-width: 160px;
overflow: auto;
font-size: 15px;
}
-
+
.version-dropdown a {
color: white;
padding: 3px 4px;
text-decoration: none;
display: block;
}
-
+
.version-dropdown a:hover {
- background-color: #bdbdbd;
+ background-color: #5eb2e6;
}
-
+
.version-show {
display: block;
}
diff --git a/docs/source/_static/js/custom.js b/docs/source/_static/js/custom.js
index 338ff97072..c837af7352 100644
--- a/docs/source/_static/js/custom.js
+++ b/docs/source/_static/js/custom.js
@@ -6,8 +6,9 @@
const stableVersion = "v0.1.0"
// Dictionary doc folder to label. The last stable version should have an empty key.
const versionMapping = {
- "main": "main",
+ "latest": "latest",
"": "v0.1.0 (stable)",
+ // "v0.1.1": "v0.1.1",
}
function addGithubButton() {
@@ -72,11 +73,12 @@ function addVersionControl() {
const div = document.createElement("div");
div.appendChild(versionButton);
div.appendChild(versionMenu);
- div.style.paddingTop = '25px';
+ div.style.paddingTop = '5px';
+ div.style.paddingBottom = '5px';
div.style.display = 'block';
div.style.textAlign = 'center';
- const scrollDiv = document.querySelector(".wy-side-scroll");
+ const scrollDiv = document.querySelector(".wy-side-nav-search");
scrollDiv.insertBefore(div, scrollDiv.children[1]);
}
@@ -91,7 +93,7 @@ function addVersionControl() {
function parseGithubButtons (){"use strict";var e=window.document,t=e.location,o=window.encodeURIComponent,r=window.decodeURIComponent,n=window.Math,a=window.HTMLElement,i=window.XMLHttpRequest,l="https://unpkg.com/[email protected]/dist/buttons.html",c=i&&i.prototype&&"withCredentials"in i.prototype,d=c&&a&&a.prototype.attachShadow&&!a.prototype.attachShadow.prototype,s=function(e,t,o){e.addEventListener?e.addEventListener(t,o):e.attachEvent("on"+t,o)},u=function(e,t,o){e.removeEventListener?e.removeEventListener(t,o):e.detachEvent("on"+t,o)},h=function(e,t,o){var r=function(n){return u(e,t,r),o(n)};s(e,t,r)},f=function(e,t,o){var r=function(n){if(t.test(e.readyState))return u(e,"readystatechange",r),o(n)};s(e,"readystatechange",r)},p=function(e){return function(t,o,r){var n=e.createElement(t);if(o)for(var a in o){var i=o[a];null!=i&&(null!=n[a]?n[a]=i:n.setAttribute(a,i))}if(r)for(var l=0,c=r.length;l<c;l++){var d=r[l];n.appendChild("string"==typeof d?e.createTextNode(d):d)}return n}},g=p(e),b=function(e){var t;return function(){t||(t=1,e.apply(this,arguments))}},m="body{margin:0}a{color:#24292e;text-decoration:none;outline:0}.octicon{display:inline-block;vertical-align:text-top;fill:currentColor}.widget{ display:inline-block;overflow:hidden;font-family:-apple-system, BlinkMacSystemFont, \"Segoe UI\", Helvetica, Arial, sans-serif;font-size:0;white-space:nowrap;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn,.social-count{display:inline-block;height:14px;padding:2px 5px;font-size:11px;font-weight:600;line-height:14px;vertical-align:bottom;cursor:pointer;border:1px solid #c5c9cc;border-radius:0.25em}.btn{background-color:#eff3f6;background-image:-webkit-linear-gradient(top, #fafbfc, #eff3f6 90%);background-image:-moz-linear-gradient(top, #fafbfc, #eff3f6 90%);background-image:linear-gradient(180deg, #fafbfc, #eff3f6 90%);background-position:-1px -1px;background-repeat:repeat-x;background-size:110% 110%;border-color:rgba(27,31,35,0.2);-ms-filter:\"progid:DXImageTransform.Microsoft.Gradient(startColorstr='#FFFAFBFC', endColorstr='#FFEEF2F5')\";*filter:progid:DXImageTransform.Microsoft.Gradient(startColorstr='#FFFAFBFC', endColorstr='#FFEEF2F5')}.btn:active{background-color:#e9ecef;background-image:none;border-color:#a5a9ac;border-color:rgba(27,31,35,0.35);box-shadow:inset 0 0.15em 0.3em rgba(27,31,35,0.15)}.btn:focus,.btn:hover{background-color:#e6ebf1;background-image:-webkit-linear-gradient(top, #f0f3f6, #e6ebf1 90%);background-image:-moz-linear-gradient(top, #f0f3f6, #e6ebf1 90%);background-image:linear-gradient(180deg, #f0f3f6, #e6ebf1 90%);border-color:#a5a9ac;border-color:rgba(27,31,35,0.35);-ms-filter:\"progid:DXImageTransform.Microsoft.Gradient(startColorstr='#FFF0F3F6', endColorstr='#FFE5EAF0')\";*filter:progid:DXImageTransform.Microsoft.Gradient(startColorstr='#FFF0F3F6', endColorstr='#FFE5EAF0')}.social-count{position:relative;margin-left:5px;background-color:#fff}.social-count:focus,.social-count:hover{color:#0366d6}.social-count b,.social-count i{position:absolute;top:50%;left:0;display:block;width:0;height:0;margin:-4px 0 0 -4px;border:solid transparent;border-width:4px 4px 4px 0;_line-height:0;_border-top-color:red !important;_border-bottom-color:red !important;_border-left-color:red !important;_filter:chroma(color=red)}.social-count b{border-right-color:#c5c9cc}.social-count i{margin-left:-3px;border-right-color:#fff}.lg .btn,.lg .social-count{height:16px;padding:5px 10px;font-size:12px;line-height:16px}.lg 
.social-count{margin-left:6px}.lg .social-count b,.lg .social-count i{margin:-5px 0 0 -5px;border-width:5px 5px 5px 0}.lg .social-count i{margin-left:-4px}\n",v={"mark-github":{width:16,height:16,path:'<path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0 0 16 8c0-4.42-3.58-8-8-8z"/>'},eye:{width:16,height:16,path:'<path fill-rule="evenodd" d="M8.06 2C3 2 0 8 0 8s3 6 8.06 6C13 14 16 8 16 8s-3-6-7.94-6zM8 12c-2.2 0-4-1.78-4-4 0-2.2 1.8-4 4-4 2.22 0 4 1.8 4 4 0 2.22-1.78 4-4 4zm2-4c0 1.11-.89 2-2 2-1.11 0-2-.89-2-2 0-1.11.89-2 2-2 1.11 0 2 .89 2 2z"/>'},star:{width:14,height:16,path:'<path fill-rule="evenodd" d="M14 6l-4.9-.64L7 1 4.9 5.36 0 6l3.6 3.26L2.67 14 7 11.67 11.33 14l-.93-4.74L14 6z"/>'},"repo-forked":{width:10,height:16,path:'<path fill-rule="evenodd" d="M8 1a1.993 1.993 0 0 0-1 3.72V6L5 8 3 6V4.72A1.993 1.993 0 0 0 2 1a1.993 1.993 0 0 0-1 3.72V6.5l3 3v1.78A1.993 1.993 0 0 0 5 15a1.993 1.993 0 0 0 1-3.72V9.5l3-3V4.72A1.993 1.993 0 0 0 8 1zM2 4.2C1.34 4.2.8 3.65.8 3c0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zm3 10c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zm3-10c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2z"/>'},"issue-opened":{width:14,height:16,path:'<path fill-rule="evenodd" d="M7 2.3c3.14 0 5.7 2.56 5.7 5.7s-2.56 5.7-5.7 5.7A5.71 5.71 0 0 1 1.3 8c0-3.14 2.56-5.7 5.7-5.7zM7 1C3.14 1 0 4.14 0 8s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm1 3H6v5h2V4zm0 6H6v2h2v-2z"/>'},"cloud-download":{width:16,height:16,path:'<path fill-rule="evenodd" d="M9 12h2l-3 3-3-3h2V7h2v5zm3-8c0-.44-.91-3-4.5-3C5.08 1 3 2.92 3 5 1.02 5 0 6.52 0 8c0 1.53 1 3 3 3h3V9.7H3C1.38 9.7 1.3 8.28 1.3 8c0-.17.05-1.7 1.7-1.7h1.3V5c0-1.39 1.56-2.7 3.2-2.7 2.55 0 3.13 1.55 3.2 1.8v1.2H12c.81 0 2.7.22 2.7 2.2 0 2.09-2.25 2.2-2.7 2.2h-2V11h2c2.08 0 4-1.16 4-3.5C16 5.06 14.08 4 12 4z"/>'}},w={},x=function(e,t,o){var r=p(e.ownerDocument),n=e.appendChild(r("style",{type:"text/css"}));n.styleSheet?n.styleSheet.cssText=m:n.appendChild(e.ownerDocument.createTextNode(m));var a,l,d=r("a",{className:"btn",href:t.href,target:"_blank",innerHTML:(a=t["data-icon"],l=/^large$/i.test(t["data-size"])?16:14,a=(""+a).toLowerCase().replace(/^octicon-/,""),{}.hasOwnProperty.call(v,a)||(a="mark-github"),'<svg version="1.1" width="'+l*v[a].width/v[a].height+'" height="'+l+'" viewBox="0 0 '+v[a].width+" "+v[a].height+'" class="octicon octicon-'+a+'" aria-hidden="true">'+v[a].path+"</svg>"),"aria-label":t["aria-label"]||void 0},[" ",r("span",{},[t["data-text"]||""])]);/\.github\.com$/.test("."+d.hostname)?/^https?:\/\/((gist\.)?github\.com\/[^\/?#]+\/[^\/?#]+\/archive\/|github\.com\/[^\/?#]+\/[^\/?#]+\/releases\/download\/|codeload\.github\.com\/)/.test(d.href)&&(d.target="_top"):(d.href="#",d.target="_self");var u,h,g,x,y=e.appendChild(r("div",{className:"widget"+(/^large$/i.test(t["data-size"])?" 
lg":"")},[d]));/^(true|1)$/i.test(t["data-show-count"])&&"github.com"===d.hostname&&(u=d.pathname.replace(/^(?!\/)/,"/").match(/^\/([^\/?#]+)(?:\/([^\/?#]+)(?:\/(?:(subscription)|(fork)|(issues)|([^\/?#]+)))?)?(?:[\/?#]|$)/))&&!u[6]?(u[2]?(h="/repos/"+u[1]+"/"+u[2],u[3]?(x="subscribers_count",g="watchers"):u[4]?(x="forks_count",g="network"):u[5]?(x="open_issues_count",g="issues"):(x="stargazers_count",g="stargazers")):(h="/users/"+u[1],g=x="followers"),function(e,t){var o=w[e]||(w[e]=[]);if(!(o.push(t)>1)){var r=b(function(){for(delete w[e];t=o.shift();)t.apply(null,arguments)});if(c){var n=new i;s(n,"abort",r),s(n,"error",r),s(n,"load",function(){var e;try{e=JSON.parse(n.responseText)}catch(e){return void r(e)}r(200!==n.status,e)}),n.open("GET",e),n.send()}else{var a=this||window;a._=function(e){a._=null,r(200!==e.meta.status,e.data)};var l=p(a.document)("script",{async:!0,src:e+(/\?/.test(e)?"&":"?")+"callback=_"}),d=function(){a._&&a._({meta:{}})};s(l,"load",d),s(l,"error",d),l.readyState&&f(l,/de|m/,d),a.document.getElementsByTagName("head")[0].appendChild(l)}}}.call(this,"https://api.github.com"+h,function(e,t){if(!e){var n=t[x];y.appendChild(r("a",{className:"social-count",href:t.html_url+"/"+g,target:"_blank","aria-label":n+" "+x.replace(/_count$/,"").replace("_"," ").slice(0,n<2?-1:void 0)+" on GitHub"},[r("b"),r("i"),r("span",{},[(""+n).replace(/\B(?=(\d{3})+(?!\d))/g,",")])]))}o&&o(y)})):o&&o(y)},y=window.devicePixelRatio||1,C=function(e){return(y>1?n.ceil(n.round(e*y)/y*2)/2:n.ceil(e))||0},F=function(e,t){e.style.width=t[0]+"px",e.style.height=t[1]+"px"},k=function(t,r){if(null!=t&&null!=r)if(t.getAttribute&&(t=function(e){for(var t={href:e.href,title:e.title,"aria-label":e.getAttribute("aria-label")},o=["icon","text","size","show-count"],r=0,n=o.length;r<n;r++){var a="data-"+o[r];t[a]=e.getAttribute(a)}return null==t["data-text"]&&(t["data-text"]=e.textContent||e.innerText),t}(t)),d){var a=g("span",{title:t.title||void 0});x(a.attachShadow({mode:"closed"}),t,function(){r(a)})}else{var i=g("iframe",{src:"javascript:0",title:t.title||void 0,allowtransparency:!0,scrolling:"no",frameBorder:0});F(i,[0,0]),i.style.border="none";var c=function(){var a,d=i.contentWindow;try{a=d.document.body}catch(t){return void e.body.appendChild(i.parentNode.removeChild(i))}u(i,"load",c),x.call(d,a,t,function(e){var a=function(e){var t=e.offsetWidth,o=e.offsetHeight;if(e.getBoundingClientRect){var r=e.getBoundingClientRect();t=n.max(t,C(r.width)),o=n.max(o,C(r.height))}return[t,o]}(e);i.parentNode.removeChild(i),h(i,"load",function(){F(i,a)}),i.src=l+"#"+(i.name=function(e){var t=[];for(var r in e){var n=e[r];null!=n&&t.push(o(r)+"="+o(n))}return t.join("&")}(t)),r(i)})};s(i,"load",c),e.body.appendChild(i)}};t.protocol+"//"+t.host+t.pathname===l?x(e.body,function(e){for(var t={},o=e.split("&"),n=0,a=o.length;n<a;n++){var i=o[n];if(""!==i){var l=i.split("=");t[r(l[0])]=null!=l[1]?r(l.slice(1).join("=")):void 0}}return t}(window.name||t.hash.replace(/^#/,""))):function(t){if(/m/.test(e.readyState)||!/g/.test(e.readyState)&&!e.documentElement.doScroll)setTimeout(t);else if(e.addEventListener){var o=b(t);h(e,"DOMContentLoaded",o),h(window,"load",o)}else f(e,/m/,t)}(function(){for(var t=e.querySelectorAll?e.querySelectorAll("a.github-button"):function(){for(var t=[],o=e.getElementsByTagName("a"),r=0,n=o.length;r<n;r++)~(" "+o[r].className+" ").replace(/[ \t\n\f\r]+/g," ").indexOf(" github-button ")&&t.push(o[r]);return 
t}(),o=0,r=t.length;o<r;o++)!function(e){k(e,function(t){e.parentNode.replaceChild(t,e)})}(t[o])})};
function onLoad() {
- // addVersionControl();
+ addVersionControl();
addGithubButton();
parseGithubButtons();
}
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 4efdcad267..45ed8b4c33 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -73,7 +73,7 @@
#
html_theme_options = {
'collapse_navigation': False,
- 'display_version': True,
+ 'display_version': False,
'logo_only': False,
}
|
rasterio__rasterio-892 | Decimated read result differs depending on GDAL version
```
$ rio insp tests/data/alpha_masked_values.tif
Rasterio 1.0a1 Interactive Inspector (Python 3.5.1)
Type "src.meta", "src.read(1)", or "help(src)" for more information.
>>> src.read(4, out_shape=(19, 19), masked=False)[-2:, 0:5]
array([[ 0, 0, 0, 255, 0],
[255, 255, 255, 255, 0]], dtype=uint8)
>>> rasterio.__version__
'1.0a1'
>>> rasterio.__gdal_version__
'1.11.5'
```
versus
```
$ rio insp tests/data/alpha_masked_values.tif
Rasterio 1.0a1 Interactive Inspector (Python 3.5.1)
Type "src.meta", "src.read(1)", or "help(src)" for more information.
>>> src.read(4, out_shape=(19, 19), masked=False)[-2:, 0:5]
array([[ 0, 0, 32, 64, 0],
[255, 255, 255, 255, 0]], dtype=uint8)
>>> rasterio.__version__
'1.0a1'
>>> rasterio.__gdal_version__
'2.1.1'
```
I'll start a new branch with a similar test so we can put it through the Travis build matrix.
cc @dnomadb @sgillies
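
For reference, a minimal sketch (not the project's test) of re-checking the decimated read once GDAL's implicit JPEG overviews are disabled, which is the config default the fix below introduces; the expectation in the comment is an assumption based on the GDAL 1.11 output above, not a verified result:
```
# Sketch only: re-run the read from the report with the new default config option.
import rasterio

with rasterio.Env(GTIFF_IMPLICIT_JPEG_OVR=False):
    with rasterio.open('tests/data/alpha_masked_values.tif') as src:
        corner = src.read(4, out_shape=(19, 19), masked=False)[-2:, 0:5]
        print(corner)  # assumed to match the GDAL 1.11.5 result once implicit overviews are ignored
```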
| [
{
"content": "\"\"\"Rasterio's GDAL/AWS environment\"\"\"\n\nimport logging\n\nfrom rasterio._drivers import (\n GDALEnv, del_gdal_config, get_gdal_config, set_gdal_config)\nfrom rasterio.dtypes import check_dtype\nfrom rasterio.errors import EnvError\nfrom rasterio.compat import string_types\nfrom rasterio.transform import guard_transform\nfrom rasterio.vfs import parse_path, vsi_path\n\n\n# The currently active GDAL/AWS environment is a private attribute.\n_env = None\n\nlog = logging.getLogger(__name__)\n\n# Rasterio defaults\ndefault_options = {\n 'CHECK_WITH_INVERT_PROJ': True\n}\n\nclass Env(object):\n \"\"\"Abstraction for GDAL and AWS configuration\n\n The GDAL library is stateful: it has a registry of format drivers,\n an error stack, and dozens of configuration options.\n\n Rasterio's approach to working with GDAL is to wrap all the state\n up using a Python context manager (see PEP 343,\n https://www.python.org/dev/peps/pep-0343/). When the context is\n entered GDAL drivers are registered, error handlers are\n configured, and configuration options are set. When the context\n is exited, drivers are removed from the registry and other\n configurations are removed.\n\n Example:\n\n with rasterio.Env(GDAL_CACHEMAX=512) as env:\n # All drivers are registered, GDAL's raster block cache\n # size is set to 512MB.\n # Commence processing...\n ...\n # End of processing.\n\n # At this point, configuration options are set to their\n # previous (possible unset) values.\n\n A boto3 session or boto3 session constructor arguments\n `aws_access_key_id`, `aws_secret_access_key`, `aws_session_token`\n may be passed to Env's constructor. In the latter case, a session\n will be created as soon as needed. AWS credentials are configured\n for GDAL as needed.\n \"\"\"\n\n def __init__(self, aws_session=None, aws_access_key_id=None,\n aws_secret_access_key=None, aws_session_token=None,\n region_name=None, profile_name=None, **options):\n \"\"\"Create a new GDAL/AWS environment.\n\n Note: this class is a context manager. GDAL isn't configured\n until the context is entered via `with rasterio.Env():`\n\n Parameters\n ----------\n aws_session: object, optional\n A boto3 session.\n aws_access_key_id: string, optional\n An access key id, as per boto3.\n aws_secret_access_key: string, optional\n A secret access key, as per boto3.\n aws_session_token: string, optional\n A session token, as per boto3.\n region_name: string, optional\n A region name, as per boto3.\n profile_name: string, optional\n A shared credentials profile name, as per boto3.\n **options: optional\n A mapping of GDAL configuration options, e.g.,\n `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`.\n\n Returns\n -------\n A new instance of Env.\n\n Note: We raise EnvError if the GDAL config options\n AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY are given. AWS\n credentials are handled exclusively by boto3.\n \"\"\"\n if ('AWS_ACCESS_KEY_ID' in options or\n 'AWS_SECRET_ACCESS_KEY' in options):\n raise EnvError(\n \"GDAL's AWS config options can not be directly set. 
\"\n \"AWS credentials are handled exclusively by boto3.\")\n self.aws_access_key_id = aws_access_key_id\n self.aws_secret_access_key = aws_secret_access_key\n self.aws_session_token = aws_session_token\n self.region_name = region_name\n self.profile_name = profile_name\n self.aws_session = aws_session\n self._creds = (\n self.aws_session._session.get_credentials()\n if self.aws_session else None)\n self.options = options.copy()\n self.previous_options = {}\n defenv()\n\n def get_aws_credentials(self):\n \"\"\"Get credentials and configure GDAL.\"\"\"\n import boto3\n options = {}\n if not self.aws_session:\n self.aws_session = boto3.Session(\n aws_access_key_id=self.aws_access_key_id,\n aws_secret_access_key=self.aws_secret_access_key,\n aws_session_token=self.aws_session_token,\n region_name=self.region_name,\n profile_name=self.profile_name)\n self._creds = self.aws_session._session.get_credentials()\n\n # Pass these credentials to the GDAL environment.\n if self._creds.access_key: # pragma: no branch\n options.update(aws_access_key_id=self._creds.access_key)\n if self._creds.secret_key: # pragma: no branch\n options.update(aws_secret_access_key=self._creds.secret_key)\n if self._creds.token:\n options.update(aws_session_token=self._creds.token)\n if self.aws_session.region_name:\n options.update(aws_region=self.aws_session.region_name)\n\n # Pass these credentials to the GDAL environment.\n defenv()\n global _env\n _env.update_config_options(**options)\n\n def drivers(self):\n \"\"\"Return a mapping of registered drivers.\"\"\"\n global _env\n return _env.drivers()\n\n def __enter__(self):\n self.previous_options = getenv()\n setenv(**self.options)\n log.debug(\"Entering env %r context\", self)\n return self\n\n def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):\n delenv()\n setenv(**self.previous_options)\n log.debug(\"Exiting env %r context\", self)\n\n\ndef defenv():\n \"\"\"Create a default environment if necessary.\"\"\"\n global _env\n if _env:\n log.debug(\"Environment %r exists\", _env)\n else:\n _env = GDALEnv()\n _env.update_config_options(**default_options)\n log.debug(\n \"New GDAL environment %r created\", _env)\n\n\ndef getenv():\n \"\"\"Get a mapping of current options.\"\"\"\n global _env\n if not _env:\n raise EnvError(\"No environment exists\")\n else:\n log.debug(\"Got a copy of environment %r options\", _env)\n return _env.options.copy()\n\n\ndef setenv(**options):\n \"\"\"Set options in the existing environment.\"\"\"\n global _env\n if not _env:\n raise EnvError(\"No environment exists\")\n else:\n _env.update_config_options(**options)\n log.debug(\"Updated existing %r with options %r\", _env, options)\n\n\ndef delenv():\n \"\"\"Delete options in the existing environment.\"\"\"\n global _env\n if not _env:\n raise EnvError(\"No environment exists\")\n else:\n _env.clear_config_options()\n log.debug(\"Cleared existing %r options\", _env)\n\n\ndef ensure_env(f):\n \"\"\"A decorator that ensures an env exists before a function\n calls any GDAL C functions.\"\"\"\n defenv()\n return f\n",
"path": "rasterio/env.py"
}
] | [
{
"content": "\"\"\"Rasterio's GDAL/AWS environment\"\"\"\n\nimport logging\n\nfrom rasterio._drivers import (\n GDALEnv, del_gdal_config, get_gdal_config, set_gdal_config)\nfrom rasterio.dtypes import check_dtype\nfrom rasterio.errors import EnvError\nfrom rasterio.compat import string_types\nfrom rasterio.transform import guard_transform\nfrom rasterio.vfs import parse_path, vsi_path\n\n\n# The currently active GDAL/AWS environment is a private attribute.\n_env = None\n\nlog = logging.getLogger(__name__)\n\n# Rasterio defaults\ndefault_options = {\n 'CHECK_WITH_INVERT_PROJ': True,\n 'GTIFF_IMPLICIT_JPEG_OVR': False\n}\n\nclass Env(object):\n \"\"\"Abstraction for GDAL and AWS configuration\n\n The GDAL library is stateful: it has a registry of format drivers,\n an error stack, and dozens of configuration options.\n\n Rasterio's approach to working with GDAL is to wrap all the state\n up using a Python context manager (see PEP 343,\n https://www.python.org/dev/peps/pep-0343/). When the context is\n entered GDAL drivers are registered, error handlers are\n configured, and configuration options are set. When the context\n is exited, drivers are removed from the registry and other\n configurations are removed.\n\n Example:\n\n with rasterio.Env(GDAL_CACHEMAX=512) as env:\n # All drivers are registered, GDAL's raster block cache\n # size is set to 512MB.\n # Commence processing...\n ...\n # End of processing.\n\n # At this point, configuration options are set to their\n # previous (possible unset) values.\n\n A boto3 session or boto3 session constructor arguments\n `aws_access_key_id`, `aws_secret_access_key`, `aws_session_token`\n may be passed to Env's constructor. In the latter case, a session\n will be created as soon as needed. AWS credentials are configured\n for GDAL as needed.\n \"\"\"\n\n def __init__(self, aws_session=None, aws_access_key_id=None,\n aws_secret_access_key=None, aws_session_token=None,\n region_name=None, profile_name=None, **options):\n \"\"\"Create a new GDAL/AWS environment.\n\n Note: this class is a context manager. GDAL isn't configured\n until the context is entered via `with rasterio.Env():`\n\n Parameters\n ----------\n aws_session: object, optional\n A boto3 session.\n aws_access_key_id: string, optional\n An access key id, as per boto3.\n aws_secret_access_key: string, optional\n A secret access key, as per boto3.\n aws_session_token: string, optional\n A session token, as per boto3.\n region_name: string, optional\n A region name, as per boto3.\n profile_name: string, optional\n A shared credentials profile name, as per boto3.\n **options: optional\n A mapping of GDAL configuration options, e.g.,\n `CPL_DEBUG=True, CHECK_WITH_INVERT_PROJ=False`.\n\n Returns\n -------\n A new instance of Env.\n\n Note: We raise EnvError if the GDAL config options\n AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY are given. AWS\n credentials are handled exclusively by boto3.\n \"\"\"\n if ('AWS_ACCESS_KEY_ID' in options or\n 'AWS_SECRET_ACCESS_KEY' in options):\n raise EnvError(\n \"GDAL's AWS config options can not be directly set. 
\"\n \"AWS credentials are handled exclusively by boto3.\")\n self.aws_access_key_id = aws_access_key_id\n self.aws_secret_access_key = aws_secret_access_key\n self.aws_session_token = aws_session_token\n self.region_name = region_name\n self.profile_name = profile_name\n self.aws_session = aws_session\n self._creds = (\n self.aws_session._session.get_credentials()\n if self.aws_session else None)\n self.options = options.copy()\n self.previous_options = {}\n defenv()\n\n def get_aws_credentials(self):\n \"\"\"Get credentials and configure GDAL.\"\"\"\n import boto3\n options = {}\n if not self.aws_session:\n self.aws_session = boto3.Session(\n aws_access_key_id=self.aws_access_key_id,\n aws_secret_access_key=self.aws_secret_access_key,\n aws_session_token=self.aws_session_token,\n region_name=self.region_name,\n profile_name=self.profile_name)\n self._creds = self.aws_session._session.get_credentials()\n\n # Pass these credentials to the GDAL environment.\n if self._creds.access_key: # pragma: no branch\n options.update(aws_access_key_id=self._creds.access_key)\n if self._creds.secret_key: # pragma: no branch\n options.update(aws_secret_access_key=self._creds.secret_key)\n if self._creds.token:\n options.update(aws_session_token=self._creds.token)\n if self.aws_session.region_name:\n options.update(aws_region=self.aws_session.region_name)\n\n # Pass these credentials to the GDAL environment.\n defenv()\n global _env\n _env.update_config_options(**options)\n\n def drivers(self):\n \"\"\"Return a mapping of registered drivers.\"\"\"\n global _env\n return _env.drivers()\n\n def __enter__(self):\n self.previous_options = getenv()\n setenv(**self.options)\n log.debug(\"Entering env %r context\", self)\n return self\n\n def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):\n delenv()\n setenv(**self.previous_options)\n log.debug(\"Exiting env %r context\", self)\n\n\ndef defenv():\n \"\"\"Create a default environment if necessary.\"\"\"\n global _env\n if _env:\n log.debug(\"Environment %r exists\", _env)\n else:\n _env = GDALEnv()\n _env.update_config_options(**default_options)\n log.debug(\n \"New GDAL environment %r created\", _env)\n\n\ndef getenv():\n \"\"\"Get a mapping of current options.\"\"\"\n global _env\n if not _env:\n raise EnvError(\"No environment exists\")\n else:\n log.debug(\"Got a copy of environment %r options\", _env)\n return _env.options.copy()\n\n\ndef setenv(**options):\n \"\"\"Set options in the existing environment.\"\"\"\n global _env\n if not _env:\n raise EnvError(\"No environment exists\")\n else:\n _env.update_config_options(**options)\n log.debug(\"Updated existing %r with options %r\", _env, options)\n\n\ndef delenv():\n \"\"\"Delete options in the existing environment.\"\"\"\n global _env\n if not _env:\n raise EnvError(\"No environment exists\")\n else:\n _env.clear_config_options()\n log.debug(\"Cleared existing %r options\", _env)\n\n\ndef ensure_env(f):\n \"\"\"A decorator that ensures an env exists before a function\n calls any GDAL C functions.\"\"\"\n defenv()\n return f\n",
"path": "rasterio/env.py"
}
] | diff --git a/rasterio/env.py b/rasterio/env.py
index 0191a2f8a..64c77e176 100644
--- a/rasterio/env.py
+++ b/rasterio/env.py
@@ -18,7 +18,8 @@
# Rasterio defaults
default_options = {
- 'CHECK_WITH_INVERT_PROJ': True
+ 'CHECK_WITH_INVERT_PROJ': True,
+ 'GTIFF_IMPLICIT_JPEG_OVR': False
}
class Env(object):
|
jazzband__pip-tools-28 | pip-review should compare version, not test equality
```
$ pip-review
pelican==3.0.1 is available (you have 3.1)
```
I'm testing this package locally, and `pip-review` only tests whether the currently installed version is equal to the latest version known to `pip`, which causes the problem shown above.
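
A minimal sketch of comparing versions by ordering rather than string equality, using `verlib` (the dependency the fix below adds); the versions are simply the ones from the report:
```
from verlib import NormalizedVersion, suggest_normalized_version

installed = NormalizedVersion(suggest_normalized_version('3.1'))
latest = NormalizedVersion(suggest_normalized_version('3.0.1'))

print(latest > installed)   # False -> no update should be offered for pelican
print(latest != installed)  # True  -> the old equality check wrongly reports an update
```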
| [
{
"content": "\"\"\"\npip-tools keeps your pinned dependencies fresh.\n\"\"\"\nimport sys\nfrom setuptools import setup\n\n\ndef get_dependencies():\n deps = []\n if sys.version_info < (2, 7):\n deps += ['argparse']\n return deps\n\n\nsetup(\n name='pip-tools',\n version='0.2.1',\n url='https://github.com/nvie/pip-tools/',\n license='BSD',\n author='Vincent Driessen',\n author_email='[email protected]',\n description=__doc__.strip('\\n'),\n #packages=[],\n scripts=['bin/pip-review', 'bin/pip-dump'],\n #include_package_data=True,\n zip_safe=False,\n platforms='any',\n install_requires=get_dependencies(),\n classifiers=[\n # As from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n #'Development Status :: 1 - Planning',\n #'Development Status :: 2 - Pre-Alpha',\n #'Development Status :: 3 - Alpha',\n 'Development Status :: 4 - Beta',\n #'Development Status :: 5 - Production/Stable',\n #'Development Status :: 6 - Mature',\n #'Development Status :: 7 - Inactive',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n #'Programming Language :: Python :: 2.3',\n #'Programming Language :: Python :: 2.4',\n #'Programming Language :: Python :: 2.5',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n #'Programming Language :: Python :: 3',\n #'Programming Language :: Python :: 3.0',\n #'Programming Language :: Python :: 3.1',\n #'Programming Language :: Python :: 3.2',\n #'Programming Language :: Python :: 3.3',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Topic :: System :: Systems Administration',\n ]\n)\n",
"path": "setup.py"
}
] | [
{
"content": "\"\"\"\npip-tools keeps your pinned dependencies fresh.\n\"\"\"\nimport sys\nfrom setuptools import setup\n\n\ndef get_dependencies():\n deps = ['verlib']\n if sys.version_info < (2, 7):\n deps += ['argparse']\n return deps\n\n\nsetup(\n name='pip-tools',\n version='0.2.1',\n url='https://github.com/nvie/pip-tools/',\n license='BSD',\n author='Vincent Driessen',\n author_email='[email protected]',\n description=__doc__.strip('\\n'),\n #packages=[],\n scripts=['bin/pip-review', 'bin/pip-dump'],\n #include_package_data=True,\n zip_safe=False,\n platforms='any',\n install_requires=get_dependencies(),\n classifiers=[\n # As from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n #'Development Status :: 1 - Planning',\n #'Development Status :: 2 - Pre-Alpha',\n #'Development Status :: 3 - Alpha',\n 'Development Status :: 4 - Beta',\n #'Development Status :: 5 - Production/Stable',\n #'Development Status :: 6 - Mature',\n #'Development Status :: 7 - Inactive',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n #'Programming Language :: Python :: 2.3',\n #'Programming Language :: Python :: 2.4',\n #'Programming Language :: Python :: 2.5',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n #'Programming Language :: Python :: 3',\n #'Programming Language :: Python :: 3.0',\n #'Programming Language :: Python :: 3.1',\n #'Programming Language :: Python :: 3.2',\n #'Programming Language :: Python :: 3.3',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Topic :: System :: Systems Administration',\n ]\n)\n",
"path": "setup.py"
}
] | diff --git a/bin/pip-review b/bin/pip-review
index 165e97a17..cd541562b 100755
--- a/bin/pip-review
+++ b/bin/pip-review
@@ -7,6 +7,7 @@ import logging
import urllib2
import json
from urllib2 import HTTPError
+from verlib import NormalizedVersion, suggest_normalized_version
try:
from subprocess import check_ouput as _check_ouput
except ImportError:
@@ -22,6 +23,10 @@ except ImportError:
raise error
return output
+
+class InvalidVersion(ValueError): pass
+
+
check_output = partial(_check_output, shell=True)
@@ -53,6 +58,21 @@ def get_pkg_info(pkg_name):
else:
raise ValueError('Package %r not found on PyPI.' % (pkg_name,))
+
+def validate_version(pkg_name, version):
+ rversion = suggest_normalized_version(version)
+ if rversion is None:
+ raise InvalidVersion('Cannot work with {name}=={version} because version '
+ 'number can\'t be normalized.'.format(name=pkg_name,
+ version=version))
+ if rversion != version:
+ logging.warning('Package "{name}" has wrong version. '
+ 'It was transformed from {vfrom} into {vto} '
+ 'for interoperability.'.format(name=pkg_name,
+ vfrom=version,
+ vto=rversion))
+ return NormalizedVersion(rversion)
+
def latest_version(pkg_name, silent=False):
try:
@@ -62,7 +82,7 @@ def latest_version(pkg_name, silent=False):
return None
else:
raise
- return info['info']['version']
+ return validate_version(pkg_name, info['info']['version'])
def get_latest_versions(pkg_names):
@@ -85,7 +105,12 @@ def get_installed_pkgs():
yield name, 'dev', True
else:
name, version = line.split('==')
- yield name, version, False
+ try:
+ version = validate_version(name, version)
+ except InvalidVersion as e:
+ logging.error(e)
+ else:
+ yield name, version, False
def setup_logging(verbose):
@@ -159,22 +184,23 @@ def main():
if latest_version is None:
logging.warning('No update information found for %s' % (pkg,))
all_ok = False
- elif latest_version != installed_version:
- if args.raw:
- logging.info('%s==%s' % (pkg, latest_version))
- else:
- if args.auto:
- update_pkg(pkg, latest_version)
+ else:
+ if latest_version > installed_version:
+ if args.raw:
+ logging.info('%s==%s' % (pkg, latest_version))
else:
- logging.info('%s==%s is available (you have %s)' % (pkg,
- latest_version, installed_version))
- if args.interactive:
- answer = ask_to_install()
- if answer in ['y', 'a']:
- update_pkg(pkg, latest_version)
- all_ok = False
- elif not args.raw:
- logging.debug('%s==%s is up-to-date' % (pkg, installed_version))
+ if args.auto:
+ update_pkg(pkg, latest_version)
+ else:
+ logging.info('%s==%s is available (you have %s)' % (pkg,
+ latest_version, installed_version))
+ if args.interactive:
+ answer = ask_to_install()
+ if answer in ['y', 'a']:
+ update_pkg(pkg, latest_version)
+ all_ok = False
+ elif not args.raw:
+ logging.debug('%s==%s is up-to-date' % (pkg, installed_version))
if all_ok and not args.raw:
logging.info('Everything up-to-date')
diff --git a/setup.py b/setup.py
index f61e3cfaa..1238487e9 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@
def get_dependencies():
- deps = []
+ deps = ['verlib']
if sys.version_info < (2, 7):
deps += ['argparse']
return deps
|
facebookresearch__habitat-lab-66 | Mistake in Agent class' docs
The `Agent` class docstring states that the user has to implement two methods: `reset` and `step`. However, if I understood correctly, there is no method named `step`; the method is called `act` instead. This is quite a tiny issue, but still.
https://github.com/facebookresearch/habitat-api/blob/c7443c39c5186e517d8a4b7c87a1b42106e4e77a/habitat/core/agent.py#L10-L17
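
For illustration only, a minimal sketch of an agent implementing the two methods the docstring should name, `reset` and `act`; the class name and the constant action index are made up:
```
from habitat.core.agent import Agent
from habitat.core.simulator import Observations


class ForwardOnlyAgent(Agent):
    """Toy agent that ignores observations and always returns the same action."""

    def reset(self) -> None:
        pass  # nothing to clear between episodes in this sketch

    def act(self, observations: Observations) -> int:
        return 0  # hypothetical action index; real values depend on the task's action space
```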
| [
{
"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom habitat.core.simulator import Observations\n\n\nclass Agent:\n \"\"\"Abstract class for defining agents which act inside Env. This abstract\n class standardizes agents to allow seamless benchmarking. To implement an\n agent the user has to implement two methods:\n\n reset\n step\n \"\"\"\n\n def reset(self) -> None:\n \"\"\"Called before starting a new episode in environment.\n \"\"\"\n raise NotImplementedError\n\n def act(self, observations: Observations) -> int:\n \"\"\"\n\n Args:\n observations: observations coming in from environment to be used\n by agent to decide action.\n\n Returns:\n action to be taken inside the environment\n \"\"\"\n raise NotImplementedError\n",
"path": "habitat/core/agent.py"
}
] | [
{
"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom habitat.core.simulator import Observations\n\n\nclass Agent:\n \"\"\"Abstract class for defining agents which act inside Env. This abstract\n class standardizes agents to allow seamless benchmarking. To implement an\n agent the user has to implement two methods:\n\n reset\n act\n \"\"\"\n\n def reset(self) -> None:\n \"\"\"Called before starting a new episode in environment.\n \"\"\"\n raise NotImplementedError\n\n def act(self, observations: Observations) -> int:\n \"\"\"\n\n Args:\n observations: observations coming in from environment to be used\n by agent to decide action.\n\n Returns:\n action to be taken inside the environment\n \"\"\"\n raise NotImplementedError\n",
"path": "habitat/core/agent.py"
}
] | diff --git a/habitat/core/agent.py b/habitat/core/agent.py
index eab2c4395e..bb39009207 100644
--- a/habitat/core/agent.py
+++ b/habitat/core/agent.py
@@ -13,7 +13,7 @@ class standardizes agents to allow seamless benchmarking. To implement an
agent the user has to implement two methods:
reset
- step
+ act
"""
def reset(self) -> None:
|
google-parfait__tensorflow-federated-1748 | Error in loading the GLDv2 dataset
Hello! Thanks for the fantastic library. I have run into an error while loading the GLDv2 dataset via TFF.
Please see the snippet below:
```
>>> import tensorflow_federated as tff
>>> dataset = tff.simulation.datasets.gldv2.load_data()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/path/to/python3.8/site-packages/tensorflow_federated/python/simulation/datasets/gldv2.py", line 396, in load_data
qh = logging.handlers.QueueHandler(q)
AttributeError: module 'logging' has no attribute 'handlers'
```
## The root cause of this error
The error appears to be caused by a missing import in `gldv2.py`. We can isolate it to the following lines, which occur within `gldv2.py`. Compare this snippet, which does not work,
```
>>> import logging, multiprocessing
>>> q = multiprocessing.Queue(-1)
>>> qh = logging.handlers.QueueHandler(q)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: module 'logging' has no attribute 'handlers'
```
to this one which does
```
>>> import logging, multiprocessing
>>> import logging.handlers # Add this import statement
>>> q = multiprocessing.Queue(-1)
>>> qh = logging.handlers.QueueHandler(q) # works!
```
## Fixing the error
Adding an import statement `import logging.handlers` in `gldv2.py` fixes the issue -- I've tested this out locally. I can send a pull request.
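
For completeness, a sketch of what the corrected import header of `gldv2.py` looks like; only the `logging.handlers` line is new relative to the current module:
```
import collections
import logging
import logging.handlers  # explicit submodule import so logging.handlers.QueueHandler resolves
import multiprocessing.pool
```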
**Environment (please complete the following information):**
* OS Platform and Distribution: Ubuntu 20.04
* Python package versions (e.g., TensorFlow Federated, TensorFlow): TFF: 0.19.0 and TF: 2.5.0. The same error also occurs in the nightly version: TFF: 0.19.0.dev20210821 and TF: 2.7.0-dev20210822.
* Python version: 3.8
* Bazel version (if building from source): N/A (installed via pip)
* CUDA/cuDNN version: N/A
* What TensorFlow Federated execution stack are you using? simulation
**Expected behavior**
I expect the data to be loaded correctly.
| [
{
"content": "# Copyright 2020, The TensorFlow Federated Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Libraries for the federated Google Landmark v2 dataset for simulation.\"\"\"\n\nimport collections\nimport logging\nimport multiprocessing.pool\nimport os\nimport shutil\nimport sys\nimport tempfile\nimport traceback\n\nfrom typing import Dict\nfrom typing import List\nfrom typing import Set\nfrom typing import Tuple\n\nimport tensorflow as tf\n\nfrom tensorflow_federated.python.simulation.datasets import vision_datasets_utils\nfrom tensorflow_federated.python.simulation.datasets.client_data import ClientData\n\nFED_GLD_SPLIT_FILE_BUNDLE = 'landmarks-user-160k'\nFED_GLD_SPLIT_FILE_DOWNLOAD_URL = 'http://storage.googleapis.com/gresearch/federated-vision-datasets/%s.zip' % FED_GLD_SPLIT_FILE_BUNDLE\nFED_GLD_SPLIT_FILE_BUNDLE_MD5_CHECKSUM = '53c36bd7d5fc12f927af2820b7e4a57c'\nFED_GLD_TRAIN_SPLIT_FILE = 'federated_train.csv'\nFED_GLD_TEST_SPLIT_FILE = 'test.csv'\nGLD_SHARD_BASE_URL = 'https://s3.amazonaws.com/google-landmark'\nNUM_SHARD_TRAIN = 500\nMINI_GLD_TRAIN_DOWNLOAD_URL = 'https://storage.googleapis.com/tff-datasets-public/mini_gld_train_split.csv'\nMINI_GLD_TRAIN_SPLIT_FILE = 'mini_gld_train_split.csv'\nMINI_GLD_TEST_DOWNLOAD_URL = 'https://storage.googleapis.com/tff-datasets-public/mini_gld_test.csv'\nMINI_GLD_TEST_SPLIT_FILE = 'mini_gld_test.csv'\nMINI_GLD_TRAIN_SPLIT_FILE_MD5_CHECKSUM = '9fd62cf79a67046fdd673d3a0ac52841'\nMINI_GLD_TEST_SPLIT_FILE_MD5_CHECKSUM = '298e9d19d66357236f66fe8e22920933'\nFED_GLD_CACHE = 'gld160k'\nMINI_GLD_CACHE = 'gld23k'\nTRAIN_SUB_DIR = 'train'\nTEST_FILE_NAME = 'test.tfRecord'\nLOGGER = 'gldv2'\n\n\ndef _listener_process(queue: multiprocessing.Queue, log_file: str):\n \"\"\"Sets up a separate process for handling logging messages.\n\n This setup is required because without it, the logging messages will be\n duplicated when multiple processes are created for downloading GLD dataset.\n\n Args:\n queue: The queue to receive logging messages.\n log_file: The file which the messages will be written to.\n \"\"\"\n root = logging.getLogger()\n h = logging.FileHandler(log_file)\n fmt = logging.Formatter(\n fmt='%(asctime)s %(levelname)-8s %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\n h.setFormatter(fmt)\n root.addHandler(h)\n while True:\n try:\n record = queue.get()\n # We send None as signal to stop\n if record is None:\n break\n logger = logging.getLogger(record.name)\n logger.handle(record)\n except Exception: # pylint: disable=broad-except\n print('Something went wrong:', file=sys.stderr)\n traceback.print_exc(file=sys.stderr)\n\n\ndef _create_dataset_with_mapping(\n image_dir: str, mapping: List[Dict[str, str]]) -> List[tf.train.Example]:\n \"\"\"Builds a dataset based on the mapping file and the images in the image dir.\n\n Args:\n image_dir: The directory contains the image files.\n mapping: A list of dictionaries. 
Each dictionary contains 'image_id' and\n 'class' columns.\n\n Returns:\n A list of `tf.train.Example`.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n examples = []\n for row in mapping:\n img_path = os.path.join(image_dir, '%s.jpg' % row['image_id'])\n try:\n with open(img_path, 'rb') as f:\n img_bytes = f.read()\n examples.append(\n vision_datasets_utils.create_example(img_bytes, int(row['class'])))\n except IOError as e:\n logger.warning('Image %s is not found. Exception: %s', img_path, e)\n continue\n return examples\n\n\ndef _create_train_data_files(cache_dir: str, image_dir: str, mapping_file: str):\n \"\"\"Create the train data and persist it into a separate file per user.\n\n Args:\n cache_dir: The directory caching the intermediate results.\n image_dir: The directory containing all the downloaded images.\n mapping_file: The file containing 'image_id' to 'class' mappings.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n if not os.path.isdir(image_dir):\n logger.error('Image directory %s does not exist', image_dir)\n raise ValueError('%s does not exist or is not a directory' % image_dir)\n\n mapping_table = vision_datasets_utils.read_csv(mapping_file)\n expected_cols = ['user_id', 'image_id', 'class']\n if not all(col in mapping_table[0].keys() for col in expected_cols):\n logger.error('%s has wrong format.', mapping_file)\n raise ValueError(\n 'The mapping file must contain user_id, image_id and class columns. '\n 'The existing columns are %s' % ','.join(mapping_table[0].keys()))\n if not os.path.exists(cache_dir):\n os.makedirs(cache_dir)\n mapping_per_user = collections.defaultdict(list)\n for row in mapping_table:\n user_id = row['user_id']\n mapping_per_user[user_id].append(row)\n for user_id, data in mapping_per_user.items():\n examples = _create_dataset_with_mapping(image_dir, data)\n with tf.io.TFRecordWriter(os.path.join(cache_dir, str(user_id))) as writer:\n for example in examples:\n writer.write(example.SerializeToString())\n logger.info('Created tfrecord file for user %s with %d examples, at %s',\n user_id, len(examples), cache_dir)\n\n\ndef _create_test_data_file(cache_dir: str, image_dir: str, mapping_file: str):\n \"\"\"Create the test data and persist it into a file.\n\n Args:\n cache_dir: The directory caching the intermediate results.\n image_dir: The directory containing all the downloaded images.\n mapping_file: The file containing 'image_id' to 'class' mappings.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n if not os.path.isdir(image_dir):\n logger.error('Image directory %s does not exist', image_dir)\n raise ValueError('%s does not exist or is not a directory' % image_dir)\n mapping_table = vision_datasets_utils.read_csv(mapping_file)\n expected_cols = ['image_id', 'class']\n if not all(col in mapping_table[0].keys() for col in expected_cols):\n logger.error('%s has wrong format.', mapping_file)\n raise ValueError(\n 'The mapping file must contain image_id and class columns. 
The existing'\n ' columns are %s' % ','.join(mapping_table[0].keys()))\n if not os.path.exists(cache_dir):\n os.makedirs(cache_dir)\n examples = _create_dataset_with_mapping(image_dir, mapping_table)\n with tf.io.TFRecordWriter(os.path.join(cache_dir, TEST_FILE_NAME)) as writer:\n for example in examples:\n writer.write(example.SerializeToString())\n logger.info('Created tfrecord file at %s', cache_dir)\n\n\ndef _create_federated_gld_dataset(\n cache_dir: str, image_dir: str, train_mapping_file: str,\n test_mapping_file: str) -> Tuple[ClientData, tf.data.Dataset]:\n \"\"\"Generate fedreated GLDv2 dataset with the downloaded images.\n\n Args:\n cache_dir: The directory for caching the intermediate results.\n image_dir: The directory that contains the filtered images.\n train_mapping_file: The mapping file for the train set.\n test_mapping_file: The mapping file for the test set.\n\n Returns:\n A tuple of `(ClientData, tf.data.Dataset)`.\n \"\"\"\n\n _create_train_data_files(\n cache_dir=os.path.join(cache_dir, FED_GLD_CACHE, TRAIN_SUB_DIR),\n image_dir=image_dir,\n mapping_file=train_mapping_file)\n _create_test_data_file(\n cache_dir=os.path.join(cache_dir, FED_GLD_CACHE),\n image_dir=image_dir,\n mapping_file=test_mapping_file)\n return vision_datasets_utils.load_data_from_cache(\n os.path.join(cache_dir, FED_GLD_CACHE), LOGGER)\n\n\ndef _create_mini_gld_dataset(\n cache_dir: str, image_dir: str) -> Tuple[ClientData, tf.data.Dataset]:\n \"\"\"Generate mini federated GLDv2 dataset with the downloaded images.\n\n Args:\n cache_dir: The directory for caching the intermediate results.\n image_dir: The directory that contains the filtered images.\n\n Returns:\n A tuple of `ClientData`, `tf.data.Dataset`.\n \"\"\"\n train_path = tf.keras.utils.get_file(\n MINI_GLD_TRAIN_SPLIT_FILE,\n origin=MINI_GLD_TRAIN_DOWNLOAD_URL,\n file_hash=MINI_GLD_TRAIN_SPLIT_FILE_MD5_CHECKSUM,\n hash_algorithm='md5',\n cache_dir=cache_dir)\n test_path = tf.keras.utils.get_file(\n MINI_GLD_TEST_SPLIT_FILE,\n origin=MINI_GLD_TEST_DOWNLOAD_URL,\n file_hash=MINI_GLD_TEST_SPLIT_FILE_MD5_CHECKSUM,\n hash_algorithm='md5',\n cache_dir=cache_dir)\n _create_train_data_files(\n cache_dir=os.path.join(cache_dir, MINI_GLD_CACHE, TRAIN_SUB_DIR),\n image_dir=image_dir,\n mapping_file=train_path)\n _create_test_data_file(\n cache_dir=os.path.join(cache_dir, MINI_GLD_CACHE),\n image_dir=image_dir,\n mapping_file=test_path)\n return vision_datasets_utils.load_data_from_cache(\n os.path.join(cache_dir, MINI_GLD_CACHE), LOGGER)\n\n\ndef _filter_images(shard: int, all_images: Set[str], image_dir: str,\n base_url: str):\n \"\"\"Download full GLDv2 dataset, only keep images that are included in the federated gld v2 dataset.\n\n Args:\n shard: The shard of the GLDv2 dataset.\n all_images: A set which contains all images included in the federated GLD\n dataset.\n image_dir: The directory to keep all filtered images.\n base_url: The base url for downloading GLD v2 dataset images.\n\n Raises:\n IOError: when failed to download checksum.\n \"\"\"\n shard_str = '%03d' % shard\n images_tar_url = '%s/train/images_%s.tar' % (base_url, shard_str)\n images_md5_url = '%s/md5sum/train/md5.images_%s.txt' % (base_url, shard_str)\n with tempfile.TemporaryDirectory() as tmp_dir:\n logger = logging.getLogger(LOGGER)\n logger.info('Start to download checksum for shard %s', shard_str)\n md5_path = tf.keras.utils.get_file(\n 'images_md5_%s.txt' % shard_str,\n origin=images_md5_url,\n cache_dir=tmp_dir)\n with open(md5_path, 'r') as f:\n md5_hash = f.read()\n if 
not md5_hash:\n msg = 'Failed to download checksum for shard %s.' % shard_str\n logger.info(msg)\n raise IOError(msg)\n logger.info('Downloaded checksum for shard %s successfully.', shard_str)\n logger.info('Start to download data for shard %s', shard_str)\n tf.keras.utils.get_file(\n 'images_%s.tar' % shard_str,\n origin=images_tar_url,\n file_hash=md5_hash,\n hash_algorithm='md5',\n extract=True,\n cache_dir=tmp_dir)\n logger.info('Data for shard %s was downloaded successfully.', shard_str)\n count = 0\n for root, _, files in os.walk(tmp_dir):\n for filename in files:\n name, extension = os.path.splitext(filename)\n if extension == '.jpg' and name in all_images:\n count += 1\n shutil.copyfile(\n os.path.join(root, filename), os.path.join(image_dir, filename))\n logger.info('Moved %d images from shard %s to %s', count, shard_str,\n image_dir)\n\n\ndef _download_data(\n num_worker: int, cache_dir: str, base_url: str\n) -> Tuple[ClientData, tf.data.Dataset, ClientData, tf.data.Dataset]:\n \"\"\"Create a `tff.simulation.datasets.ClientData` for the chosen data split.\n\n Download the entire GLD v2 dataset, subset the dataset to only include the\n images in the federated GLD v2 dataset, and create both gld23k and gld160k\n datasets.\n\n Args:\n num_worker: The number of threads for downloading the GLD v2 dataset.\n cache_dir: The directory for caching temporary results.\n base_url: The base url for downloading GLD images.\n\n Returns:\n A tuple of `tff.simulation.datasets.ClientData`, `tf.data.Dataset`.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n logger.info('Start to download fed gldv2 mapping files')\n path = tf.keras.utils.get_file(\n '%s.zip' % FED_GLD_SPLIT_FILE_BUNDLE,\n origin=FED_GLD_SPLIT_FILE_DOWNLOAD_URL,\n file_hash=FED_GLD_SPLIT_FILE_BUNDLE_MD5_CHECKSUM,\n hash_algorithm='md5',\n extract=True,\n archive_format='zip',\n cache_dir=cache_dir)\n logger.info('Fed gldv2 mapping files are downloaded successfully.')\n base_path = os.path.dirname(path)\n train_path = os.path.join(base_path, FED_GLD_SPLIT_FILE_BUNDLE,\n FED_GLD_TRAIN_SPLIT_FILE)\n test_path = os.path.join(base_path, FED_GLD_SPLIT_FILE_BUNDLE,\n FED_GLD_TEST_SPLIT_FILE)\n train_mapping = vision_datasets_utils.read_csv(train_path)\n test_mapping = vision_datasets_utils.read_csv(test_path)\n all_images = set()\n all_images.update([row['image_id'] for row in train_mapping],\n [row['image_id'] for row in test_mapping])\n image_dir = os.path.join(cache_dir, 'images')\n if not os.path.exists(image_dir):\n os.mkdir(image_dir)\n logger.info('Start to download GLDv2 dataset.')\n with multiprocessing.pool.ThreadPool(num_worker) as pool:\n train_args = [\n (i, all_images, image_dir, base_url) for i in range(NUM_SHARD_TRAIN)\n ]\n pool.starmap(_filter_images, train_args)\n\n logger.info('Finish downloading GLDv2 dataset.')\n fed_gld_train, fed_gld_test = _create_federated_gld_dataset(\n cache_dir, image_dir, train_path, test_path)\n mini_gld_train, mini_gld_test = _create_mini_gld_dataset(cache_dir, image_dir)\n\n return fed_gld_train, fed_gld_test, mini_gld_train, mini_gld_test\n\n\ndef load_data(num_worker: int = 1,\n cache_dir: str = 'cache',\n gld23k: bool = False,\n base_url: str = GLD_SHARD_BASE_URL):\n \"\"\"Loads a federated version of the Google Landmark v2 dataset.\n\n The dataset consists of photos of various world landmarks, with images\n grouped by photographer to achieve a federated partitioning of the data.\n The dataset is downloaded and cached locally. 
If previously downloaded, it\n tries to load the dataset from cache.\n\n The `tf.data.Datasets` returned by\n `tff.simulation.datasets.ClientData.create_tf_dataset_for_client` will yield\n `collections.OrderedDict` objects at each iteration, with the following keys\n and values:\n\n - `'image/decoded'`: A `tf.Tensor` with `dtype=tf.uint8` that\n corresponds to the pixels of the landmark images.\n - `'class'`: A `tf.Tensor` with `dtype=tf.int64` and shape [1],\n corresponding to the class label of the landmark ([0, 203) for gld23k,\n [0, 2028) for gld160k).\n\n Two flavors of GLD datasets are available. When gld23k is true, a minimum\n version of the federated Google landmark dataset will be provided for faster\n iterations. The gld23k dataset contains 203 classes, 233 clients and 23080\n images. When gld23k is false, the gld160k dataset\n (https://arxiv.org/abs/2003.08082) will be provided. The gld160k dataset\n contains 2,028 classes, 1262 clients and 164,172 images.\n\n Args:\n num_worker: (Optional) The number of threads for downloading the GLD v2\n dataset.\n cache_dir: (Optional) The directory to cache the downloaded file. If `None`,\n caches in Keras' default cache directory.\n gld23k: (Optional) When true, a smaller version of the federated Google\n Landmark v2 dataset will be loaded. This gld23k dataset is used for faster\n prototyping.\n base_url: (Optional) The base url to download GLD v2 image shards.\n\n Returns:\n Tuple of (train, test) where the tuple elements are\n a `tff.simulation.datasets.ClientData` and a `tf.data.Dataset`.\n \"\"\"\n if not os.path.exists(cache_dir):\n os.mkdir(cache_dir)\n q = multiprocessing.Queue(-1)\n listener = multiprocessing.Process(\n target=_listener_process,\n args=(q, os.path.join(cache_dir, 'load_data.log')))\n listener.start()\n logger = logging.getLogger(LOGGER)\n qh = logging.handlers.QueueHandler(q)\n logger.addHandler(qh)\n logger.info('Start to load data.')\n if gld23k:\n existing_data_cache = os.path.join(cache_dir, MINI_GLD_CACHE)\n else:\n existing_data_cache = os.path.join(cache_dir, FED_GLD_CACHE)\n try:\n logger.info('Try loading dataset from cache')\n return vision_datasets_utils.load_data_from_cache(existing_data_cache,\n TRAIN_SUB_DIR,\n TEST_FILE_NAME, LOGGER)\n except Exception: # pylint: disable=broad-except\n logger.info('Loading from cache failed, start to download the data.')\n fed_gld_train, fed_gld_test, mini_gld_train, mini_gld_test = _download_data(\n num_worker, cache_dir, base_url)\n finally:\n q.put_nowait(None)\n listener.join()\n if gld23k:\n return mini_gld_train, mini_gld_test\n else:\n return fed_gld_train, fed_gld_test\n",
"path": "tensorflow_federated/python/simulation/datasets/gldv2.py"
}
] | [
{
"content": "# Copyright 2020, The TensorFlow Federated Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Libraries for the federated Google Landmark v2 dataset for simulation.\"\"\"\n\nimport collections\nimport logging\nimport logging.handlers\nimport multiprocessing.pool\nimport os\nimport shutil\nimport sys\nimport tempfile\nimport traceback\n\nfrom typing import Dict\nfrom typing import List\nfrom typing import Set\nfrom typing import Tuple\n\nimport tensorflow as tf\n\nfrom tensorflow_federated.python.simulation.datasets import vision_datasets_utils\nfrom tensorflow_federated.python.simulation.datasets.client_data import ClientData\n\nFED_GLD_SPLIT_FILE_BUNDLE = 'landmarks-user-160k'\nFED_GLD_SPLIT_FILE_DOWNLOAD_URL = 'http://storage.googleapis.com/gresearch/federated-vision-datasets/%s.zip' % FED_GLD_SPLIT_FILE_BUNDLE\nFED_GLD_SPLIT_FILE_BUNDLE_MD5_CHECKSUM = '53c36bd7d5fc12f927af2820b7e4a57c'\nFED_GLD_TRAIN_SPLIT_FILE = 'federated_train.csv'\nFED_GLD_TEST_SPLIT_FILE = 'test.csv'\nGLD_SHARD_BASE_URL = 'https://s3.amazonaws.com/google-landmark'\nNUM_SHARD_TRAIN = 500\nMINI_GLD_TRAIN_DOWNLOAD_URL = 'https://storage.googleapis.com/tff-datasets-public/mini_gld_train_split.csv'\nMINI_GLD_TRAIN_SPLIT_FILE = 'mini_gld_train_split.csv'\nMINI_GLD_TEST_DOWNLOAD_URL = 'https://storage.googleapis.com/tff-datasets-public/mini_gld_test.csv'\nMINI_GLD_TEST_SPLIT_FILE = 'mini_gld_test.csv'\nMINI_GLD_TRAIN_SPLIT_FILE_MD5_CHECKSUM = '9fd62cf79a67046fdd673d3a0ac52841'\nMINI_GLD_TEST_SPLIT_FILE_MD5_CHECKSUM = '298e9d19d66357236f66fe8e22920933'\nFED_GLD_CACHE = 'gld160k'\nMINI_GLD_CACHE = 'gld23k'\nTRAIN_SUB_DIR = 'train'\nTEST_FILE_NAME = 'test.tfRecord'\nLOGGER = 'gldv2'\n\n\ndef _listener_process(queue: multiprocessing.Queue, log_file: str):\n \"\"\"Sets up a separate process for handling logging messages.\n\n This setup is required because without it, the logging messages will be\n duplicated when multiple processes are created for downloading GLD dataset.\n\n Args:\n queue: The queue to receive logging messages.\n log_file: The file which the messages will be written to.\n \"\"\"\n root = logging.getLogger()\n h = logging.FileHandler(log_file)\n fmt = logging.Formatter(\n fmt='%(asctime)s %(levelname)-8s %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\n h.setFormatter(fmt)\n root.addHandler(h)\n while True:\n try:\n record = queue.get()\n # We send None as signal to stop\n if record is None:\n break\n logger = logging.getLogger(record.name)\n logger.handle(record)\n except Exception: # pylint: disable=broad-except\n print('Something went wrong:', file=sys.stderr)\n traceback.print_exc(file=sys.stderr)\n\n\ndef _create_dataset_with_mapping(\n image_dir: str, mapping: List[Dict[str, str]]) -> List[tf.train.Example]:\n \"\"\"Builds a dataset based on the mapping file and the images in the image dir.\n\n Args:\n image_dir: The directory contains the image files.\n mapping: A list of dictionaries. 
Each dictionary contains 'image_id' and\n 'class' columns.\n\n Returns:\n A list of `tf.train.Example`.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n examples = []\n for row in mapping:\n img_path = os.path.join(image_dir, '%s.jpg' % row['image_id'])\n try:\n with open(img_path, 'rb') as f:\n img_bytes = f.read()\n examples.append(\n vision_datasets_utils.create_example(img_bytes, int(row['class'])))\n except IOError as e:\n logger.warning('Image %s is not found. Exception: %s', img_path, e)\n continue\n return examples\n\n\ndef _create_train_data_files(cache_dir: str, image_dir: str, mapping_file: str):\n \"\"\"Create the train data and persist it into a separate file per user.\n\n Args:\n cache_dir: The directory caching the intermediate results.\n image_dir: The directory containing all the downloaded images.\n mapping_file: The file containing 'image_id' to 'class' mappings.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n if not os.path.isdir(image_dir):\n logger.error('Image directory %s does not exist', image_dir)\n raise ValueError('%s does not exist or is not a directory' % image_dir)\n\n mapping_table = vision_datasets_utils.read_csv(mapping_file)\n expected_cols = ['user_id', 'image_id', 'class']\n if not all(col in mapping_table[0].keys() for col in expected_cols):\n logger.error('%s has wrong format.', mapping_file)\n raise ValueError(\n 'The mapping file must contain user_id, image_id and class columns. '\n 'The existing columns are %s' % ','.join(mapping_table[0].keys()))\n if not os.path.exists(cache_dir):\n os.makedirs(cache_dir)\n mapping_per_user = collections.defaultdict(list)\n for row in mapping_table:\n user_id = row['user_id']\n mapping_per_user[user_id].append(row)\n for user_id, data in mapping_per_user.items():\n examples = _create_dataset_with_mapping(image_dir, data)\n with tf.io.TFRecordWriter(os.path.join(cache_dir, str(user_id))) as writer:\n for example in examples:\n writer.write(example.SerializeToString())\n logger.info('Created tfrecord file for user %s with %d examples, at %s',\n user_id, len(examples), cache_dir)\n\n\ndef _create_test_data_file(cache_dir: str, image_dir: str, mapping_file: str):\n \"\"\"Create the test data and persist it into a file.\n\n Args:\n cache_dir: The directory caching the intermediate results.\n image_dir: The directory containing all the downloaded images.\n mapping_file: The file containing 'image_id' to 'class' mappings.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n if not os.path.isdir(image_dir):\n logger.error('Image directory %s does not exist', image_dir)\n raise ValueError('%s does not exist or is not a directory' % image_dir)\n mapping_table = vision_datasets_utils.read_csv(mapping_file)\n expected_cols = ['image_id', 'class']\n if not all(col in mapping_table[0].keys() for col in expected_cols):\n logger.error('%s has wrong format.', mapping_file)\n raise ValueError(\n 'The mapping file must contain image_id and class columns. 
The existing'\n ' columns are %s' % ','.join(mapping_table[0].keys()))\n if not os.path.exists(cache_dir):\n os.makedirs(cache_dir)\n examples = _create_dataset_with_mapping(image_dir, mapping_table)\n with tf.io.TFRecordWriter(os.path.join(cache_dir, TEST_FILE_NAME)) as writer:\n for example in examples:\n writer.write(example.SerializeToString())\n logger.info('Created tfrecord file at %s', cache_dir)\n\n\ndef _create_federated_gld_dataset(\n cache_dir: str, image_dir: str, train_mapping_file: str,\n test_mapping_file: str) -> Tuple[ClientData, tf.data.Dataset]:\n \"\"\"Generate fedreated GLDv2 dataset with the downloaded images.\n\n Args:\n cache_dir: The directory for caching the intermediate results.\n image_dir: The directory that contains the filtered images.\n train_mapping_file: The mapping file for the train set.\n test_mapping_file: The mapping file for the test set.\n\n Returns:\n A tuple of `(ClientData, tf.data.Dataset)`.\n \"\"\"\n\n _create_train_data_files(\n cache_dir=os.path.join(cache_dir, FED_GLD_CACHE, TRAIN_SUB_DIR),\n image_dir=image_dir,\n mapping_file=train_mapping_file)\n _create_test_data_file(\n cache_dir=os.path.join(cache_dir, FED_GLD_CACHE),\n image_dir=image_dir,\n mapping_file=test_mapping_file)\n return vision_datasets_utils.load_data_from_cache(\n os.path.join(cache_dir, FED_GLD_CACHE), LOGGER)\n\n\ndef _create_mini_gld_dataset(\n cache_dir: str, image_dir: str) -> Tuple[ClientData, tf.data.Dataset]:\n \"\"\"Generate mini federated GLDv2 dataset with the downloaded images.\n\n Args:\n cache_dir: The directory for caching the intermediate results.\n image_dir: The directory that contains the filtered images.\n\n Returns:\n A tuple of `ClientData`, `tf.data.Dataset`.\n \"\"\"\n train_path = tf.keras.utils.get_file(\n MINI_GLD_TRAIN_SPLIT_FILE,\n origin=MINI_GLD_TRAIN_DOWNLOAD_URL,\n file_hash=MINI_GLD_TRAIN_SPLIT_FILE_MD5_CHECKSUM,\n hash_algorithm='md5',\n cache_dir=cache_dir)\n test_path = tf.keras.utils.get_file(\n MINI_GLD_TEST_SPLIT_FILE,\n origin=MINI_GLD_TEST_DOWNLOAD_URL,\n file_hash=MINI_GLD_TEST_SPLIT_FILE_MD5_CHECKSUM,\n hash_algorithm='md5',\n cache_dir=cache_dir)\n _create_train_data_files(\n cache_dir=os.path.join(cache_dir, MINI_GLD_CACHE, TRAIN_SUB_DIR),\n image_dir=image_dir,\n mapping_file=train_path)\n _create_test_data_file(\n cache_dir=os.path.join(cache_dir, MINI_GLD_CACHE),\n image_dir=image_dir,\n mapping_file=test_path)\n return vision_datasets_utils.load_data_from_cache(\n os.path.join(cache_dir, MINI_GLD_CACHE), LOGGER)\n\n\ndef _filter_images(shard: int, all_images: Set[str], image_dir: str,\n base_url: str):\n \"\"\"Download full GLDv2 dataset, only keep images that are included in the federated gld v2 dataset.\n\n Args:\n shard: The shard of the GLDv2 dataset.\n all_images: A set which contains all images included in the federated GLD\n dataset.\n image_dir: The directory to keep all filtered images.\n base_url: The base url for downloading GLD v2 dataset images.\n\n Raises:\n IOError: when failed to download checksum.\n \"\"\"\n shard_str = '%03d' % shard\n images_tar_url = '%s/train/images_%s.tar' % (base_url, shard_str)\n images_md5_url = '%s/md5sum/train/md5.images_%s.txt' % (base_url, shard_str)\n with tempfile.TemporaryDirectory() as tmp_dir:\n logger = logging.getLogger(LOGGER)\n logger.info('Start to download checksum for shard %s', shard_str)\n md5_path = tf.keras.utils.get_file(\n 'images_md5_%s.txt' % shard_str,\n origin=images_md5_url,\n cache_dir=tmp_dir)\n with open(md5_path, 'r') as f:\n md5_hash = f.read()\n if 
not md5_hash:\n msg = 'Failed to download checksum for shard %s.' % shard_str\n logger.info(msg)\n raise IOError(msg)\n logger.info('Downloaded checksum for shard %s successfully.', shard_str)\n logger.info('Start to download data for shard %s', shard_str)\n tf.keras.utils.get_file(\n 'images_%s.tar' % shard_str,\n origin=images_tar_url,\n file_hash=md5_hash,\n hash_algorithm='md5',\n extract=True,\n cache_dir=tmp_dir)\n logger.info('Data for shard %s was downloaded successfully.', shard_str)\n count = 0\n for root, _, files in os.walk(tmp_dir):\n for filename in files:\n name, extension = os.path.splitext(filename)\n if extension == '.jpg' and name in all_images:\n count += 1\n shutil.copyfile(\n os.path.join(root, filename), os.path.join(image_dir, filename))\n logger.info('Moved %d images from shard %s to %s', count, shard_str,\n image_dir)\n\n\ndef _download_data(\n num_worker: int, cache_dir: str, base_url: str\n) -> Tuple[ClientData, tf.data.Dataset, ClientData, tf.data.Dataset]:\n \"\"\"Create a `tff.simulation.datasets.ClientData` for the chosen data split.\n\n Download the entire GLD v2 dataset, subset the dataset to only include the\n images in the federated GLD v2 dataset, and create both gld23k and gld160k\n datasets.\n\n Args:\n num_worker: The number of threads for downloading the GLD v2 dataset.\n cache_dir: The directory for caching temporary results.\n base_url: The base url for downloading GLD images.\n\n Returns:\n A tuple of `tff.simulation.datasets.ClientData`, `tf.data.Dataset`.\n \"\"\"\n logger = logging.getLogger(LOGGER)\n logger.info('Start to download fed gldv2 mapping files')\n path = tf.keras.utils.get_file(\n '%s.zip' % FED_GLD_SPLIT_FILE_BUNDLE,\n origin=FED_GLD_SPLIT_FILE_DOWNLOAD_URL,\n file_hash=FED_GLD_SPLIT_FILE_BUNDLE_MD5_CHECKSUM,\n hash_algorithm='md5',\n extract=True,\n archive_format='zip',\n cache_dir=cache_dir)\n logger.info('Fed gldv2 mapping files are downloaded successfully.')\n base_path = os.path.dirname(path)\n train_path = os.path.join(base_path, FED_GLD_SPLIT_FILE_BUNDLE,\n FED_GLD_TRAIN_SPLIT_FILE)\n test_path = os.path.join(base_path, FED_GLD_SPLIT_FILE_BUNDLE,\n FED_GLD_TEST_SPLIT_FILE)\n train_mapping = vision_datasets_utils.read_csv(train_path)\n test_mapping = vision_datasets_utils.read_csv(test_path)\n all_images = set()\n all_images.update([row['image_id'] for row in train_mapping],\n [row['image_id'] for row in test_mapping])\n image_dir = os.path.join(cache_dir, 'images')\n if not os.path.exists(image_dir):\n os.mkdir(image_dir)\n logger.info('Start to download GLDv2 dataset.')\n with multiprocessing.pool.ThreadPool(num_worker) as pool:\n train_args = [\n (i, all_images, image_dir, base_url) for i in range(NUM_SHARD_TRAIN)\n ]\n pool.starmap(_filter_images, train_args)\n\n logger.info('Finish downloading GLDv2 dataset.')\n fed_gld_train, fed_gld_test = _create_federated_gld_dataset(\n cache_dir, image_dir, train_path, test_path)\n mini_gld_train, mini_gld_test = _create_mini_gld_dataset(cache_dir, image_dir)\n\n return fed_gld_train, fed_gld_test, mini_gld_train, mini_gld_test\n\n\ndef load_data(num_worker: int = 1,\n cache_dir: str = 'cache',\n gld23k: bool = False,\n base_url: str = GLD_SHARD_BASE_URL):\n \"\"\"Loads a federated version of the Google Landmark v2 dataset.\n\n The dataset consists of photos of various world landmarks, with images\n grouped by photographer to achieve a federated partitioning of the data.\n The dataset is downloaded and cached locally. 
If previously downloaded, it\n tries to load the dataset from cache.\n\n The `tf.data.Datasets` returned by\n `tff.simulation.datasets.ClientData.create_tf_dataset_for_client` will yield\n `collections.OrderedDict` objects at each iteration, with the following keys\n and values:\n\n - `'image/decoded'`: A `tf.Tensor` with `dtype=tf.uint8` that\n corresponds to the pixels of the landmark images.\n - `'class'`: A `tf.Tensor` with `dtype=tf.int64` and shape [1],\n corresponding to the class label of the landmark ([0, 203) for gld23k,\n [0, 2028) for gld160k).\n\n Two flavors of GLD datasets are available. When gld23k is true, a minimum\n version of the federated Google landmark dataset will be provided for faster\n iterations. The gld23k dataset contains 203 classes, 233 clients and 23080\n images. When gld23k is false, the gld160k dataset\n (https://arxiv.org/abs/2003.08082) will be provided. The gld160k dataset\n contains 2,028 classes, 1262 clients and 164,172 images.\n\n Args:\n num_worker: (Optional) The number of threads for downloading the GLD v2\n dataset.\n cache_dir: (Optional) The directory to cache the downloaded file. If `None`,\n caches in Keras' default cache directory.\n gld23k: (Optional) When true, a smaller version of the federated Google\n Landmark v2 dataset will be loaded. This gld23k dataset is used for faster\n prototyping.\n base_url: (Optional) The base url to download GLD v2 image shards.\n\n Returns:\n Tuple of (train, test) where the tuple elements are\n a `tff.simulation.datasets.ClientData` and a `tf.data.Dataset`.\n \"\"\"\n if not os.path.exists(cache_dir):\n os.mkdir(cache_dir)\n q = multiprocessing.Queue(-1)\n listener = multiprocessing.Process(\n target=_listener_process,\n args=(q, os.path.join(cache_dir, 'load_data.log')))\n listener.start()\n logger = logging.getLogger(LOGGER)\n qh = logging.handlers.QueueHandler(q)\n logger.addHandler(qh)\n logger.info('Start to load data.')\n if gld23k:\n existing_data_cache = os.path.join(cache_dir, MINI_GLD_CACHE)\n else:\n existing_data_cache = os.path.join(cache_dir, FED_GLD_CACHE)\n try:\n logger.info('Try loading dataset from cache')\n return vision_datasets_utils.load_data_from_cache(existing_data_cache,\n TRAIN_SUB_DIR,\n TEST_FILE_NAME, LOGGER)\n except Exception: # pylint: disable=broad-except\n logger.info('Loading from cache failed, start to download the data.')\n fed_gld_train, fed_gld_test, mini_gld_train, mini_gld_test = _download_data(\n num_worker, cache_dir, base_url)\n finally:\n q.put_nowait(None)\n listener.join()\n if gld23k:\n return mini_gld_train, mini_gld_test\n else:\n return fed_gld_train, fed_gld_test\n",
"path": "tensorflow_federated/python/simulation/datasets/gldv2.py"
}
] | diff --git a/tensorflow_federated/python/simulation/datasets/gldv2.py b/tensorflow_federated/python/simulation/datasets/gldv2.py
index ac782fb409..ec76f69466 100644
--- a/tensorflow_federated/python/simulation/datasets/gldv2.py
+++ b/tensorflow_federated/python/simulation/datasets/gldv2.py
@@ -15,6 +15,7 @@
import collections
import logging
+import logging.handlers
import multiprocessing.pool
import os
import shutil
|
open-mmlab__mmsegmentation-77 | CUDA error: an illegal memory access was encountered
```python
sys.platform: linux
Python: 3.7.7 (default, May 7 2020, 21:25:33) [GCC 7.3.0]
CUDA available: True
CUDA_HOME: /usr/local/cuda
NVCC: Cuda compilation tools, release 10.0, V10.0.130
GPU 0,1: GeForce GTX 1080 Ti
GCC: gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0
PyTorch: 1.4.0
PyTorch compiling details: PyTorch built with:
- GCC 7.3
- Intel(R) Math Kernel Library Version 2020.0.1 Product Build 20200208 for Intel(R) 64 architecture applications
- Intel(R) MKL-DNN v0.21.1 (Git Hash 7d2fd500bc78936d1d648ca713b901012f470dbc)
- OpenMP 201511 (a.k.a. OpenMP 4.5)
- NNPACK is enabled
- CUDA Runtime 10.0
- NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_37,code=compute_37
- CuDNN 7.6.3
- Magma 2.5.1
- Build settings: BLAS=MKL, BUILD_NAMEDTENSOR=OFF, BUILD_TYPE=Release, CXX_FLAGS= -Wno-deprecated -fvisibility-inlines-hidden -fopenmp -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -O2 -fPIC -Wno-narrowing -Wall -Wextra -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-sign-compare -Wno-unused-parameter -Wno-unused-variable -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Wno-stringop-overflow, DISABLE_NUMA=1, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, USE_CUDA=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_STATIC_DISPATCH=OFF,
TorchVision: 0.5.0
OpenCV: 4.2.0
MMCV: 1.0.4
MMSegmentation: 0.5.0+b57fb2b
MMCV Compiler: GCC 7.5
MMCV CUDA Compiler: 10.0
```
The error was encountered during the training process with the following config:
```python
Config:
norm_cfg = dict(type='BN', requires_grad=True)
model = dict(
type='EncoderDecoder',
pretrained='open-mmlab://resnet50_v1c',
backbone=dict(
type='ResNetV1c',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
dilations=(1, 1, 2, 4),
strides=(1, 2, 1, 1),
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=False,
style='pytorch',
contract_dilation=True),
decode_head=dict(
type='PSPHead',
in_channels=2048,
in_index=3,
channels=512,
pool_scales=(1, 2, 3, 6),
dropout_ratio=0.1,
num_classes=9,
norm_cfg=dict(type='BN', requires_grad=True),
align_corners=False,
loss_decode=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
auxiliary_head=dict(
type='FCNHead',
in_channels=1024,
in_index=2,
channels=256,
num_convs=1,
concat_input=False,
dropout_ratio=0.1,
num_classes=9,
norm_cfg=dict(type='BN', requires_grad=True),
align_corners=False,
loss_decode=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.4)))
train_cfg = dict()
test_cfg = dict(mode='whole')
dataset_type = 'Aircraft'
data_root = '/mmdetection_aircraft/data/segm2/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
crop_size = (512, 512)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations'),
dict(type='Resize', img_scale=(640, 480), ratio_range=(0.5, 2.0)),
dict(type='RandomCrop', crop_size=(512, 512), cat_max_ratio=0.75),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='PhotoMetricDistortion'),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='Pad', size=(512, 512), pad_val=0, seg_pad_val=255),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_semantic_seg'])
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(640, 480),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img'])
])
]
data = dict(
samples_per_gpu=2,
workers_per_gpu=1,
train=dict(
type='Aircraft',
data_root='/mmdetection_aircraft/data/segm2/',
img_dir='JPEGImages',
ann_dir='PaletteClass',
pipeline=[
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations'),
dict(type='Resize', img_scale=(640, 480), ratio_range=(0.5, 2.0)),
dict(type='RandomCrop', crop_size=(512, 512), cat_max_ratio=0.75),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='PhotoMetricDistortion'),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='Pad', size=(512, 512), pad_val=0, seg_pad_val=255),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_semantic_seg'])
],
split='train.txt'),
val=dict(
type='Aircraft',
data_root='/mmdetection_aircraft/data/segm2/',
img_dir='JPEGImages',
ann_dir='PaletteClass',
pipeline=[
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(640, 480),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img'])
])
],
split='val.txt'),
test=dict(
type='Aircraft',
data_root='/mmdetection_aircraft/data/segm2/',
img_dir='JPEGImages',
ann_dir='PaletteClass',
pipeline=[
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(640, 480),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(
type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img'])
])
],
split='val.txt'))
log_config = dict(
interval=1, hooks=[dict(type='TextLoggerHook', by_epoch=False)])
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = 'checkpoints/pspnet_r50-d8_512x1024_40k_cityscapes_20200605_003338-2966598c.pth'
resume_from = None
workflow = [('train', 1)]
cudnn_benchmark = True
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0005)
optimizer_config = dict()
lr_config = dict(policy='poly', power=0.9, min_lr=0.0001, by_epoch=False)
total_iters = 400
checkpoint_config = dict(by_epoch=False, interval=200)
evaluation = dict(interval=1, metric='mIoU')
work_dir = './work_dirs/pspnet'
seed = 0
gpu_ids = [1]
```
The script takes approximately 4-5GB of the 11GB of available GPU memory and returns this error:
#ERROR
```python
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-8-fec2661e1f4c> in <module>
16 mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
17 train_segmentor(model, datasets, cfg, distributed=False, validate=True,
---> 18 meta=dict())
~/mmsegmentation/mmseg/apis/train.py in train_segmentor(model, dataset, cfg, distributed, validate, timestamp, meta)
104 elif cfg.load_from:
105 runner.load_checkpoint(cfg.load_from)
--> 106 runner.run(data_loaders, cfg.workflow, cfg.total_iters)
~/miniconda3/envs/open-mmlab/lib/python3.7/site-packages/mmcv/runner/iter_based_runner.py in run(self, data_loaders, workflow, max_iters, **kwargs)
117 if mode == 'train' and self.iter >= max_iters:
118 break
--> 119 iter_runner(iter_loaders[i], **kwargs)
120
121 time.sleep(1) # wait for some hooks like loggers to finish
~/miniconda3/envs/open-mmlab/lib/python3.7/site-packages/mmcv/runner/iter_based_runner.py in train(self, data_loader, **kwargs)
53 self.call_hook('before_train_iter')
54 data_batch = next(data_loader)
---> 55 outputs = self.model.train_step(data_batch, self.optimizer, **kwargs)
56 if not isinstance(outputs, dict):
57 raise TypeError('model.train_step() must return a dict')
~/miniconda3/envs/open-mmlab/lib/python3.7/site-packages/mmcv/parallel/data_parallel.py in train_step(self, *inputs, **kwargs)
29
30 inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids)
---> 31 return self.module.train_step(*inputs[0], **kwargs[0])
32
33 def val_step(self, *inputs, **kwargs):
~/mmsegmentation/mmseg/models/segmentors/base.py in train_step(self, data_batch, optimizer, **kwargs)
150 #data_batch['gt_semantic_seg'] = data_batch['gt_semantic_seg'][:,:,:,:,0]
151 #print(data_batch['gt_semantic_seg'].shape)
--> 152 losses = self.forward_train(**data_batch, **kwargs)
153 loss, log_vars = self._parse_losses(losses)
154
~/mmsegmentation/mmseg/models/segmentors/encoder_decoder.py in forward_train(self, img, img_metas, gt_semantic_seg)
155
156 loss_decode = self._decode_head_forward_train(x, img_metas,
--> 157 gt_semantic_seg)
158 losses.update(loss_decode)
159
~/mmsegmentation/mmseg/models/segmentors/encoder_decoder.py in _decode_head_forward_train(self, x, img_metas, gt_semantic_seg)
99 loss_decode = self.decode_head.forward_train(x, img_metas,
100 gt_semantic_seg,
--> 101 self.train_cfg)
102
103 losses.update(add_prefix(loss_decode, 'decode'))
~/mmsegmentation/mmseg/models/decode_heads/decode_head.py in forward_train(self, inputs, img_metas, gt_semantic_seg, train_cfg)
184 """
185 seg_logits = self.forward(inputs)
--> 186 losses = self.losses(seg_logits, gt_semantic_seg)
187 return losses
188
~/miniconda3/envs/open-mmlab/lib/python3.7/site-packages/mmcv/runner/fp16_utils.py in new_func(*args, **kwargs)
162 'method of nn.Module')
163 if not (hasattr(args[0], 'fp16_enabled') and args[0].fp16_enabled):
--> 164 return old_func(*args, **kwargs)
165 # get the arg spec of the decorated method
166 args_info = getfullargspec(old_func)
~/mmsegmentation/mmseg/models/decode_heads/decode_head.py in losses(self, seg_logit, seg_label)
229 seg_label,
230 weight=seg_weight,
--> 231 ignore_index=self.ignore_index)
232 loss['acc_seg'] = accuracy(seg_logit, seg_label)
233 return loss
~/miniconda3/envs/open-mmlab/lib/python3.7/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
530 result = self._slow_forward(*input, **kwargs)
531 else:
--> 532 result = self.forward(*input, **kwargs)
533 for hook in self._forward_hooks.values():
534 hook_result = hook(self, input, result)
~/mmsegmentation/mmseg/models/losses/cross_entropy_loss.py in forward(self, cls_score, label, weight, avg_factor, reduction_override, **kwargs)
175 class_weight=class_weight,
176 reduction=reduction,
--> 177 avg_factor=avg_factor)
178 return loss_cls
~/mmsegmentation/mmseg/models/losses/cross_entropy_loss.py in cross_entropy(pred, label, weight, class_weight, reduction, avg_factor, ignore_index)
28 weight = weight.float()
29 loss = weight_reduce_loss(
---> 30 loss, weight=weight, reduction=reduction, avg_factor=avg_factor)
31
32 return loss
~/mmsegmentation/mmseg/models/losses/utils.py in weight_reduce_loss(loss, weight, reduction, avg_factor)
45 # if avg_factor is not specified, just reduce the loss
46 if avg_factor is None:
---> 47 loss = reduce_loss(loss, reduction)
48 else:
49 # if reduction is mean, then average the loss by avg_factor
~/mmsegmentation/mmseg/models/losses/utils.py in reduce_loss(loss, reduction)
19 return loss
20 elif reduction_enum == 1:
---> 21 return loss.mean()
22 elif reduction_enum == 2:
23 return loss.sum()
RuntimeError: CUDA error: an illegal memory access was encountered
```
But if I reduce the image size by a factor of two, with the same number of images per GPU (2), the script takes approximately 2GB of GPU memory and everything works fine.
Also, I want to add that with another PyTorch script using my own DataLoader, I am able to fully utilize the GPU (11GB) during training with the same Torch version and the same hardware.
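For reference, one likely cause of an illegal memory access inside the loss computation is annotation pixel values falling outside `[0, num_classes - 1]` (the documentation note added in the fix calls this out). Below is a minimal, hypothetical sanity check for a palette-mode annotation directory; the `ANN_DIR` path and `NUM_CLASSES = 9` are assumptions taken from the config above, not part of the original report.
```python
import os

import numpy as np
from PIL import Image

ANN_DIR = '/mmdetection_aircraft/data/segm2/PaletteClass'  # assumed annotation directory from the config
NUM_CLASSES = 9      # must match decode_head.num_classes
IGNORE_INDEX = 255   # seg_pad_val / ignore_index used by the loss

for fname in sorted(os.listdir(ANN_DIR)):
    ann = np.array(Image.open(os.path.join(ANN_DIR, fname)))
    labels = np.unique(ann)
    bad = labels[(labels >= NUM_CLASSES) & (labels != IGNORE_INDEX)]
    if bad.size:
        print(f'{fname}: out-of-range labels {bad.tolist()}')
```
Any file reported by this check would need its labels remapped (or `num_classes` raised) before training.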
| [
{
"content": "import mmcv\n\nfrom .version import __version__, version_info\n\nMMCV_MIN = '1.0.5'\nMMCV_MAX = '1.0.5'\n\n\ndef digit_version(version_str):\n digit_version = []\n for x in version_str.split('.'):\n if x.isdigit():\n digit_version.append(int(x))\n elif x.find('rc') != -1:\n patch_version = x.split('rc')\n digit_version.append(int(patch_version[0]) - 1)\n digit_version.append(int(patch_version[1]))\n return digit_version\n\n\nmmcv_min_version = digit_version(MMCV_MIN)\nmmcv_max_version = digit_version(MMCV_MAX)\nmmcv_version = digit_version(mmcv.__version__)\n\n\nassert (mmcv_min_version <= mmcv_version <= mmcv_max_version), \\\n f'MMCV=={mmcv.__version__} is used but incompatible. ' \\\n f'Please install mmcv>={mmcv_min_version}, <={mmcv_max_version}.'\n\n__all__ = ['__version__', 'version_info']\n",
"path": "mmseg/__init__.py"
}
] | [
{
"content": "import mmcv\n\nfrom .version import __version__, version_info\n\nMMCV_MIN = '1.0.5'\nMMCV_MAX = '1.1.0'\n\n\ndef digit_version(version_str):\n digit_version = []\n for x in version_str.split('.'):\n if x.isdigit():\n digit_version.append(int(x))\n elif x.find('rc') != -1:\n patch_version = x.split('rc')\n digit_version.append(int(patch_version[0]) - 1)\n digit_version.append(int(patch_version[1]))\n return digit_version\n\n\nmmcv_min_version = digit_version(MMCV_MIN)\nmmcv_max_version = digit_version(MMCV_MAX)\nmmcv_version = digit_version(mmcv.__version__)\n\n\nassert (mmcv_min_version <= mmcv_version <= mmcv_max_version), \\\n f'MMCV=={mmcv.__version__} is used but incompatible. ' \\\n f'Please install mmcv>={mmcv_min_version}, <={mmcv_max_version}.'\n\n__all__ = ['__version__', 'version_info']\n",
"path": "mmseg/__init__.py"
}
] | diff --git a/docs/tutorials/new_dataset.md b/docs/tutorials/new_dataset.md
index 0ad1019e0e..6118904765 100644
--- a/docs/tutorials/new_dataset.md
+++ b/docs/tutorials/new_dataset.md
@@ -38,6 +38,9 @@ Only
`data/my_dataset/ann_dir/train/xxx{seg_map_suffix}`,
`data/my_dataset/ann_dir/train/zzz{seg_map_suffix}` will be loaded.
+Note: The annotations are images of shape (H, W), the value pixel should fall in range `[0, num_classes - 1]`.
+You may use `'P'` mode of [pillow](https://pillow.readthedocs.io/en/stable/handbook/concepts.html#palette) to create your annotation image with color.
+
## Customize datasets by mixing dataset
MMSegmentation also supports to mix dataset for training.
diff --git a/mmseg/__init__.py b/mmseg/__init__.py
index 11376951e9..abaee58890 100644
--- a/mmseg/__init__.py
+++ b/mmseg/__init__.py
@@ -3,7 +3,7 @@
from .version import __version__, version_info
MMCV_MIN = '1.0.5'
-MMCV_MAX = '1.0.5'
+MMCV_MAX = '1.1.0'
def digit_version(version_str):
|
keras-team__keras-637 | Misiing import in list_pictures
`list_pictures` abborts with error `NameError: global name 're' is not defined`
| [
{
"content": "from __future__ import absolute_import\n\nimport numpy as np\nfrom scipy import ndimage\nfrom scipy import linalg\n\nfrom os import listdir\nfrom os.path import isfile, join\nimport random, math\nfrom six.moves import range\n\n'''\n Fairly basic set of tools for realtime data augmentation on image data.\n Can easily be extended to include new transforms, new preprocessing methods, etc...\n'''\n\ndef random_rotation(x, rg, fill_mode=\"nearest\", cval=0.):\n angle = random.uniform(-rg, rg)\n x = ndimage.interpolation.rotate(x, angle, axes=(1,2), reshape=False, mode=fill_mode, cval=cval)\n return x\n\ndef random_shift(x, wrg, hrg, fill_mode=\"nearest\", cval=0.):\n crop_left_pixels = 0\n crop_right_pixels = 0\n crop_top_pixels = 0\n crop_bottom_pixels = 0\n\n original_w = x.shape[1]\n original_h = x.shape[2]\n\n if wrg:\n crop = random.uniform(0., wrg)\n split = random.uniform(0, 1)\n crop_left_pixels = int(split*crop*x.shape[1])\n crop_right_pixels = int((1-split)*crop*x.shape[1])\n\n if hrg:\n crop = random.uniform(0., hrg)\n split = random.uniform(0, 1)\n crop_top_pixels = int(split*crop*x.shape[2])\n crop_bottom_pixels = int((1-split)*crop*x.shape[2])\n\n x = ndimage.interpolation.shift(x, (0, crop_left_pixels, crop_top_pixels), mode=fill_mode, cval=cval)\n return x\n\ndef horizontal_flip(x):\n for i in range(x.shape[0]):\n x[i] = np.fliplr(x[i])\n return x\n\ndef vertical_flip(x):\n for i in range(x.shape[0]):\n x[i] = np.flipud(x[i])\n return x\n\n\ndef random_barrel_transform(x, intensity):\n # TODO\n pass\n\ndef random_shear(x, intensity):\n # TODO\n pass\n\ndef random_channel_shift(x, rg):\n # TODO\n pass\n\ndef random_zoom(x, rg, fill_mode=\"nearest\", cval=0.):\n zoom_w = random.uniform(1.-rg, 1.)\n zoom_h = random.uniform(1.-rg, 1.)\n x = ndimage.interpolation.zoom(x, zoom=(1., zoom_w, zoom_h), mode=fill_mode, cval=cval)\n return x # shape of result will be different from shape of input!\n\n\n\n\ndef array_to_img(x, scale=True):\n from PIL import Image\n x = x.transpose(1, 2, 0) \n if scale:\n x += max(-np.min(x), 0)\n x /= np.max(x)\n x *= 255\n if x.shape[2] == 3:\n # RGB\n return Image.fromarray(x.astype(\"uint8\"), \"RGB\")\n else:\n # grayscale\n return Image.fromarray(x[:,:,0].astype(\"uint8\"), \"L\")\n\n\ndef img_to_array(img):\n x = np.asarray(img, dtype='float32')\n if len(x.shape)==3:\n # RGB: height, width, channel -> channel, height, width\n x = x.transpose(2, 0, 1)\n else:\n # grayscale: height, width -> channel, height, width\n x = x.reshape((1, x.shape[0], x.shape[1]))\n return x\n\n\ndef load_img(path, grayscale=False):\n from PIL import Image\n img = Image.open(open(path))\n if grayscale:\n img = img.convert('L')\n else: # Assure 3 channel even when loaded image is grayscale\n img = img.convert('RGB')\n return img\n\n\ndef list_pictures(directory, ext='jpg|jpeg|bmp|png'):\n return [join(directory,f) for f in listdir(directory) \\\n if isfile(join(directory,f)) and re.match('([\\w]+\\.(?:' + ext + '))', f)]\n\n\n\nclass ImageDataGenerator(object):\n '''\n Generate minibatches with \n realtime data augmentation.\n '''\n def __init__(self, \n featurewise_center=True, # set input mean to 0 over the dataset\n samplewise_center=False, # set each sample mean to 0\n featurewise_std_normalization=True, # divide inputs by std of the dataset\n samplewise_std_normalization=False, # divide each input by its std\n\n zca_whitening=False, # apply ZCA whitening\n rotation_range=0., # degrees (0 to 180)\n width_shift_range=0., # fraction of total width\n 
height_shift_range=0., # fraction of total height\n horizontal_flip=False,\n vertical_flip=False,\n ):\n self.__dict__.update(locals())\n self.mean = None\n self.std = None\n self.principal_components = None\n\n\n def flow(self, X, y, batch_size=32, shuffle=False, seed=None, save_to_dir=None, save_prefix=\"\", save_format=\"jpeg\"):\n if seed:\n random.seed(seed)\n\n if shuffle:\n seed = random.randint(1, 10e6)\n np.random.seed(seed)\n np.random.shuffle(X)\n np.random.seed(seed)\n np.random.shuffle(y)\n\n nb_batch = int(math.ceil(float(X.shape[0])/batch_size))\n for b in range(nb_batch):\n batch_end = (b+1)*batch_size\n if batch_end > X.shape[0]:\n nb_samples = X.shape[0] - b*batch_size\n else:\n nb_samples = batch_size\n\n bX = np.zeros(tuple([nb_samples]+list(X.shape)[1:]))\n for i in range(nb_samples):\n x = X[b*batch_size+i]\n x = self.random_transform(x.astype(\"float32\"))\n x = self.standardize(x)\n bX[i] = x\n\n if save_to_dir:\n for i in range(nb_samples):\n img = array_to_img(bX[i], scale=True)\n img.save(save_to_dir + \"/\" + save_prefix + \"_\" + str(i) + \".\" + save_format)\n\n yield bX, y[b*batch_size:b*batch_size+nb_samples]\n\n\n def standardize(self, x):\n if self.featurewise_center:\n x -= self.mean\n if self.featurewise_std_normalization:\n x /= self.std\n\n if self.zca_whitening:\n flatx = np.reshape(x, (x.shape[0]*x.shape[1]*x.shape[2]))\n whitex = np.dot(flatx, self.principal_components)\n x = np.reshape(whitex, (x.shape[0], x.shape[1], x.shape[2]))\n\n if self.samplewise_center:\n x -= np.mean(x)\n if self.samplewise_std_normalization:\n x /= np.std(x)\n\n return x\n\n\n def random_transform(self, x):\n if self.rotation_range:\n x = random_rotation(x, self.rotation_range)\n if self.width_shift_range or self.height_shift_range:\n x = random_shift(x, self.width_shift_range, self.height_shift_range)\n if self.horizontal_flip:\n if random.random() < 0.5:\n x = horizontal_flip(x)\n if self.vertical_flip:\n if random.random() < 0.5:\n x = vertical_flip(x)\n\n # TODO:\n # zoom\n # barrel/fisheye\n # shearing\n # channel shifting\n return x\n\n\n def fit(self, X, \n augment=False, # fit on randomly augmented samples\n rounds=1, # if augment, how many augmentation passes over the data do we use\n seed=None\n ):\n '''\n Required for featurewise_center, featurewise_std_normalization and zca_whitening.\n '''\n X = np.copy(X)\n \n if augment:\n aX = np.zeros(tuple([rounds*X.shape[0]]+list(X.shape)[1:]))\n for r in range(rounds):\n for i in range(X.shape[0]):\n img = array_to_img(X[i])\n img = self.random_transform(img)\n aX[i+r*X.shape[0]] = img_to_array(img)\n X = aX\n\n if self.featurewise_center:\n self.mean = np.mean(X, axis=0)\n X -= self.mean\n if self.featurewise_std_normalization:\n self.std = np.std(X, axis=0)\n X /= self.std\n\n if self.zca_whitening:\n flatX = np.reshape(X, (X.shape[0], X.shape[1]*X.shape[2]*X.shape[3]))\n fudge = 10e-6\n sigma = np.dot(flatX.T, flatX) / flatX.shape[1]\n U, S, V = linalg.svd(sigma)\n self.principal_components = np.dot(np.dot(U, np.diag(1. / np.sqrt(S + fudge))), U.T)\n\n\n",
"path": "keras/preprocessing/image.py"
}
] | [
{
"content": "from __future__ import absolute_import\n\nimport numpy as np\nimport re\nfrom scipy import ndimage\nfrom scipy import linalg\n\nfrom os import listdir\nfrom os.path import isfile, join\nimport random, math\nfrom six.moves import range\n\n'''\n Fairly basic set of tools for realtime data augmentation on image data.\n Can easily be extended to include new transforms, new preprocessing methods, etc...\n'''\n\ndef random_rotation(x, rg, fill_mode=\"nearest\", cval=0.):\n angle = random.uniform(-rg, rg)\n x = ndimage.interpolation.rotate(x, angle, axes=(1,2), reshape=False, mode=fill_mode, cval=cval)\n return x\n\ndef random_shift(x, wrg, hrg, fill_mode=\"nearest\", cval=0.):\n crop_left_pixels = 0\n crop_right_pixels = 0\n crop_top_pixels = 0\n crop_bottom_pixels = 0\n\n original_w = x.shape[1]\n original_h = x.shape[2]\n\n if wrg:\n crop = random.uniform(0., wrg)\n split = random.uniform(0, 1)\n crop_left_pixels = int(split*crop*x.shape[1])\n crop_right_pixels = int((1-split)*crop*x.shape[1])\n\n if hrg:\n crop = random.uniform(0., hrg)\n split = random.uniform(0, 1)\n crop_top_pixels = int(split*crop*x.shape[2])\n crop_bottom_pixels = int((1-split)*crop*x.shape[2])\n\n x = ndimage.interpolation.shift(x, (0, crop_left_pixels, crop_top_pixels), mode=fill_mode, cval=cval)\n return x\n\ndef horizontal_flip(x):\n for i in range(x.shape[0]):\n x[i] = np.fliplr(x[i])\n return x\n\ndef vertical_flip(x):\n for i in range(x.shape[0]):\n x[i] = np.flipud(x[i])\n return x\n\n\ndef random_barrel_transform(x, intensity):\n # TODO\n pass\n\ndef random_shear(x, intensity):\n # TODO\n pass\n\ndef random_channel_shift(x, rg):\n # TODO\n pass\n\ndef random_zoom(x, rg, fill_mode=\"nearest\", cval=0.):\n zoom_w = random.uniform(1.-rg, 1.)\n zoom_h = random.uniform(1.-rg, 1.)\n x = ndimage.interpolation.zoom(x, zoom=(1., zoom_w, zoom_h), mode=fill_mode, cval=cval)\n return x # shape of result will be different from shape of input!\n\n\n\n\ndef array_to_img(x, scale=True):\n from PIL import Image\n x = x.transpose(1, 2, 0) \n if scale:\n x += max(-np.min(x), 0)\n x /= np.max(x)\n x *= 255\n if x.shape[2] == 3:\n # RGB\n return Image.fromarray(x.astype(\"uint8\"), \"RGB\")\n else:\n # grayscale\n return Image.fromarray(x[:,:,0].astype(\"uint8\"), \"L\")\n\n\ndef img_to_array(img):\n x = np.asarray(img, dtype='float32')\n if len(x.shape)==3:\n # RGB: height, width, channel -> channel, height, width\n x = x.transpose(2, 0, 1)\n else:\n # grayscale: height, width -> channel, height, width\n x = x.reshape((1, x.shape[0], x.shape[1]))\n return x\n\n\ndef load_img(path, grayscale=False):\n from PIL import Image\n img = Image.open(open(path))\n if grayscale:\n img = img.convert('L')\n else: # Assure 3 channel even when loaded image is grayscale\n img = img.convert('RGB')\n return img\n\n\ndef list_pictures(directory, ext='jpg|jpeg|bmp|png'):\n return [join(directory,f) for f in listdir(directory) \\\n if isfile(join(directory,f)) and re.match('([\\w]+\\.(?:' + ext + '))', f)]\n\n\n\nclass ImageDataGenerator(object):\n '''\n Generate minibatches with \n realtime data augmentation.\n '''\n def __init__(self, \n featurewise_center=True, # set input mean to 0 over the dataset\n samplewise_center=False, # set each sample mean to 0\n featurewise_std_normalization=True, # divide inputs by std of the dataset\n samplewise_std_normalization=False, # divide each input by its std\n\n zca_whitening=False, # apply ZCA whitening\n rotation_range=0., # degrees (0 to 180)\n width_shift_range=0., # fraction of total width\n 
height_shift_range=0., # fraction of total height\n horizontal_flip=False,\n vertical_flip=False,\n ):\n self.__dict__.update(locals())\n self.mean = None\n self.std = None\n self.principal_components = None\n\n\n def flow(self, X, y, batch_size=32, shuffle=False, seed=None, save_to_dir=None, save_prefix=\"\", save_format=\"jpeg\"):\n if seed:\n random.seed(seed)\n\n if shuffle:\n seed = random.randint(1, 10e6)\n np.random.seed(seed)\n np.random.shuffle(X)\n np.random.seed(seed)\n np.random.shuffle(y)\n\n nb_batch = int(math.ceil(float(X.shape[0])/batch_size))\n for b in range(nb_batch):\n batch_end = (b+1)*batch_size\n if batch_end > X.shape[0]:\n nb_samples = X.shape[0] - b*batch_size\n else:\n nb_samples = batch_size\n\n bX = np.zeros(tuple([nb_samples]+list(X.shape)[1:]))\n for i in range(nb_samples):\n x = X[b*batch_size+i]\n x = self.random_transform(x.astype(\"float32\"))\n x = self.standardize(x)\n bX[i] = x\n\n if save_to_dir:\n for i in range(nb_samples):\n img = array_to_img(bX[i], scale=True)\n img.save(save_to_dir + \"/\" + save_prefix + \"_\" + str(i) + \".\" + save_format)\n\n yield bX, y[b*batch_size:b*batch_size+nb_samples]\n\n\n def standardize(self, x):\n if self.featurewise_center:\n x -= self.mean\n if self.featurewise_std_normalization:\n x /= self.std\n\n if self.zca_whitening:\n flatx = np.reshape(x, (x.shape[0]*x.shape[1]*x.shape[2]))\n whitex = np.dot(flatx, self.principal_components)\n x = np.reshape(whitex, (x.shape[0], x.shape[1], x.shape[2]))\n\n if self.samplewise_center:\n x -= np.mean(x)\n if self.samplewise_std_normalization:\n x /= np.std(x)\n\n return x\n\n\n def random_transform(self, x):\n if self.rotation_range:\n x = random_rotation(x, self.rotation_range)\n if self.width_shift_range or self.height_shift_range:\n x = random_shift(x, self.width_shift_range, self.height_shift_range)\n if self.horizontal_flip:\n if random.random() < 0.5:\n x = horizontal_flip(x)\n if self.vertical_flip:\n if random.random() < 0.5:\n x = vertical_flip(x)\n\n # TODO:\n # zoom\n # barrel/fisheye\n # shearing\n # channel shifting\n return x\n\n\n def fit(self, X, \n augment=False, # fit on randomly augmented samples\n rounds=1, # if augment, how many augmentation passes over the data do we use\n seed=None\n ):\n '''\n Required for featurewise_center, featurewise_std_normalization and zca_whitening.\n '''\n X = np.copy(X)\n \n if augment:\n aX = np.zeros(tuple([rounds*X.shape[0]]+list(X.shape)[1:]))\n for r in range(rounds):\n for i in range(X.shape[0]):\n img = array_to_img(X[i])\n img = self.random_transform(img)\n aX[i+r*X.shape[0]] = img_to_array(img)\n X = aX\n\n if self.featurewise_center:\n self.mean = np.mean(X, axis=0)\n X -= self.mean\n if self.featurewise_std_normalization:\n self.std = np.std(X, axis=0)\n X /= self.std\n\n if self.zca_whitening:\n flatX = np.reshape(X, (X.shape[0], X.shape[1]*X.shape[2]*X.shape[3]))\n fudge = 10e-6\n sigma = np.dot(flatX.T, flatX) / flatX.shape[1]\n U, S, V = linalg.svd(sigma)\n self.principal_components = np.dot(np.dot(U, np.diag(1. / np.sqrt(S + fudge))), U.T)\n\n\n",
"path": "keras/preprocessing/image.py"
}
] | diff --git a/keras/preprocessing/image.py b/keras/preprocessing/image.py
index 90abe2bf1116..5b64a588ad9e 100644
--- a/keras/preprocessing/image.py
+++ b/keras/preprocessing/image.py
@@ -1,6 +1,7 @@
from __future__ import absolute_import
import numpy as np
+import re
from scipy import ndimage
from scipy import linalg
|
mathesar-foundation__mathesar-2759 | Support Importing Semicolon Separated Values file
## Problem
Currently Mathesar allows importing [DSV](https://en.wikipedia.org/wiki/Delimiter-separated_values) files with the following delimiters:
- `,`
- `\t`
- `:`
- `|`
Apart from these, the semicolon (`;`) is another popular delimiter in industry, since fields such as addresses and numeric values often contain commas.
## Proposed solution
It would be helpful if Mathesar also allowed users to import data from **semicolon-separated values** files.
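As a rough illustration, clevercsv (which Mathesar uses for dialect sniffing) already handles `;` once it is included in the allowed-delimiter set; the sketch below is a standalone example with hypothetical sample data, not Mathesar code.
```python
import clevercsv as csv

# Hypothetical semicolon-separated sample; quoted fields contain commas
sample = 'id;name;address\n1;Alice;"12, Main St"\n2;Bob;"7, High St"\n'

# Same detector call Mathesar uses, with ';' added to the allowed delimiters
dialect = csv.detect.Detector().detect(sample, delimiters=',\t:|;')
print(dialect.delimiter)  # expected to be ';' for this sample
```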
| [
{
"content": "from io import TextIOWrapper\n\nimport clevercsv as csv\n\nfrom db.identifiers import truncate_if_necessary\nfrom db.tables.operations.alter import update_pk_sequence_to_latest\nfrom mathesar.database.base import create_mathesar_engine\nfrom mathesar.models.base import Table\nfrom db.records.operations.insert import insert_records_from_csv\nfrom db.tables.operations.create import create_string_column_table\nfrom db.tables.operations.select import get_oid_from_table\nfrom db.tables.operations.drop import drop_table\nfrom mathesar.errors import InvalidTableError\nfrom db.constants import ID, ID_ORIGINAL, COLUMN_NAME_TEMPLATE\nfrom psycopg2.errors import IntegrityError, DataError\n\nfrom mathesar.state import reset_reflection\n\nALLOWED_DELIMITERS = \",\\t:|\"\nSAMPLE_SIZE = 20000\nCHECK_ROWS = 10\n\n\ndef get_file_encoding(file):\n \"\"\"\n Given a file, uses charset_normalizer if installed or chardet which is installed as part of clevercsv module to\n detect the file encoding. Returns a default value of utf-8-sig if encoding could not be detected or detection\n libraries are missing.\n \"\"\"\n from charset_normalizer import detect\n # Sample Size reduces the accuracy\n encoding = detect(file.read()).get('encoding', None)\n file.seek(0)\n if encoding is not None:\n return encoding\n return \"utf-8\"\n\n\ndef check_dialect(file, dialect):\n \"\"\"\n Checks to see if we can parse the given file with the given dialect\n\n Parses the first CHECK_ROWS rows. Checks to see if any have formatting issues (as\n indicated by parse_row), or if any have a differing number of columns.\n\n Args:\n file: _io.TextIOWrapper object, an already opened file\n dialect: csv.Dialect object, the dialect we are validating\n\n Returns:\n bool: False if any error that would cause SQL errors were found, otherwise True\n \"\"\"\n prev_num_columns = None\n row_gen = csv.read.reader(file, dialect)\n for _ in range(CHECK_ROWS):\n try:\n row = next(row_gen)\n except StopIteration:\n # If less than CHECK_ROWS rows in file, stop early\n break\n\n num_columns = len(row)\n if prev_num_columns is None:\n prev_num_columns = num_columns\n elif prev_num_columns != num_columns:\n return False\n return True\n\n\ndef get_sv_dialect(file):\n \"\"\"\n Given a *sv file, generate a dialect to parse it.\n\n Args:\n file: _io.TextIOWrapper object, an already opened file\n\n Returns:\n dialect: csv.Dialect object, the dialect to parse the file\n\n Raises:\n InvalidTableError: If the generated dialect was unable to parse the file\n \"\"\"\n dialect = csv.detect.Detector().detect(file.read(SAMPLE_SIZE),\n delimiters=ALLOWED_DELIMITERS)\n if dialect is None:\n raise InvalidTableError\n\n file.seek(0)\n if check_dialect(file, dialect):\n file.seek(0)\n return dialect\n else:\n raise InvalidTableError\n\n\ndef get_sv_reader(file, header, dialect=None):\n encoding = get_file_encoding(file)\n file = TextIOWrapper(file, encoding=encoding)\n if dialect:\n reader = csv.DictReader(file, dialect=dialect)\n else:\n reader = csv.DictReader(file)\n if not header:\n reader.fieldnames = [\n f\"{COLUMN_NAME_TEMPLATE}{i}\" for i in range(len(reader.fieldnames))\n ]\n file.seek(0)\n\n return reader\n\n\ndef create_db_table_from_data_file(data_file, name, schema, comment=None):\n db_name = schema.database.name\n engine = create_mathesar_engine(db_name)\n sv_filename = data_file.file.path\n header = data_file.header\n dialect = csv.dialect.SimpleDialect(data_file.delimiter, data_file.quotechar,\n data_file.escapechar)\n encoding = 
get_file_encoding(data_file.file)\n with open(sv_filename, 'rb') as sv_file:\n sv_reader = get_sv_reader(sv_file, header, dialect=dialect)\n column_names = _process_column_names(sv_reader.fieldnames)\n table = create_string_column_table(\n name=name,\n schema=schema.name,\n column_names=column_names,\n engine=engine,\n comment=comment,\n )\n try:\n insert_records_from_csv(\n table,\n engine,\n sv_filename,\n column_names,\n header,\n delimiter=dialect.delimiter,\n escape=dialect.escapechar,\n quote=dialect.quotechar,\n encoding=encoding\n )\n update_pk_sequence_to_latest(engine, table)\n except (IntegrityError, DataError):\n drop_table(name=name, schema=schema.name, engine=engine)\n column_names_alt = [\n column_name if column_name != ID else ID_ORIGINAL\n for column_name in column_names\n ]\n table = create_string_column_table(\n name=name,\n schema=schema.name,\n column_names=column_names_alt,\n engine=engine,\n comment=comment,\n )\n insert_records_from_csv(\n table,\n engine,\n sv_filename,\n column_names_alt,\n header,\n delimiter=dialect.delimiter,\n escape=dialect.escapechar,\n quote=dialect.quotechar,\n encoding=encoding\n )\n reset_reflection(db_name=db_name)\n return table\n\n\ndef _process_column_names(column_names):\n column_names = (\n column_name.strip()\n for column_name\n in column_names\n )\n column_names = (\n truncate_if_necessary(column_name)\n for column_name\n in column_names\n )\n column_names = (\n f\"{COLUMN_NAME_TEMPLATE}{i}\" if name == '' else name\n for i, name\n in enumerate(column_names)\n )\n return list(column_names)\n\n\ndef create_table_from_csv(data_file, name, schema, comment=None):\n engine = create_mathesar_engine(schema.database.name)\n db_table = create_db_table_from_data_file(\n data_file, name, schema, comment=comment\n )\n db_table_oid = get_oid_from_table(db_table.name, db_table.schema, engine)\n # Using current_objects to create the table instead of objects. objects\n # triggers re-reflection, which will cause a race condition to create the table\n table = Table.current_objects.get(\n oid=db_table_oid,\n schema=schema,\n )\n table.import_verified = False\n table.save()\n data_file.table_imported_to = table\n data_file.save()\n return table\n",
"path": "mathesar/imports/csv.py"
}
] | [
{
"content": "from io import TextIOWrapper\n\nimport clevercsv as csv\n\nfrom db.identifiers import truncate_if_necessary\nfrom db.tables.operations.alter import update_pk_sequence_to_latest\nfrom mathesar.database.base import create_mathesar_engine\nfrom mathesar.models.base import Table\nfrom db.records.operations.insert import insert_records_from_csv\nfrom db.tables.operations.create import create_string_column_table\nfrom db.tables.operations.select import get_oid_from_table\nfrom db.tables.operations.drop import drop_table\nfrom mathesar.errors import InvalidTableError\nfrom db.constants import ID, ID_ORIGINAL, COLUMN_NAME_TEMPLATE\nfrom psycopg2.errors import IntegrityError, DataError\n\nfrom mathesar.state import reset_reflection\n\nALLOWED_DELIMITERS = \",\\t:|;\"\nSAMPLE_SIZE = 20000\nCHECK_ROWS = 10\n\n\ndef get_file_encoding(file):\n \"\"\"\n Given a file, uses charset_normalizer if installed or chardet which is installed as part of clevercsv module to\n detect the file encoding. Returns a default value of utf-8-sig if encoding could not be detected or detection\n libraries are missing.\n \"\"\"\n from charset_normalizer import detect\n # Sample Size reduces the accuracy\n encoding = detect(file.read()).get('encoding', None)\n file.seek(0)\n if encoding is not None:\n return encoding\n return \"utf-8\"\n\n\ndef check_dialect(file, dialect):\n \"\"\"\n Checks to see if we can parse the given file with the given dialect\n\n Parses the first CHECK_ROWS rows. Checks to see if any have formatting issues (as\n indicated by parse_row), or if any have a differing number of columns.\n\n Args:\n file: _io.TextIOWrapper object, an already opened file\n dialect: csv.Dialect object, the dialect we are validating\n\n Returns:\n bool: False if any error that would cause SQL errors were found, otherwise True\n \"\"\"\n prev_num_columns = None\n row_gen = csv.read.reader(file, dialect)\n for _ in range(CHECK_ROWS):\n try:\n row = next(row_gen)\n except StopIteration:\n # If less than CHECK_ROWS rows in file, stop early\n break\n\n num_columns = len(row)\n if prev_num_columns is None:\n prev_num_columns = num_columns\n elif prev_num_columns != num_columns:\n return False\n return True\n\n\ndef get_sv_dialect(file):\n \"\"\"\n Given a *sv file, generate a dialect to parse it.\n\n Args:\n file: _io.TextIOWrapper object, an already opened file\n\n Returns:\n dialect: csv.Dialect object, the dialect to parse the file\n\n Raises:\n InvalidTableError: If the generated dialect was unable to parse the file\n \"\"\"\n dialect = csv.detect.Detector().detect(file.read(SAMPLE_SIZE),\n delimiters=ALLOWED_DELIMITERS)\n if dialect is None:\n raise InvalidTableError\n\n file.seek(0)\n if check_dialect(file, dialect):\n file.seek(0)\n return dialect\n else:\n raise InvalidTableError\n\n\ndef get_sv_reader(file, header, dialect=None):\n encoding = get_file_encoding(file)\n file = TextIOWrapper(file, encoding=encoding)\n if dialect:\n reader = csv.DictReader(file, dialect=dialect)\n else:\n reader = csv.DictReader(file)\n if not header:\n reader.fieldnames = [\n f\"{COLUMN_NAME_TEMPLATE}{i}\" for i in range(len(reader.fieldnames))\n ]\n file.seek(0)\n\n return reader\n\n\ndef create_db_table_from_data_file(data_file, name, schema, comment=None):\n db_name = schema.database.name\n engine = create_mathesar_engine(db_name)\n sv_filename = data_file.file.path\n header = data_file.header\n dialect = csv.dialect.SimpleDialect(data_file.delimiter, data_file.quotechar,\n data_file.escapechar)\n encoding = 
get_file_encoding(data_file.file)\n with open(sv_filename, 'rb') as sv_file:\n sv_reader = get_sv_reader(sv_file, header, dialect=dialect)\n column_names = _process_column_names(sv_reader.fieldnames)\n table = create_string_column_table(\n name=name,\n schema=schema.name,\n column_names=column_names,\n engine=engine,\n comment=comment,\n )\n try:\n insert_records_from_csv(\n table,\n engine,\n sv_filename,\n column_names,\n header,\n delimiter=dialect.delimiter,\n escape=dialect.escapechar,\n quote=dialect.quotechar,\n encoding=encoding\n )\n update_pk_sequence_to_latest(engine, table)\n except (IntegrityError, DataError):\n drop_table(name=name, schema=schema.name, engine=engine)\n column_names_alt = [\n column_name if column_name != ID else ID_ORIGINAL\n for column_name in column_names\n ]\n table = create_string_column_table(\n name=name,\n schema=schema.name,\n column_names=column_names_alt,\n engine=engine,\n comment=comment,\n )\n insert_records_from_csv(\n table,\n engine,\n sv_filename,\n column_names_alt,\n header,\n delimiter=dialect.delimiter,\n escape=dialect.escapechar,\n quote=dialect.quotechar,\n encoding=encoding\n )\n reset_reflection(db_name=db_name)\n return table\n\n\ndef _process_column_names(column_names):\n column_names = (\n column_name.strip()\n for column_name\n in column_names\n )\n column_names = (\n truncate_if_necessary(column_name)\n for column_name\n in column_names\n )\n column_names = (\n f\"{COLUMN_NAME_TEMPLATE}{i}\" if name == '' else name\n for i, name\n in enumerate(column_names)\n )\n return list(column_names)\n\n\ndef create_table_from_csv(data_file, name, schema, comment=None):\n engine = create_mathesar_engine(schema.database.name)\n db_table = create_db_table_from_data_file(\n data_file, name, schema, comment=comment\n )\n db_table_oid = get_oid_from_table(db_table.name, db_table.schema, engine)\n # Using current_objects to create the table instead of objects. objects\n # triggers re-reflection, which will cause a race condition to create the table\n table = Table.current_objects.get(\n oid=db_table_oid,\n schema=schema,\n )\n table.import_verified = False\n table.save()\n data_file.table_imported_to = table\n data_file.save()\n return table\n",
"path": "mathesar/imports/csv.py"
}
] | diff --git a/mathesar/imports/csv.py b/mathesar/imports/csv.py
index 19c8d8f950..2329b7891b 100644
--- a/mathesar/imports/csv.py
+++ b/mathesar/imports/csv.py
@@ -16,7 +16,7 @@
from mathesar.state import reset_reflection
-ALLOWED_DELIMITERS = ",\t:|"
+ALLOWED_DELIMITERS = ",\t:|;"
SAMPLE_SIZE = 20000
CHECK_ROWS = 10
diff --git a/mathesar/tests/data/csv_parsing/patents_invalid.csv b/mathesar/tests/data/csv_parsing/patents_invalid.csv
index f71ddfccec..96478af390 100644
--- a/mathesar/tests/data/csv_parsing/patents_invalid.csv
+++ b/mathesar/tests/data/csv_parsing/patents_invalid.csv
@@ -1,1395 +1,1395 @@
-"Center";"Status";"Case Number";"Patent Number";"Application SN";"Title";"Patent Expiration Date"
-"NASA Kennedy Space Center";"Application";"KSC-12871";0;"13/033,085";"Polyimide Wire Insulation Repair System";
-"NASA Ames Research Center";"Issued";"ARC-14048-1";5694939;"08/543,093";"Autogenic-Feedback Training Exercise Method & System";"10/03/2015"
-"NASA Ames Research Center";"Issued";"ARC-14231-1";6109270;"09/017,519";"Multimodality Instrument For Tissue Characterization";"02/04/2017"
-"NASA Ames Research Center";"Issued";"ARC-14231-2DIV";6976013;"10/874,003";"Metrics For Body Sensing System";"06/16/2024"
-"NASA Ames Research Center";"Issued";"ARC-14231-3";6718196;"09/652,299";"Multimodality Instrument For Tissue Characterization";"02/04/2017"
-"NASA Ames Research Center";"Issued";"ARC-14275-1";6445390;"09/226,673";"Automated Triangle Geometry Processing For Surface Modeling And Cartesian Grid Generation (CART3D)";"12/24/2018"
-"NASA Ames Research Center";"Issued";"ARC-14281-1";6606612;"09/374,491";"Aerodynamic Design Using Neural Networks";"08/13/2019"
-"NASA Ames Research Center";"Issued";"ARC-14281-3";7191161;"10/637,087";"Method For Constructing Composite Response Surfaces By Combining Neural Networks With Polynomial Interpolation Or Estimation Techniques";"11/18/2020"
-"NASA Ames Research Center";"Issued";"ARC-14359-1";6314362;"09/498,123";"A Direct-To Controller Tool (A Component Of The CTAS Software Suite)";"02/02/2020"
-"NASA Ames Research Center";"Issued";"ARC-14494-1";6720984;"09/606,107";"Bio-Electric Keyboard/Mouse/Joystick Interface Software/Algorithm";"06/13/2020"
-"NASA Ames Research Center";"Issued";"ARC-14512-1";6823333;"09/800,309";"Keyword-in-context Search Method And Software For Information Retrieval From Collections Of Text Documents (Quorum/Perilog)";"03/02/2021"
-"NASA Ames Research Center";"Issued";"ARC-14513-1";6741981;"09/800,311";"Model-based Phrase Search Method And Software For Information Retrieval From Collections Of Text Documents (Quorum/Perilog)";"09/14/2021"
-"NASA Ames Research Center";"Issued";"ARC-14514-1";6697793;"09/800,313";"Method And Software For Using Implicit Phrase Models To Generate Prominent Phrases Contained In Collections Of Text Documents (Quorum/Perilog)";"03/02/2021"
-"NASA Ames Research Center";"Issued";"ARC-14515-1";6721728;"09/800,310";"Method And Software For Extracting And Distilling Topically And Situationally Relevant Phrases From Collections Of Text Documents (Quorum/Perilog)";"07/26/2021"
-"NASA Ames Research Center";"Issued";"ARC-14556-1";7346172;"09/822470";"Spatially-modulated Auditory Alert Having Enhanced Detection";"08/24/2022"
-"NASA Ames Research Center";"Issued";"ARC-14569-1";7783130;"11/045,041";"Spatial Standard Observer";"03/26/2028"
-"NASA Ames Research Center";"Issued";"ARC-14569-2";8139892;"12/807,375";"Spatial Standard Observer";"01/24/2025"
-"NASA Ames Research Center";"Issued";"ARC-14586-1DIV";7293001;"11/274,744";"A Hybrid Neural Network And Support Vector Machine Method For Optimization";"01/07/2022"
-"NASA Ames Research Center";"Issued";"ARC-14613-1";6858197;"10/099,247";"A Novel Technique That Allows For The Deposition And Patterning Of A Catalyst Onto A Surface For The Growth Of Single-Walled Carbon Nanotubes";"11/30/2019"
-"NASA Ames Research Center";"Issued";"ARC-14652-1";7375826;"10/956,517";"3D Laser Scanner";"03/25/2026"
-"NASA Ames Research Center";"Issued";"ARC-14653-1";7702427;"10/914,783";"Future ATM (Air Traffic Management) Concepts Evaluation Tool (FACET)";"07/30/2024"
-"NASA Ames Research Center";"Issued";"ARC-14653-2";8290696;"12/694,966";"Future ATM (Air Traffic Management) Concepts Evaluation Tool (FACET)";"07/30/2024"
-"NASA Ames Research Center";"Issued";"ARC-14661-1";7276266;"10/320,698";"A Plasma Apparatus And Process For Functionalization Of Carbon Nanotubes";"12/13/2022"
-"NASA Ames Research Center";"Issued";"ARC-14661-2";7473436;"10/828,524";"Improved Functionalization Of Carbon Nanotubes";"12/13/2022"
-"NASA Ames Research Center";"Issued";"ARC-14661-3";7767270;"11/387,503";"Selective Functionalization Of Carbon Nanotubes Based Upon Distance Traveled";"11/05/2025"
-"NASA Ames Research Center";"Issued";"ARC-14662-1";6968338;"10/232,975";"Advanced XML Database Integration Technique For Managing Unstructured Documents (NETMARK) (Part of NTTS Suite)";"07/18/2023"
-"NASA Ames Research Center";"Issued";"ARC-14682-2";7333735;"10/885,533";"Communication Using VCSEL Laser Array";"11/03/2023"
-"NASA Ames Research Center";"Issued";"ARC-14710-1";7231329;"10/706,478";"Elimination Of Parameter Input Requirement For Elliptic Grid Generation Methods In Engineering";"03/11/2025"
-"NASA Ames Research Center";"Issued";"ARC-14733-1";6972056;"10/135,013";"An Environmentally Compatible Method To Purify Carbon Nanotubes";"01/03/2023"
-"NASA Ames Research Center";"Issued";"ARC-14743-1";7767305;"10/758611";"High-Efficiency Tantalum-Based Ceramics (HETC)";"01/14/2024"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-008-014";8047472;"12/45,970";"IMPROVED RAM BOOSTER";"03/11/2028"
-"NASA Ames Research Center";"Issued";"ARC-14744-1US";7816491;"10/494,853";"Ordered Biological Nanostructures Formed From Chaperonin Polypeptides";"05/06/2024"
-"NASA Ames Research Center";"Issued";"ARC-14744-2";7795388;"11/194,991";"A Versatile Platform For Nanotechnology Based On Circular Permutations Of Chaperonin Protein";"05/06/2024"
-"NASA Ames Research Center";"Issued";"ARC-14940-1";7135172;"10/238,515";"Bucky Paper As An Artificial Support Membrane In Retinal Cell Transplantation";"06/12/2024"
-"NASA Ames Research Center";"Issued";"ARC-14941-1";6755530;"10/198,672";"Carbon Nanotubes As A Prototype Interface For Retinal Cell Recording And Stimulation (Vision Chip)";"10/18/2022"
-"NASA Ames Research Center";"Issued";"ARC-14950-1";7596416;"10/928,874";"Program Management Tool (PMT) Also Known As Business Intelligence (BI)";"07/22/2027"
-"NASA Ames Research Center";"Issued";"ARC-14950-2";8224472;"12/211,439";"Enhanced Project Management Tool";"10/20/2026"
-"NASA Ames Research Center";"Issued";"ARC-14970-1";7129857;"10/789,049";"Intelligent Weather Agent";"07/20/2024"
-"NASA Ames Research Center";"Issued";"ARC-15040-1";8200486;"10/457,696";"Sub Auditory Speech Recognition Based On Electromyographic Signals";"09/14/2025"
-"NASA Ames Research Center";"Issued";"ARC-15041-2";7206674;"10/923,156";"Information Display System For Atypical Flight Phase";"05/21/2024"
-"NASA Ames Research Center";"Issued";"ARC-15042-2";7217650;"10/816,576";"Metallic Nanowire Interconnections For Integrated Circuit Fabrication";"03/11/2023"
-"NASA Ames Research Center";"Issued";"ARC-15058-1";7383238;"10/789,029";"Inductive Monitoring System - System Health Monitoring Software That Learns System Behavior From Data (IMS)";"03/12/2025"
-"NASA Ames Research Center";"Issued";"ARC-15073-1";7590606;"10/703,039";"InvestigationOrganizer: Information Storage, Modeling And Visualization Support For Accident/Mishap Investigations (Part Of A Suite Of Software That Includes ARC-15069, ARC-15070 And ARC-15073) ";"04/30/2026"
-"NASA Ames Research Center";"Issued";"ARC-15088-1";7070923;"10/608,884";"Carbon Nanotube Bucky Paper Cages For Immune Shielding Of Cells And Tissue For Transplantation";"09/20/2023"
-"NASA Ames Research Center";"Issued";"ARC-15101-1";7113265;"10/808,704";"Sample Handling Device For X-ray Diffraction Instruments";"03/17/2024"
-"NASA Ames Research Center";"Issued";"ARC-15157-1";7286573;"10/923,160";"A Method Of Converting Quantum Wells From Type-II To Type-I And Of Enhancing Interband Optical Gain ";"03/11/2025"
-"NASA Ames Research Center";"Issued";"ARC-15171-1";7650232;"11/239,456";"Trajectory Specification For High-Capacity Air Traffic Control";"05/25/2027"
-"NASA Ames Research Center";"Issued";"ARC-15173-1";7273095;"10/825,795";"Embedded Carbon Nanotube Array As High Performance Thermal Conductors";"03/11/2023"
-"NASA Ames Research Center";"Issued";"ARC-15173-2";7784531;"11/900,131";"Nanoengineered Thermal Materials Based On Carbon Nanotube Array Composites";"02/16/2024"
-"NASA Ames Research Center";"Issued";"ARC-15201-1";7381459;"10/779,504";"Toughened Uni-piece Fibrous Reduced Oxidation Ceramic (TUFROC) Light-Weight Thermal Protection System For Use On Space Vehicles During Atmospheric Entry At Hypersonic Speed";"02/12/2024"
-"NASA Ames Research Center";"Issued";"ARC-15201-2";7314648;"10/911,747";"Toughened Uni-piece Fibrous Reinforced Oxidation-Resistant Composite (TUFROC)";"02/12/2024"
-"NASA Ames Research Center";"Issued";"ARC-15204-1";7949472;"10/885,537";"Nanopore Pipetts For Structural Characterization Of Single Polymeric Biomelecules";"01/14/2026"
-"NASA Ames Research Center";"Issued";"ARC-15204-1DIV";8494782;"13/092,048";"Nanopore Pipetts For Structural Characterization Of Single Polymeric Biomelecules";"06/24/2024"
-"NASA Ames Research Center";"Issued";"ARC-15205-1";7939734;"10/873,996";"The Electrochemical Biosensors Using Carbon Nanotube Nanoelectrode Arrays";"06/14/2024"
-"NASA Ames Research Center";"Issued";"ARC-15312-1";7672969;"11/513,429";"Context Based Configuration Management Concept";"08/25/2026"
-"NASA Ames Research Center";"Issued";"ARC-15314-1";7718223;"11/007,913";"Provision Of Carbon Nanotube Arrays Of Variable Density For IC Hot Spot Control";"02/12/2027"
-"NASA Ames Research Center";"Issued";"ARC-15314-2";7704547;"11/472,516";"Carbon Nanotube Growth Density Control";"12/07/2024"
-"NASA Ames Research Center";"Issued";"ARC-15315-1";7378963;"11/239,449";"Reconfigurable Auditory-visual Display For Multi-channel Control Center And Rescue Communications";"01/06/2026"
-"NASA Ames Research Center";"Issued";"ARC-15356-2";7161501;"11/66,650";"Display Of Aircraft Energy State For Flight Operations Quality Assurance (FOQA) Programs";"09/22/2024"
-"NASA Ames Research Center";"Issued";"ARC-15356-3";7212135;"11/066649";"Real-Time Analysis And Display Of Aircraft Approach Maneuvers ";"09/22/2024"
-"NASA Ames Research Center";"Issued";"ARC-15370-1";7698274;"10/956,524";"Selective Access And Editing In A Database (Part of NTTS Suite)";"03/18/2027"
-"NASA Ames Research Center";"Issued";"ARC-15392-1";7313475;"11/053,713";"Delay Banking: Collaborative Decision Making For Airspace-user Priority In Tactical Flow Restrictions";"04/04/2025"
-"NASA Ames Research Center";"Issued";"ARC-15404-1";7288490;"11/009,854";"Use Of A Single Electrode To Orient Carbon Nanotube Growth";"12/07/2024"
-"NASA Ames Research Center";"Issued";"ARC-15437-1";7438422;"11/340,816";"Low Cost Portable Planetarium Imaging System";"05/14/2027"
-"NASA Ames Research Center";"Issued";"ARC-15443-1";7531775;"11/251,006";"A Tracking Sunphotometer Without Moving Parts ";"01/31/2026"
-"NASA Ames Research Center";"Issued";"ARC-15460-1";7426848;"11/203,576";"Discharge Based Gas Sensor Array Using Self-Oriented Regular Vertical Array Of Carbon Nanotubes";"08/05/2025"
-"NASA Ames Research Center";"Issued";"ARC-15462-1";7574338;"11/340002";"Finite-Difference Simulation And Visualization Of Elastodynamics In Time-Evolving Generalized Curvilinear Coordinates ";"07/29/2026"
-"NASA Ames Research Center";"Issued";"ARC-15487-1";7796026;"11/111,620";"Electronic Firefighter Escape Trail";"06/04/2028"
-"NASA Ames Research Center";"Issued";"ARC-15506-1";7529633;"11/203,589";"Applications Of Carbon Nanotube Hold-Off Voltages";"10/22/2026"
-"NASA Ames Research Center";"Issued";"ARC-15519-1";7574357;"11/169,265";"Security Applications For Subvocal Speech";"11/09/2026"
-"NASA Ames Research Center";"Issued";"ARC-15566-1";7801687;"11/178,079";"Gas Sensors Based on Coated and Doped Carbon Nanotubes";"05/26/2029"
-"NASA Ames Research Center";"Issued";"ARC-15566-2";8000903;"11/416,505";"Coated Or Doped Carbon Nanotube Network Sensors As Affected By Environmental Parameters And Elapsed Time";"09/15/2029"
-"NASA Ames Research Center";"Issued";"ARC-15566-3";7875455;"11/489,803";"Nanotechnology Sensors For Determination Of Chemical Substances In An Oil Reservoir";"12/17/2028"
-"NASA Ames Research Center";"Issued";"ARC-15566-5";7623972;"11/591,630";"Detection Of Presence Of Chemical Precursors";"07/08/2025"
-"NASA Ames Research Center";"Issued";"ARC-15575-1";7473930;"11/173,053";"Use Of Carbon Nanotube Arrays For Display Purposes";"10/24/2026"
-"NASA Ames Research Center";"Issued";"ARC-15578-2";7873181;"11/525,600";"Visual Signal Sensor Organ Replacement: Implementation";"05/19/2028"
-"NASA Ames Research Center";"Issued";"ARC-15606-1";7431242;"11/265,324";"Aero Assist Capsule Vehicle Geometry For Atmospheric Entry";"04/01/2026"
-"NASA Ames Research Center";"Issued";"ARC-15684-1";7516890;"11/444,807";"InterIssued Inventory Monitoring";"05/25/2026"
-"NASA Ames Research Center";"Issued";"ARC-15714-1";7869029;"11/398,733";"Light Collimator And Monitor";"11/11/2029"
-"NASA Ames Research Center";"Issued";"ARC-15782-1";7549338;"11/973998";"Nanotechnology Sensor Of Presence And Concentration Of A Target Molecule";"09/28/2027"
-"NASA Ames Research Center";"Issued";"ARC-15796-1";8675922;"13/444,777";"Motion Blur Evaluation Techniques";"08/31/1932"
-"NASA Ames Research Center";"Issued";"ARC-15870-1";7655497;"11/513,431";"Growth Method For Phase Change Nanostructures";"08/16/2027"
-"NASA Ames Research Center";"Issued";"ARC-15890-1";7655145;"11/543,275";"Water Treatment Systems For Long Space Flight Use";"11/05/2027"
-"NASA Ames Research Center";"Issued";"ARC-15900-1";7490367;"11/526,175";"Wet Waste Drying Bag";"09/20/2026"
-"NASA Ames Research Center";"Issued";"ARC-15903-1DIV";8409491;"13/215,206";"In-situ Formation Of Reinforcement Phases In Ceramic Composites And Ultra High Temperature Ceramic Composites For Advanced TPS Applications";"09/28/2027"
-"NASA Ames Research Center";"Issued";"ARC-15967-1";7635420;"11/645,267";"Dielectrophoresis-Based Particle Sensor Using Nanoelectrode Arrays";"06/06/2028"
-"NASA Ames Research Center";"Application";"ARC-15977-1";0;"12/100,378";"Artificial Immune System Based Approach For Air Combat Maneuvering";
-"NASA Ames Research Center";"Application";"ARC-15981-4";;"13/463,780";"Chaperonin-based Templates for Pseudo-cellulosomes with Multiple Enzymes Present";"07/19/2027"
-"NASA Ames Research Center";"Issued";"ARC-15983-1";7923709;"12/273,502";"Radiation Shielding System Using A Composite Of Hydrogen-Rich Polymers Loaded With Carbon Nanotubes";"09/30/2029"
-"NASA Ames Research Center";"Application";"ARC-16478-1";;"14/191,246";"Real Time PIREPs Using Audio Twitter";"02/26/1934"
-"NASA Ames Research Center";"Issued";"ARC-15995-1";8290246;"11/958,296";"A Method To Measure The Recession Of Ablative Materials In Arc-jet Testing Using Digital Stereo-photogrammetry And Image Cross-correlation";"07/01/1931"
-"NASA Ames Research Center";"Issued";"ARC-16013-1";7968054;"11/715,785";"Wireless Chemical Sensor Data Transmission System Based On Nanotechnology";"10/03/2029"
-"NASA Ames Research Center";"Issued";"ARC-16018-1";7662459;"12/175,379";"Atmospheric Entry Heat Shield Employing Cured Thermal Protection Material Blocks Bonded In A Large-Cell Honeycomb Matrix";"07/17/2028"
-"NASA Ames Research Center";"Application";"ARC-16132-1";0;"14/091,250";"Surface Densification Of Phenolic Impregnated Carbon Ablator (PICA)";"11/26/1933"
-"NASA Ames Research Center";"Issued";"ARC-16133-1";8069001;"12/319,918";"Hollow AErothermal Ablation And Temperature (HEAT) Isotherm Sensor For Tracking Isotherm Through The TPS Material";"10/09/2029"
-"NASA Ames Research Center";"Application";"ARC-16211-1";0;"13/896,284";"Low Cost Optical Fiber Solar Cell Configurations";"05/16/1933"
-"NASA Ames Research Center";"Issued";"ARC-16235-1";8285659;"12/543,411";"Modeling-Error-Driven Performance-Seeking Direct Adaptive Control";"11/18/1930"
-"NASA Ames Research Center";"Application";"ARC-16273-1";0;"12/454,024";"Decomposition Technique for Remaining Useful Life Prediction";"11/18/1930"
-"NASA Ames Research Center";"Issued";"ARC-16280-1";8409845;"12/316,557";"Offshore membrane enclosures for dewatering Algae (OMEDA)";"10/15/1931"
-"NASA Ames Research Center";"Issued";"ARC-16298-1";8333810;"12/398,854";"Nanotechnology-Based Supercapacitor";"06/29/1930"
-"NASA Ames Research Center";"Issued";"ARC-16320-1";8332342;"12/622,407";"Battery Prognostics using Particle Filtering Techniques";"02/05/1931"
-"NASA Ames Research Center";"Issued";"ARC-16331-1";8408707;"12/428,441";"System to estimate visual acuity from wavefront aberrations";"05/29/2029"
-"NASA Ames Research Center";"Issued";"ARC-16334-1";8244477;"12/478,667";"Estimation of Growth Stage and Growth Rate for Algae";"06/04/2029"
-"NASA Ames Research Center";"Application";"ARC-16337-1";0;"13/793,998";"Method and Device for Biometric Subject Verification and Identification Based Upon electrocardiographic signals";"03/11/1933"
-"NASA Ames Research Center";"Application";"ARC-16340-1";0;"13/645,284";"Method for formation and manufacture of carbon nanotube mesh bucky paper capsules for transplantation of cells and tissue and implantation of medical devices";"10/04/1932"
-"NASA Ames Research Center";"Issued";"ARC-16342-1";8412469;"12/698,996";"Advanced Sensor Technology for Algal Biotechnology (ASTAB) ";"12/16/1930"
-"NASA Ames Research Center";"Application";"ARC-16348-1";;"13/109,954";"Co-Optimized Blunt-Body ReEntry Vehicle Aerothermodynamic Parametric Shape and Multi-Discipline Optimization Design Process";
-"NASA Ames Research Center";"Issued";"ARC-16351-1";8498756;"13/213,022";"Hovercraft Landing System";"12/07/1931"
-"NASA Ames Research Center";"Issued";"ARC-16370-1";8375675;"12/574,493";"Self Aligning Lug for adapting carbon fiber rods to a bolted metallic connection";"05/07/1931"
-"NASA Ames Research Center";"Application";"ARC-16372-1";0;"13/794,061";"Inexpensive Cooling Systems for Devices";"03/11/1933"
-"NASA Ames Research Center";"Issued";"ARC-16373-1";8489181;"12/319,220";"Heart Electrical Actions as Biometric Indicia";"04/29/1932"
-"NASA Ames Research Center";"Application";"ARC-16405-1";0;"14/091,236";"Nanowire based piezoelectric power generation";"11/26/1933"
-"NASA Ames Research Center";"Issued";"ARC-16407-1";8337208;"12/622,374";"Content Analysis to Detect High Stress in Oral Interviews and Text Documents";"05/26/1931"
-"NASA Ames Research Center";"Application";"ARC-16419-1";0;"13/317,034";"Strobing to Mitigate Vibration for Display Legibility";"10/05/1932"
-"NASA Ames Research Center";"Application";"ARC-16450-1CIP";0;"13/720,898";"Distributed Prognostics and Health Management with a Wireless Network Architecture ";"05/05/2029"
-"NASA Ames Research Center";"Application";"ARC-16456-1";;"13/480,917";"FABRICATION OF NANOPIPETTE ARRAY FOR BIOSENSING";
-"NASA Ames Research Center";"Application";"ARC-16461-1";;"13/956,218";"Solar Powered CO2 Conversions with Thin Film Devices";"07/31/1933"
-"NASA Ames Research Center";"Application";"ARC-16466-1";;"14/010,322";"Combined HETC/ROCCI TPS Material for Temperatures Up To T=3200 F ";"08/26/1933"
-"NASA Ames Research Center";"Application";"ARC-16467-1";;"13/615,202";"ODVEC: Outlier Detection Via Estimating Clusters";
-"NASA Ames Research Center";"Application";"ARC-16607-1";;"13/658,749";"An Approach to Make Flexible Ablators that are Flexible Char Formers";"10/23/1932"
-"NASA Ames Research Center";"Application";"ARC-16621-1";;"13/472,283";"Transformable Hypersonic Aerodynamic Decelerator";"12/04/1932"
-"NASA Ames Research Center";"Application";"ARC-16644-1";;"13/648,197";"Variable Camber Continuous Aerodynamic Control Surfaces and Methods for Active Wing Shaping Control ";"10/09/1932"
-"NASA Ames Research Center";"Application";"ARC-16646-1";;"13/485,721";"A method to produce copper nanowires for interconnect applications";
-"NASA Ames Research Center";"Application";"ARC-16661-1";;"13/444,789";"Video acuity measurement system";
-"NASA Ames Research Center";"Application";"ARC-16697-1";;"13/956,929";"NTTS Search and Reporting (Part of NTTS Suite)";"08/01/1933"
-"NASA Ames Research Center";"Application";"ARC-16707-1";;"13/438,793";"Ectomycorrhizal mediated remediaiton of phenolic-based contamination through use of specifically adapted ectomycorrhizal fungi and enzyme enhancement through partial defoliation of the host.";
-"NASA Ames Research Center";"Application";"ARC-16707-1CIP";;"13/854,620";"Ectomycorrhizal mediated remediaiton of phenolic-based contamination through use of specifically adapted ectomycorrhizal fungi and enzyme enhancement through partial defoliation of the host.";"04/03/1932"
-"NASA Ames Research Center";"Application";"ARC-16732-1";;"13/573,924";"NanoSat Launch Adapter System (NLAS)";"03/14/1933"
-"NASA Ames Research Center";"Application";"ARC-16733-1";;"13/535,884";"Habitat Water Wall for Water, Solids, and Atmosphere Recycle and Reuse ";
-"NASA Ames Research Center";"Application";"ARC-16752-1";;"14/179,401";"Fuel-Efficient, Airport-Friendly, Multi-Speed Transport Aircraft Configuration with Novel Structural Approach";"02/12/1934"
-"NASA Ames Research Center";"Application";"ARC-16811-1";;"13/544,752";"Compliant electrode and composite materials for piezoelectric wind and mechanical energy conversions";
-"NASA Ames Research Center";"Application";"ARC-16812-1";;"13/783,112";"Graphene composite materials for supercapacitor electrodes ";"03/01/1933"
-"NASA Ames Research Center";"Application";"ARC-16833-1";;"13/747,875";"Flight Deck Predictive Weather Display and Decision Support Interface ";"01/23/1933"
-"NASA Ames Research Center";"Application";"ARC-16844-1";;"13/662,346";"Adaptive control and disturbance rejection of non-minimum phase plants using residual mode filters";"10/26/1932"
-"NASA Ames Research Center";"Application";"ARC-16846-1";;"13/707,546";"Dynamic Weather Routes Tool";"12/06/1932"
-"NASA Ames Research Center";"Application";"ARC-16892-1A";;"13/929,646";"The Surface-Adhering Bioreactor (SABR): A novel microbial cell cultivation platform";"06/27/1933"
-"NASA Ames Research Center";"Application";"ARC-16902-1";;"13/725,475";"Nanosensors for medical diagnosis";"12/21/1932"
-"NASA Ames Research Center";"Application";"ARC-16916-1";;"13/956,736";"A Method for Improving Control Systems with Normalized Adaptation by Optimal Control Modification";"08/01/1933"
-"NASA Ames Research Center";"Application";"ARC-16924-1";;"14/010,355";"Aluminoborosilicate Supplement for Thermal Protection of a Re-entrant Vehicle";"08/26/1933"
-"NASA Ames Research Center";"Application";"ARC-16942-2";;"13/659,739";"A new family of low density flexible ablators";"10/24/1932"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-001-049";7180943;"10/113,637";"Adaptive Lossless Data Compression";"03/26/2022"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-005-031";7407131;"11/288,052";"Sound Shield";"10/31/2025"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-006-001";7431243;"11/227,325";"Algorithms For Autonomous Soaring";"02/27/2026"
-"NASA Armstrong Flight Research Center";"Application";"DRC-006-002";0;"11/422,554";"Air Breathing,Reusable, Vertical Launch, Vertical Landing, First Stage Launch System with Off-the-Shelf Second Stage - Ram Booster";
-"NASA Armstrong Flight Research Center";"Issued";"DRC-006-005";7711455;"11/463,485";"Propulsion Controlled Aircraft Computer (PCAC)";"08/09/2026"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-006-024";7520176;"11/567,118";"Method for Real-Time Structure Shape Sensing";"12/05/2026"
-"NASA Armstrong Flight Research Center";"Application";"DRC-006-045";0;"11/682,969";"METHOD FOR REDUCING THE REFRESH RATE OF FIBER BRAGG GRATING SENSORS";
-"NASA Armstrong Flight Research Center";"Issued";"DRC-008-001";8145366;"12/138,747";"Real-time Interactive Sonic Boom Display";"04/28/2030"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-008-023";7715994;"12/191,734";"IMPROVED PROCESS FOR USING SURFACE STRAIN MEASUREMENTS TO OBTAIN OPERATIONAL LOADS FOR COMPLEX STRUCTURES";"08/14/2028"
-"NASA Armstrong Flight Research Center";"Application";"DRC-009-008";0;"12/718034";"Continental Digital Elevation Map Compression and Decompression Software";
-"NASA Armstrong Flight Research Center";"Issued";"DRC-009-026";8447443;"13/367990";"A New Peak-Seeking Control Method";"02/07/2032"
-"NASA Armstrong Flight Research Center";"Application";"DRC-010-042";;"13/463246";"An apparatus and a method to eliminate polarization-induced fading from multiple fiber-optics strain sensors via signal-processing under polarization diversity detection scheme";
-"NASA Armstrong Flight Research Center";"Application";"DRC-011-002";;"13/759,847";"OPTICAL WAVEGUIDE BRAGG GRATING WAVELENGTH SHIFT BY LIGHT INTERACTION WITH ACTIVE MATERIAL";"02/05/2033"
-"NASA Armstrong Flight Research Center";"Application";"DRC-011-015";;"14/106947";"In-situ three-dimensional shape rendering from strain values obtained through optical fiber sensors";"05/31/2032"
-"NASA Armstrong Flight Research Center";"Application";"DRC-012-005";;"13/759210";"Method and apparatus of multiplexing and acquiring data from multiple optical fibers using a single data channel of an optical frequency-domain reflectrometry (OFDR) system (Revised)";"02/05/2033"
-"NASA Armstrong Flight Research Center";"Application";"DRC-012-006";;"13/733364";"A Novel Approach to Liquid Level Sensing Using Fiber Bragg Grating Technology";"01/03/2033"
-"NASA Armstrong Flight Research Center";"Application";"DRC-012-011";;"13/573920";"Air Launch From A Towed Aircraft";"07/05/2032"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-096-055";6126111;"09/112,067";"Emergency Flight Control System Using One Engine And Fuel Transfer";"07/08/2018"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-097-021";6102330;"08/905,777";"Emergency Aircraft Lateral Controller Using Existing (non-modified) Digital Engine Computers During A System Failure For The Purpose Of Safe Landing";"07/29/2017"
-"NASA Armstrong Flight Research Center";"Issued";"DRC-098-001";6216063;"09/74,024";"A Flutterometer Flight Test Tool";"05/06/2018"
-"NASA Goddard Space Flight Center";"Application";"GSC-13378-1";0;"07/710,633";"SPLINE-LOCKING PAYLOAD FASTENER";
-"NASA Goddard Space Flight Center";"Issued";"GSC-13802-1";6584874;"08/673,859";"USING A 3-D SPRAG IN RACHETING TOOLS BASED ON PAT. NO. 5,482-144";"07/02/2016"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13817-1";5983162;"08/872,586";"Empirical Mode Decomposition Method And Hilbert Spectral Analysis Algorithms";"06/10/2017"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13817-2";6631325;"09/82,523";"COMPUTER IMPLEMENTED EMPIRICAL MODE DECOMPOSITION METHOD APPARATUS AND ARTICLE OF MANUFACTURE UTILIZING CURVATURE EXTREMA";"06/10/2017"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13817-3";6381559;"09/282,424";"Empirical Mode Decomposition Apparatus, Method, And Article Of Manufacture For Analyzing Biological Signals And Performing Curve Fitting";"03/31/2019"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13817-4";6862558;"10/73,957";"Empirical Mode Decomposition For Analyzing Acoustical Signals";"02/13/2022"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13817-5";6738734;"10/11,206";"Empirical Mode Decomposition Apparatus, Method And Article Of Manufacture For Analyzing Biological Signals And Performing Curve Fitting";"06/10/2017"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13905-1";6640949;"10/95,343";"1-Way Bearing";"03/01/2022"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13909-1";6311130;"09/150,671";"Computer Implemented Empirical Mode Decomposition Method, Apparatus, And Article Of Manufacture For Two-Dimensional Signals";"09/10/2018"
-"NASA Goddard Space Flight Center";"Issued";"GSC-13985-1";6566854;"09/646,161";"Active Antenna Combined With Non-Ferrous Current Probe.";"09/12/2020"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14064-1";6648522;"09/804,646";"Universal Fiber Optic Connector Polishing Fixture With Precision Alignment Capability";"03/13/2021"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14207-1";6626792;"09/799,872";"Gear Bearings";"03/03/2021"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14209-1";6293803;"09/501,412";"Stress Relieved Zee Electrical Interconnect";"02/09/2020"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14213-1";6760487;"09/550,254";"Estimated Spectrum Adaptive Postfilter (ESAP) And The Iterative Prepost Filtering (IPF) Algorithms";"04/14/2020"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14236-1";6538796;"09/541,680";"MEMS Devices For Spacecraft Thermal Control Applications";"03/31/2020"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14302-1";6782124;"09/729,138";"Extension Of The Empirical Mode Decomposition Method To A Time Series Of 2-Dimensional Grid Maps";"11/29/2020"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14305-1";6895115;"09/839,147";"Method For Recursive Implementation Of Hierarchical Segmentation";"04/23/2021"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14389-1";7543274;"10/789,028";"Deriving Formal Specifications And Code From Scenarios";"02/25/2024"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14393-1";7145739;"10/385,166";"Light Weight Optical Mirrors Formed In Single Crystal Silicon";"03/06/2023"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14413-1";7255483;"10/93,621";"Thrust Rollers";"03/01/2022"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14435-1";6740224;"10/173,533";"Innovative Manufacturing Procedure For Low Cost And High Quality Carbon Nanotubes";"06/11/2022"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14480-2";7762155;"11/444,808";"Gear Bearings";"05/25/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14561-1";7207245;"11/174,454";"Screw-Locking Wrench";"06/30/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14562-1";7504921;"11/543,278";"Stepping Flextures";"09/29/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14601-1";7008605;"10/292,952";"Method For Manufacturing High Quality Carbon Nanotubes";"11/08/2022"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14603-1";7544146;"11/122,201";"Anti-Backlash Gear-Bearings";"05/02/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14608-1";6990436;"10/729,579";"Time Frequency Analysis Based On Extrema Sifting";"11/28/2023"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14616-1";7248342;"10/730,195";"Conceptual Design Of A 3D Imaging Lidar For High-Resolution Mapping Of The Surface Topography Of Moons Or Planets From Space";"12/05/2023"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14657-1";7512568;"11/109,400";"Evolvable Neural Software System";"04/08/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14666-1";6775600;"10/267,092";"Systems And Methods For Determining Spacecraft Orientation";"10/07/2022"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14673-1";6901353;"10/615,365";"Normalized Amplitude Hilbert Transform (NAHT): A New Algorithm For Computing Instantaneous Frequency";"07/08/2023"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14683-1";8480826;"11/736,874";"Specular Coatings For Composite Structures";"04/18/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14762-1";7769488;"11/108,627";"SMART Solar Sail";"04/08/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14777-1";7341932;"11/251,531";"Large Area Vacuum Ultra-Violet Sensors";"09/30/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14793-1";7548199;"11/239,458";"Pivot 2.0: Radiation Hardened, Fast Acquisition/Weak Signal Tracking GPS Receiver";"09/20/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14807-1";7464006;"10/963,470";"Application Of HHT To Financial Data Analysis For Define Volatility And Trend";"10/07/2024"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14833-1";7346461;"11/251,004";"Stability Spectrum Through Hilbert-Huang Transform";"09/30/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14845-1";7290737;"11/251,537";"Demiseable Reaction Wheel Assembly";"09/29/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14871-1";7935297;"11/370,396";"Template For Deposition Of Micron And Sub-micron Pointed Structures";"03/06/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14873-1";8357211;"12/872,445 ";"ADR Salt Pill Design And Crystal Growth Process For Hydrated Magnetic Salts";"08/31/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14879-1";7635832;"11/469,105";"Iterative-Transform Phase-Retrieval Utilizing Adaptive Diversity";"08/31/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14941-1";7739671;"11/203,590";"A Method And System For Direct Implementation Of Formal Specifications Derived Mechanically From Informal Requirements";"08/12/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14942-1";7752608;"11/203,586";"A Method And System For Formal Analysis, Simulation, And Verification Of Knowledge-Based Systems, Rule-Based Systems, And Expert Systems";"08/12/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14952-1";7513546;"11/689,161";"Conformal Gripper";"03/21/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14960-1";7992760;"11/357,458";"Hardware And Technique For Dead End Welding Of All Types Of Tubing";"02/08/2026"
-"NASA Goddard Space Flight Center";"Application";"GSC-16700-1";;"14/041407";"SpaceCube v2.0 Flight Processor Card";"09/30/2033"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14968-1";7627538;"11/251,538";"Apoptosis And Self-destruct: Mechanisms For Management Of Autonomic Systems";"09/29/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14968-2";7925600;"12/603,140";"SWARM AUTONOMIC AGENTS WITH SELF-DESTRUCT CAPABILITY";"10/21/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14979-1";7601091;"11/426,134";"Modular Gear Bearing";"06/23/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-14994-1";7697759;"11/251,530";"A Split-Remerge Method For Eliminating Processing Window Artifacts In Recursive Hierarchical Segmentation";"09/30/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15001-1";7924415;"12/389,097";"Light Direction Sensor";"02/19/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15002-1";7240879;"11/124,592";"Space Robotic System For In Space Servicing Of Unmanned Spacecraft Applications";"05/06/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15002-2";7513459;"11/670,653";"Method And Associated Apparatus For Capturing, Servicing, And De-Orbiting Earth Satellites Using Robotics";"05/06/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15002-3";7293743;"11/670,270";"Method And Associated Apparatus For Capturing, Servicing, And De-Orbiting Earth Satellites Using Robotics";"11/13/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15002-4";7438264;"11/670,781";"Method And Associated Apparatus For Capturing, Servicing And De-Orbiting Earth Satellites Using Robotics";"05/06/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15002-5";7513460;"11/671,062";"Method And Associated Apparatus For Capturing, Servicing, And De-Orbiting Earth Satellites Using Robotics";"05/06/2025"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15027-1";7412175;"11/425,352";"Millimeter Wave Polarization Transformer";"06/20/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15027-2";7609978;"12/056,964";"INTERFEROMETRIC POLARIZATION CONTROL";"03/27/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15027-3";7616903;"12/057,060";"INTERFEROMETRIC POLARIZATION CONTROL";"03/27/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15030-1";7907333;"11/460,482";"A Pulsed, 1 Micron, Single Frequency, Diode-Seeded Ytterbium-doped Fiber Amplifier With Variable Output Parameters, P";"07/27/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15038-1";7765171;"11/426,853";"SPAACE: Self Properties For An Autonomous & Autonomic Computing Environment";"06/27/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15039-1";7762523;"11/861,038";"Miniaturized Double Latching Solenoid Valve";"09/25/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15042-1";7622907;"11/535,872";"Driven Ground";"09/27/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15055-1";7746190;"11/748,969";"Broadband High Spurious-suppression Microwave Waveguide Filter For Polarization-preserving And Transformer";"05/15/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15077-1";8068556;"12/147,100";"Low Cost TDRSS Tranceiver (LCT2)";"06/26/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15079-1";7886273;"11/532,800";"Generation And Verification Of Policies For Autonomic Systems";"09/18/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15080-1";7979848;"11/533,837";"A Method Of Deriving Process Based Specifications From Scenarios Via Pattern Matching";"09/21/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15115-1";7465926;"11/537,280";"Miniaturized Radiation Spectrometer Development";"09/29/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15136-1";8093094;"12/137,844";"Blocking Contacts For N-Type Cadmium Zinc Cadmium Zinc Telluride (CdZnTe)";"06/12/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15148-1";7668796;"11/536,132";"Enhancing R2D2C Requirements Based Programming With Automata Learning";"09/28/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15162-1";7796726;"11/706,693";"Instrument And Method For X-Ray Diffraction, Fluorescence, And Crystal Texture Analysis Without Sample Preparation";"02/14/2027"
-"NASA Goddard Space Flight Center";"Application";"GSC-15163-2";0;"13/092198";"AIGaN Ultraviolet Detectors For Dual Band UV Detection";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15176-1";7899760;"11/533,855";"Autonomic Quiescence";"09/21/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15177-1";8082538;"11/536378";"A Method For Developing And Maintaining Evolving Systems With Software Product Lines";"09/28/2026"
-"NASA Goddard Space Flight Center";"Application";"GSC-15177-2";0;"13/305932";"A Method For Developing And Maintaining Evolving Systems With Software Product Lines";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15178-1";7992134;"11/536,969";"Modeling, Specifying And Deploying Policies In Autonomous And Autonomic Systems Using An AOSE Methodology";"09/29/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15179-1";7904396;"11/533,895";"An Autonomic Smoke Detector";"09/21/2026"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15184-1";7978312;"11/933,492";"An Active, Solid-state, 3-Dimensional Range Imaging System";"11/01/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15206-1";8041655;"11/836,352";"Otoacoustic Protection In Biologically-Inspired Systems";"08/09/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15206-2";8140452;"13/230915";"Otoacoustic Protection In Biologically-Inspired Systems";"09/13/2031"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15206-3";8140453;"13/230922";"Otoacoustic Protection In Biologically-Inspired Systems";"09/13/2031"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15206-4";8275725;"13/230920";"Otoacoustic Protection In Biologically-Inspired Systems";"09/13/2031"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15206-5";8165976;"13/230922";"Otoacoustic Protection In Biologically-Inspired Systems";"09/13/2031"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15206-6";8165977;"13/230923";"Otoacoustic Protection In Biologically-Inspired Systems";"09/13/2031"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15217-1";8139674;"12/173,243";"Spaceflight Ka-Band High Rate Rad Hard Modulator";"07/15/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15301-1";7673089;"11/935,572";"An Extendibe USB Drive That Accepts External Media";"11/06/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15302-1";7673089;"11/935,572";"An Double-Headed USB Drive";"11/06/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15328-1";8499779;"12/014,889";"Non-Pyrotechnic Zero-Leak Normally-Closed Valve";"01/16/2028"
-"NASA Goddard Space Flight Center";"Application";"GSC-15333-1";0;"11/860,830";"Improved, Flexure-Base Linear Bearing";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15341-1";7922920;"11/862,550";"Low Conductance Silicon Micro-leak for Mass Spectrometer Inlet";"09/27/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15341-3";8455926;"12/889,014 ";"Low Conductance Silicon Micro-leak for Mass Spectrometer Inlet";"09/23/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15349-1";7830527;"12/102,240";"Method And Apparatus For Second Harmonic Generation And Other Frequency Convertion With Multiple Frequency Channels";"04/14/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15353-1";7830224;"11/877,102";"Compact Low-loss Planar Magic-T With Broadband Phase And Amplitude Responses";"10/23/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15357-1";8041661;"11/861,687";"Stability Algorithm For Neural Entities (SANE)";"09/26/2027"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15364-1";8155939;"12/170,683";"Hughes Particle – Surface Interaction Model";"07/10/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15377-1";7811406;"12/249,265";"Advanced Adhesive Bond Shape Tailoring for Large Composite Primary Structures Subjected to Cryogenic and Ambient Loading Environments";"10/10/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15416-1";7999427;"12/188,039";"Directed Flux Motor Utilizing Concentric Magnets and Interwoven Flux Channels";"08/07/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15417-1";7735385;"12/187,562";"Actuated Ball and Socket Joint";"08/07/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15419-1";8030873;"12/187,926";"Improvements to the Walk and Roll Robot";"08/07/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15421-1";7968812;"12/353,009";"Spring Joint Package with Overstrain Sensor ( OS Sensor Joint )";"01/13/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15431-1";7921731;"12/327,514";"A two-axis direct fluid shear stress sensor suited for aerodynamic applications";"12/03/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15445-1";7982861;"12/183,820";"Pseudo-Noise Code Modulation using Return to Zero pulses for Ranging, Altimetry and Communications";"07/31/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15458-1";8094731;"12/357,081";"Space Link Extension Return Channel Frames (SLE-RCF) Service (User side) Software Library";"01/21/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15483-1";7817087;"12/116,518";"Relative Spacecraft Navigation using Reflected GPS Signals";"05/07/2028"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15520-1";8547531;"12/873373";"Non-scanning laser 3D imager";"09/01/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15527-1";8160728;"12/558,672";"Sensor Complete Requirements Algorithm For Autonomous Mobility";"09/14/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15538-1";8198956;"12/535,954";"Compact planar microwave blocking filter";"08/05/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15550-1";8275724;"12/569,422";"A biologically-inspired method of improving system performance and survivability through self-sacrifice";"09/29/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15552-1";7924126;"12/555,634";"Small, High Field Superconducting Magnets";"09/08/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15557-1";8095485;"12/353,637";"Formulation for Emotion Embedding in Logic Systems (FEELS)";"01/14/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15583-1";7970025;"12/496,954";"Tunable Frequency-stabilized Laser via Offset Sideband Locking";"07/02/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15584-1";8144331;"12/487,454";"Hilbert-Transform-Based Phase Referencing Algorithm for Wide-Field Imaging Interferometry.";"06/18/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15655-1";8138961;"12/561,644";"Low Frequency Wideband Step Frequency Inverse Synthetic Aperture Radar For 3-D Imaging of Interior of Near Earth Objects/Planetary Bodies";"09/17/2029"
-"NASA Goddard Space Flight Center";"Application";"GSC-15660-1";0;"13/247416";"Extreme Environment Low Temperature Transistor Models";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15662-1";8092031;"12/569,090";"Flight Mirror Mount and Flight Mounting Procedure for an Ultra-Lightweight High-Precision Glass Mirror";"09/29/2029"
-"NASA Goddard Space Flight Center";"Application";"GSC-15672-1";0;"13/211413";"Multicolor detectors for ultrasensitive long-wave imaging cameras";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15678-1";8484274;"12/549,159";"Optimal Padding for the Two-Dimensional Fast Fourier Transform";"08/27/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15684-1";8285401;"12/549,898";"Discrete Fourier Transform (DFT) Analysis in a Complex Vector Space";"08/28/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15685-1";8331733;"12/550,141";"Sampling Theorem in Terms of the Bandwidth and Sampling Interval";"08/28/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15692-1";8330644;"12/835,958 ";"Expandable Reconfigurable Instrument Node - Web Sensor Strand Demonstration";"07/19/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15693-1";0;"12/570,224";"Variable Sampling Mapping: A novel supplement to iterative-transform phase retrieval algorithms for undersampled images, broadband illumination, and noisy detection environments";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15699-1";8480296;"12/560,535";"A Low Cost, Low Temperature Radiometer for Thermal Measurements.";"09/16/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15724-1";8275015;"12/551,212";"Passively Q-switched side pumped Monolithic Ring Laser";"08/31/2029"
-"NASA Goddard Space Flight Center";"Application";"GSC-15727-1";0;"13/222575";"An All-metal, Solderless Circularly Polarized Microwave Antenna Element with Very Low Off-Axis Cross-Polarization";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15729-1";8674302;"12/789,937";"Novel Superconducting Transition Edge Sensor Design";"05/28/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15729-2";8393786;"12/789,954 ";"Novel Superconducting Transition Edge Sensor Design";"05/28/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15730-1";8355579;"12/783054";"Automatic Extraction of Planetary Image Features";"05/19/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15732-1";8093565;"12/695478";"Crossed Small Deflection Energy Analyzer (SDEA) for Wind/Temperature Spectrometer (WTS)";"01/28/2030"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15758-1";8044332;"12/553,613";"Hybrid Architecture Active Wavefront Sensing and Control";"09/03/2029"
-"NASA Goddard Space Flight Center";"Issued";"GSC-15771-1";8035081;"12/570,166";"High Precision Electric Gate (HPEG) for Time of Flight Mass Spectrometers";"09/30/2029"
-"NASA Goddard Space Flight Center";"Application";"GSC-15774-1";0;"13/154599";"Ensemble Detector";
-"NASA Goddard Space Flight Center";"Application";"GSC-15782-1";0;"13/216479";"Ultra-low Power (< 100mW), 64-Channel Pulse Data Collection System";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15792-1";8406469;"12/838600";"Progressive Band Selection for Hyperspectral Images";"07/19/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15815-1";0;"12/887988";"LIDAR Luminance Quantizer";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15826-1";8134130;"12/839207";"The Corner Cathode: Making Collimated Electron Beams with a Small Number of Electrodes";"07/19/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15829-1";0;"13/601293";"Resolution enhanced pseudo random code technique";"08/31/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-15839-1";0;"12/840787";"Low threshold, narrow linewidth optical parametric generator";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15856-1";8196853;"12/779494";"Aerodynamically Stabilized Instrument Platform for Kites and Tethered Blimps ( AeroPod )";"05/13/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15886-1";0;"12/838963";"Automated Beam Balance Scale Logger";
-"NASA Goddard Space Flight Center";"Application";"GSC-15911-1";0;"13/217965";"Graphite Composite Panel Polishing Fixture";
-"NASA Goddard Space Flight Center";"Application";"GSC-15934-1";0;"12/839125";"Determining Phase Retrieval Sampling from the Modulation Transfer Function";
-"NASA Goddard Space Flight Center";"Application";"GSC-15935-1";0;"13/043257";"New Variables for Iterative Transform Phase Retrieval";
-"NASA Goddard Space Flight Center";"Application";"GSC-15936-1";0;"12/854490";"SpaceCube Version 1.5";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15947-1";8274726;"12/839171";"Sampling and Reconstruction of the Sinc(x) Function";"07/19/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15948-1";0;"13/204767";"Lateral Kevlar Suspension Device (LKSD)";
-"NASA Goddard Space Flight Center";"Application";"GSC-15949-1";0;"13/600992";"Vectorized Rebinning Algorithm for Fast Data Down-Sampling";"08/31/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-15951-1";0;"13/222839";"An Improved Method of Fabricating Single Crystal Silicon Light Weight Mirrors";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15953-1";8484509;"12/854546";"SpaceCube Demonstration Platform";"08/11/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15953-2";0;"13/903357";"SpaceCube Demonstration Platform";"09/30/2029"
-"NASA Goddard Space Flight Center";"Application";"GSC-15957-1";0;"13/211526";"Imaging System Aperture Masks for Image Plane Exit Pupil Characterization";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15964-1";8525110;"13/247,168 ";"An Instrument Suite for the Vertical Characterization of the Ionosphere-Thermosphere System from 100 km to 700km Altitude";"09/28/2031"
-"NASA Goddard Space Flight Center";"Application";"GSC-15970-1";0;"13/034125";"Electrospray Ionization for Chemical Analysis of Organic Molecules for Mass Spectrometry";
-"NASA Goddard Space Flight Center";"Application";"GSC-15976-1";0;"12/872366";"Phase Retrieval System for Assessing Diamond-Turning and other Optical Surface Artifacts";
-"NASA Goddard Space Flight Center";"Issued";"GSC-15977-1";8354952;"12/839060";"Phase Retrieval for Radio Telescope and Antenna Control";"07/19/2030"
-"NASA Goddard Space Flight Center";"Application";"GSC-15979-1";0;"12/839187";"Multi-Scale Image Reconstruction using Wavelets";
-"NASA Goddard Space Flight Center";"Application";"GSC-15994-1";;"13/104538";"Photonic Choke-Joints for Dual-Polarization Waveguides";
-"NASA Goddard Space Flight Center";"Application";"GSC-16006-1";;"13/216671";"Programmable High-Rate Multi-Mission Receiver for Space Communication";
-"NASA Goddard Space Flight Center";"Application";"GSC-16008-1";;"13/600826";"Phase controlled magnetic mirror for wavefront correction";"08/31/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-16016-1";;"13/193272";"Carbon Nanotubes on titanium substrates for stray light suppression";
-"NASA Goddard Space Flight Center";"Issued";"GSC-16024-1";8526733;"13/150,316";"Refinement of the HSEG Algorithm for Improved Computational Processing Efficiency";"06/01/2031"
-"NASA Goddard Space Flight Center";"Application";"GSC-16789-1";;"14/ 033725";"LEARNS (Logic Expansion for Autonomously Reconfigurable Neural Systems)";
-"NASA Goddard Space Flight Center";"Application";"GSC-16029-1";;"13/193249";"Nanostructure secondary mirror apodization mask for transmitter signal suppression in a duplex telescope.";
-"NASA Goddard Space Flight Center";"Application";"GSC-16096-1";;"13/211432";"Prototype Genomics Based keyed-Hash Message Authentication Code Protocol";
-"NASA Goddard Space Flight Center";"Application";"GSC-16100-1";;"12/881587";"Lunar Reconnaissance Orbiter (LRO) Command and Data Handling Flight Electronics Subsystem";
-"NASA Goddard Space Flight Center";"Application";"GSC-16105-1";;"13/197214";"Molecular Adsorber Coating";
-"NASA Goddard Space Flight Center";"Application";"GSC-16109-1";;"13/240180";"HEXPANDO expanding head for fastener retention hexagonal wrench";
-"NASA Goddard Space Flight Center";"Application";"GSC-16122-1";;"13/474053";"Apparatuses and Methods to Enable Sub-MHz Precision in Fast Laser Frequency Tuning";
-"NASA Goddard Space Flight Center";"Application";"GSC-16135-1";;"13/534427";"A cryptographic approach to microRNA target binding analysis";
-"NASA Goddard Space Flight Center";"Application";"GSC-16146-1";;"13/601194";"Wafer Level Microchannel Fabrication Process for Lap-on-a-Chip Devices";"08/31/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-16182-1";;"13/595604";"A High Event Rate, Zero Dead Time, Multi-Stop Time-to-digital Converter Application Specific Integrated Circuit";"08/27/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-16193-1";;"13/720175";"Fine Control and Maintenance Algorithm for Visible Nulling Coronagraphy";"12/19/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-16223-1";;"13/551649";"SpaceCube Mini";
-"NASA Goddard Space Flight Center";"Application";"GSC-16247-1";;"13/570100";"Enhanced adhesion multiwalled carbon nanotubes on titanium substrates for stray light control";
-"NASA Goddard Space Flight Center";"Application";"GSC-16250-1";;"13/150316";"Further Refinement of the Computationally Efficient HSEG Algorithm";
-"NASA Goddard Space Flight Center";"Application";"GSC-16259-1";;"13/050617";"Spaceflight Refuiling Tools";
-"NASA Goddard Space Flight Center";"Application";"GSC-16299-1";;"13/622465";"V-Assembly Dual Head Efficiency Resonator (VADER) Laser Transmitter";"09/19/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-16301-1";;"13/771815";"Impedance matched to vacuum, invisible-edge diffraction suppressed mirror";"02/20/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16327-1";;"13/545173";"Miniaturized laser heterodyne radiometer for carbon dioxide (CO2), methane (CH4), and carbon monoxide (CO) measurements in the atmospheric column.";
-"NASA Goddard Space Flight Center";"Application";"GSC-16328-1";;"13/474367";"Development of the Hilbert-Huang Transform Real-Time Data Processing System with 2-D Capabilities";
-"NASA Goddard Space Flight Center";"Application";"GSC-16438-1";;"13/606174";"Power provision based on self-sacrificing spacecraft";
-"NASA Goddard Space Flight Center";"Application";"GSC-16460-1";;"13/592409";"Autonomic Autopoiesis";"08/23/2032"
-"NASA Goddard Space Flight Center";"Application";"GSC-16461-1";;"13/592412";"Autonomic and Apoptotic Cloud, Autonomic and Apoptotic Grid, Autonomic and Apoptotic Highly Distributed System";
-"NASA Goddard Space Flight Center";"Application";"GSC-16485-1";;"14/038381";"Broadband planar impedance transformer";"09/26/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16516-1";;"14/021812";"Muti-function microposters inside of microfluidic channel for Lab-On-A-Chip device";"09/09/2033"
-"NASA Kennedy Space Center";"Application";"KSC-12866";0;"12/843,353";"In-Situ Wire Damage Detection System";
-"NASA Goddard Space Flight Center";"Application";"GSC-16545-1";;"13/534442";"INTEGRATED GENOMIC AND PROTEOMIC INFORMATION SECURITY PROTOCOL";
-"NASA Goddard Space Flight Center";"Application";"GSC-16555-1";;"14/023847";"Green Precision Cleaning System";"09/11/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16569-1";;"14/041,720";"Mirrorlet array for Integral Field Spectrometers (IFS)";
-"NASA Goddard Space Flight Center";"Application";"GSC-16674-1";;"14/041224";"MISSE-7 Control Center";"09/30/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16795-1";;"13/781,121 ";"Wallops Flight Facility 6U Advanced CubeSat Ejector (ACE)";"01/04/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16805-1";;"14/040924";"SpaceCube v2.0 Micro";"09/30/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16808-1";;"14/040848";"SpaceCube v. 2.0 Flight Power Card";"09/30/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16859-1";;"14/037484";"Chemical sensors based on 2-dimensional materials";"09/26/2033"
-"NASA Goddard Space Flight Center";"Application";"GSC-16887-1";;"14/037458";"Propellant Transfer Assembly Design and Development";"09/26/2033"
-"NASA Headquarters";"Issued";"HQN-11248-1";6223143;"09/143,969";"Quantitative Risk Assessment Software (QRAS) System";"08/31/2018"
-"NASA Kennedy Space Center";"Issued";"KSC-11641";5730806;"08/437,859";"Gas-Liquid Supersonic Cleaning And Cleaning Verification Spray System";
-"NASA Kennedy Space Center";"Issued";"KSC-11751";5710377;"08/540,616";"Improved Portable Ultrasonic Leak Detector (Combined With KSC-11751-2)";
-"NASA Kennedy Space Center";"Issued";"KSC-11804";5693871;"08/695,071";"Low-Differential Pressure Generator For Evaluating Low Differential Pressure Transducers";
-"NASA Kennedy Space Center";"Issued";"KSC-11866-1";5977773;"08/912,035";"Non-Intrusive Impedance-Based Cable Tester - Standing Wave Reflectometer";
-"NASA Kennedy Space Center";"Issued";"KSC-11884";6039783;"08/772,057";"A New Process And Equipment For Conversion Of NOx Scrubber Liquor To Fertilizer (related To KSC-11994)";
-"NASA Kennedy Space Center";"Issued";"KSC-11884-2";6641638;"09/511,634";"Process And Equipment For Nitrogen Oxide Waste Conversion To Fertilizer - Continuation-In-Part Filed 2/17/00";
-"NASA Kennedy Space Center";"Issued";"KSC-11937-2";7209567;"10/390,259";"Communication System With Adaptive Noise Suppression";
-"NASA Kennedy Space Center";"Issued";"KSC-12035-1";6552521;"09/906,014";"Improved Single-Station Accurate Location Of Lightning Strikes (Combined With KSC-12276 & KSC-12173)";
-"NASA Kennedy Space Center";"Issued";"KSC-12049";6627065;"09/977,531";"Liquid Galvanic Coatings For Protection Of Imbedded Metals";
-"NASA Kennedy Space Center";"Issued";"KSC-12056";6676912;"09/698,607";"New Air Pollution Control Technology For Removal Of Nitrogen Oxides From Stationary Combustion Sources";
-"NASA Kennedy Space Center";"Issued";"KSC-12092-2";6967051;"09/939,286";"Thermal Insulation System And Method (Continuing Patent Application) (Combined With KSC-12092)";
-"NASA Kennedy Space Center";"Issued";"KSC-12107";6742926;"09/906,018";"Thermal Insulation Test Apparatus With Sleeve (Related To KSC-12108)";
-"NASA Kennedy Space Center";"Issued";"KSC-12108";6487866;"09/906,011";"Multipurpose Thermal Insulation Test Apparatus (Related To 12107)";
-"NASA Kennedy Space Center";"Issued";"KSC-12168";6452510;"09/802,535";"Personal Cabin Pressure Monitor And Altitude Warning System";
-"NASA Kennedy Space Center";"Issued";"KSC-12190";6764617;"09/994,996";"A Novel Ferromagnetic Conducting Lignosulfonic Acid-Doped Polyaniline (Related To KSC-11940, KSC-11940-1, KSC-11940-2, KSC-12154, KSC-12191)";
-"NASA Kennedy Space Center";"Issued";"KSC-12191-2";7179404;"11/215,205";"Corrosion Prevention Of Cold Rolled Steel Using Water Dispersible Lignosulfonic Acid Doped Polyaniline";
-"NASA Kennedy Space Center";"Issued";"KSC-12205";6715914;"10/185,378";"Apparatus And Method For Thermal Performance Testing Of Pipelines And Piping Systems";
-"NASA Kennedy Space Center";"Issued";"KSC-12220";6917203;"10/235,020";"Current Signature Sensor (Combined With KSC-12152)";
-"NASA Kennedy Space Center";"Issued";"KSC-12221";6757641;"10/185,830";"Multisensor Transducer And Weight Factor (Combined With KSC-12359 and KSC-13139)";
-"NASA Kennedy Space Center";"Issued";"KSC-12235";6793903;"10/014,140";"High-Temperature Decomposition Of Hydrogen Peroxide";
-"NASA Kennedy Space Center";"Issued";"KSC-12235-2";6955799;"10/923,152";"Temperature Decomposition Of Hydrogen Peroxide";
-"NASA Kennedy Space Center";"Issued";"KSC-12235-3";8029736;"10/923,163";"High Temperature Decomposition Of Hydrogen Peroxide";
-"NASA Kennedy Space Center";"Issued";"KSC-12236";8511396;"10/476,175";"Non-Toxic Environmentally Safe Halon Replacement (HABx)";
-"NASA Kennedy Space Center";"Application";"KSC-12236-2-PCT";0;"/0";"Flame Suppression Agent, System And Users";
-"NASA Kennedy Space Center";"Application";"KSC-12236-CIP";;"13/428,736";"Non-Toxic Environmentally Safe Halon Replacement (HABx)";
-"NASA Kennedy Space Center";"Issued";"KSC-12246";6664298;"09/972,296";"Zero-Valent Metal Emulsion For Reductive Dehalogenation Of DNAPLs";
-"NASA Kennedy Space Center";"Issued";"KSC-12246-2";7037946;"10/701,412";"Zero-Valent Metal Emulsion For Reductive Dehalogenation Of DNAPLs";
-"NASA Kennedy Space Center";"Issued";"KSC-12278";7400766;"10/783,295";"Image Edge Extraction Via Fuzzy Reasoning (FRED) (combined With KSC-12272)";
-"NASA Kennedy Space Center";"Issued";"KSC-12386";7274907;"10/748,915";"Modular Wireless Data Acquisition System (combined With KSC-12479, KSC-12486)";
-"NASA Kennedy Space Center";"Issued";"KSC-12390";6824306;"10/318,665";"Thermal Insulation Test Apparatus For Flat Specimens";
-"NASA Kennedy Space Center";"Issued";"KSC-12394";7239751;"10/750,629";"Hypothesis Support Mechanism For Mid-Level Visual Pattern Recognition (PIPR)";
-"NASA Kennedy Space Center";"Issued";"KSC-12458";7156957;"10/440,543";"UV Induced Oxidation Of Nitric Oxide";
-"NASA Kennedy Space Center";"Issued";"KSC-12490";7298897;"10/779,551";"Noniterative Optimal Binarization Of Gray-Scaled Digital Images Via Fuzzy Reasoning (FRAT) (combined With KSC-12272)";
-"NASA Kennedy Space Center";"Issued";"KSC-12518";7790128;"10/641,581";"Hydrogen Peroxide Catalytic Decomposition";
-"NASA Kennedy Space Center";"Issued";"KSC-12539";7285306;"10/684,064";"Self-Healing Wire Insulation";
-"NASA Kennedy Space Center";"Issued";"KSC-12539-2";8119238;"11/856,218";"Self-Healing Wire Insulation";
-"NASA Kennedy Space Center";"Application";"KSC-12539-3";0;"13/348,861";"Self-Healing Wire Insulation";
-"NASA Kennedy Space Center";"Issued";"KSC-12540";6958085;"10/666,821";"High Performance Immobilized Liquid Membranes For Carbon Dioxide Separations";
-"NASA Kennedy Space Center";"Issued";"KSC-12630";7496237;"11/010,698";"Image Processing For Binarization Enhancement Via Fuzzy Reasoning";
-"NASA Kennedy Space Center";"Issued";"KSC-12631";7582147;"11/208,122";"Metallic Pigment Powder Particle For Use In A Liquid Coating System To Protect Reinforcing Steel In Concrete Structures";
-"NASA Kennedy Space Center";"Issued";"KSC-12637";7271199;"10/977,622";"Micro-scale Particle Emulsion And Their Application For Removal Of PCBs And Metals Found In Ex Situ Structures";
-"NASA Kennedy Space Center";"Issued";"KSC-12664";7404938;"10/845,418";"Emission Control System";
-"NASA Kennedy Space Center";"Issued";"KSC-12664-3-CIP";7582271;"11/40,294";"Emission Control System";
-"NASA Kennedy Space Center";"Issued";"KSC-12666";7122166;"10/845,607";"Hydrogen Peroxide Concentrator";
-"NASA Kennedy Space Center";"Issued";"KSC-12669";7302364;"11/83,420";"Integrated Spaceport Automated Data Management Architecture (Combine With KSC-12581, KSC-12583, KSC-12671and KSC-12582)";
-"NASA Kennedy Space Center";"Issued";"KSC-12697";7309738;"10/962,827";"A New Approach For Achieving Fire Retardancy While Retaining Physical Properties In A Compatible Polymer Matrix";
-"NASA Kennedy Space Center";"Issued";"KSC-12697-3";7968648;"11/935,093";"A New Approach For Achieving Flame Retardancy While Retaining Physical Properties In A Compatible Polymer Matrix";
-"NASA Kennedy Space Center";"Issued";"KSC-12703";8031449;"12/485,979";"Integral Battery Power Limiting Circuit For Intrinsically Safe Applications";
-"NASA Kennedy Space Center";"Issued";"KSC-12723";7790225;"11/239,445";"Coating For Corrosion Detection And Prevention";
-"NASA Kennedy Space Center";"Application";"KSC-12723-DIV";;"12/792,238";"Coating For Corrosion Detection And Prevention";
-"NASA Kennedy Space Center";"Issued";"KSC-12848";7781492;"11/759,672";"New Organic/inorganic Polymeric Thermal Insulators";
-"NASA Kennedy Space Center";"Issued";"KSC-12848-DIV";7977411;"12/835,233";"New Organic/inorganic Polymeric Thermal Insulators";
-"NASA Kennedy Space Center";"Application";"KSC-12871-CIP";0;"13/915,407";"Polyimide Wire Insulation Repair System";
-"NASA Kennedy Space Center";"Application";"KSC-12871-DIV1";0;"14/093,701";"Polyimide Wire Insulation Repair System";
-"NASA Kennedy Space Center";"Application";"KSC-12871-DIV2";0;"14/093,680";"Polyimide Wire Insulation Repair System";
-"NASA Kennedy Space Center";"Issued";"KSC-12875";7841771;"11/777,711";"Self Validating Thermocouple (Combined With KSC-12865)";
-"NASA Kennedy Space Center";"Issued";"KSC-12878-2-CIP";8163972;"12/465,457";"Bimetallic Treatment System and it's application for Removal of PCBs Found in Ex Situ Structures without the Use of a Catalized Agent";
-"NASA Kennedy Space Center";"Issued";"KSC-12890";7790787;"11/740,357";"New Organic/Inorganic Polymeric Materials";
-"NASA Kennedy Space Center";"Application";"KSC-12890-2-DIV";0;"12/834,416";"New Organic/Inorganic Polymeric Materials";
-"NASA Kennedy Space Center";"Issued";"KSC-12899";8425866;"11/466,624";"Gas Phase Oxidation Of NO To NO2";
-"NASA Kennedy Space Center";"Issued";"KSC-12978";7842639;"11/749,767";"Preparation of a Bimetal Using Mechanical Alloying for the Dehalogenation of Compounds";
-"NASA Kennedy Space Center";"Issued";"KSC-12978-DIV";8288307;"12/909,219";"Preparation of a Bimetal Using Mechanical Alloying for the Dehalogenation of Compounds";
-"NASA Kennedy Space Center";"Issued";"KSC-12983";8409534;"11/692,557";"Mercury Emission Control System";
-"NASA Kennedy Space Center";"Application";"KSC-13047";0;"12/813,864";"Insulation Test Cryostat with Lift Mechanism (Combined with KSC-13048)";
-"NASA Kennedy Space Center";"Application";"KSC-13047-DIV";0;"14/090,193";"Insulation Test Cryostat with Lift Mechanism (Combined with KSC-13048)";
-"NASA Kennedy Space Center";"Issued";"KSC-13088";8293178;"11/935,545";"Improved Thermal Reactivity Of Hydrogen Sensing Pigments In Manufactured Polymer Composites";
-"NASA Kennedy Space Center";"Application";"KSC-13088-CON";0;"13/611,856";"Improved Thermal Reactivity Of Hydrogen Sensing Pigments In Manufactured Polymer Composites";
-"NASA Kennedy Space Center";"Application";"KSC-13088-DIV";0;"13/615,850";"Improved Thermal Reactivity Of Hydrogen Sensing Pigments In Manufactured Polymer Composites";
-"NASA Kennedy Space Center";"Application";"KSC-13161";0;"12/855,791";"PH Sensitive Microcapsule With Corrosion Indicator";
-"NASA Kennedy Space Center";"Application";"KSC-13167";0;"12/856,849";"Watercore PH Sensitive Microcapsule";
-"NASA Kennedy Space Center";"Application";"KSC-13265-CIP2";0;"14/150,502";"An Inductive Non-Contact Position Sensor";
-"NASA Kennedy Space Center";"Application";"KSC-13278";0;"13/354,576";"A Method for Making Elongated Microcapsules Under Simple Shear Conditions";
-"NASA Kennedy Space Center";"Issued";"KSC-13285";8593153;"12/843,382";"An improved Online Diagnostic Device (ODD) for Wiring Evaluation";
-"NASA Kennedy Space Center";"Issued";"KSC-13331";8577639;"13/031,182";"A Method for Accurately Calibrating a Spectrometer Using Broadband Light";
-"NASA Kennedy Space Center";"Application";"KSC-13336";0;"12/843,487";"Sputter Coated wire for in-situ wire damage detection";
-"NASA Kennedy Space Center";"Application";"KSC-13343";0;"13/278,710";"Conductive Carbon Nanotube for use with Desktop Inkjet Printing";
-"NASA Kennedy Space Center";"Application";"KSC-13366";0;"13/523,806";"High Performance Self Healing Film";
-"NASA Kennedy Space Center";"Application";"KSC-13579";;"13/895,717";"Green PCB Removal From Sediment Systems (GPRSS)";
-"NASA Kennedy Space Center";"Application";"KSC-13588";;"13/495,862";"Multi-Dimensional Damage Detection For Flat Surfaces";
-"NASA Kennedy Space Center";"Application";"KSC-13592";;"13/542,155";"pH sensitive microparticles";
-"NASA Kennedy Space Center";"Application";"KSC-13595";;"14/192,784";"Aerogel insulation and composites integrated into unique lay-ups (Incorporates Embodiments from KSC-13702)";
-"NASA Kennedy Space Center";"Application";"KSC-13636";;"13/546,880";"Incorporation of Chemochromic Indicator for the Presence of Hypergolic Fuels into a Variety of Manufactured Parts";
-"NASA Kennedy Space Center";"Application";"KSC-13638";;"14/176,824";"A Two Dimensional Inductive Position Sensor";
-"NASA Kennedy Space Center";"Application";"KSC-13664";;"13/896,896";"Regolith Advanced Surface Systems Operations Robot (RASSOR) Excavator";
-"NASA Kennedy Space Center";"Application";"KSC-13689";;"13/961,521";"Coherence Multiplexing of Wireless Surface Acoustic Wave Sensors";
-"NASA Langley Research Center";"Issued";"LAR-14673-1";5736642;"08/778,066";"Nonlinear Ultrasonic Scanning To Detect Material Defects";"01/08/2017"
-"NASA Langley Research Center";"Issued";"LAR-14840-1";5841032;"08/792,909";"Variable And Fixed Frequency Pulsed Phase-Locked Loop";"01/24/2017"
-"NASA Langley Research Center";"Issued";"LAR-15205-1";5741883;"08/359,752";"Tough, Soluble, Aromatic, Thermoplastic Copolyimides";"04/21/2015"
-"NASA Langley Research Center";"Issued";"LAR-15282-1";5755571;"08/712,984";"Ultrasonic Periodontal Structures Mapping Device";"09/09/2016"
-"NASA Langley Research Center";"Issued";"LAR-15318-1";5798521;"08/806,732";"Distributed Fiber-optic Strain Sensor";"02/27/2017"
-"NASA Langley Research Center";"Issued";"LAR-15348-1";5632841;"08/416,598";"Thin Layer Composite Unimorph Ferroelectric Driver And Sensor, THUNDER";"04/04/2015"
-"NASA Langley Research Center";"Issued";"LAR-15348-2";6734603;"08/797,553";"Thin Layer Composite Unimorph Ferroelectric Driver And Sensor";"04/04/2015"
-"NASA Langley Research Center";"Issued";"LAR-15351-1-CU";5585083;"08/414,661";"Catalyst For Formaldehyde Oxidation";"03/30/2015"
-"NASA Langley Research Center";"Issued";"LAR-15370-1-SB";5640408;"08/593,438";"Quasi Four-Level TM:LuAG Laser (Tm:LuAG Laser)";"01/27/2016"
-"NASA Langley Research Center";"Issued";"LAR-15376-1";5771204;"08/754,642";"Relative Phase Measurement Instrument For Multiple-Echo Systems";"11/21/2016"
-"NASA Langley Research Center";"Issued";"LAR-15406-1";5617873;"08/449,473";"Noninvasive Meth/Apparatus For Monitoring Intracranial Pressure & Pressure Vols Index In Humans";"05/23/2015"
-"NASA Langley Research Center";"Issued";"LAR-15412-1";5606014;"08/511,422";"Imide Oligomers And Co-Oligomers Containing Pendent Phenylethynyl Groups And Polymers Therefrom";"08/04/2015"
-"NASA Langley Research Center";"Issued";"LAR-15412-2";5689004;"08/747,472";"Imide Oligomers And Co-Oligomers Containing Pendent Phenylethynyl Groups And Polymers Therefrom";"08/04/2015"
-"NASA Langley Research Center";"Issued";"LAR-15449-1";6133401;"09/342,462";"A Method To Prepare Processable Polyimides With Reactive Endgroups Using 1,3 Bis (3-Aminophenoxyl) Benzene";"06/29/2019"
-"NASA Langley Research Center";"Issued";"LAR-15449-2";6288209;"09/667,426";"Method To Prepare Processable Polyimides With Reactive Endgroups Using 1,3-Bix(3-Aminophenoxyl)Benzene";"06/29/2019"
-"NASA Langley Research Center";"Issued";"LAR-15507-1";6475147;"09/493,044";"Ultrasonic Technique To Measure Intracranial Pressure";"01/27/2020"
-"NASA Langley Research Center";"Issued";"LAR-15508-1";6545760;"09/535,659";"Distributed Rayleigh Scatter Fiber Optic Strain Sensor";"03/24/2020"
-"NASA Langley Research Center";"Issued";"LAR-15514-1-SB";5991456;"08/654,840";"Method Of Improving A Digital Image";"05/29/2016"
-"NASA Langley Research Center";"Issued";"LAR-15524-1";6000844;"08/810,058";"A Method And Apparatus For The Portable Identification Of Material Thickness Of Layers Using A Scanning Linear Heat Source And Infrared Detectorcramer";"03/04/2017"
-"NASA Langley Research Center";"Issued";"LAR-15525-1-CU";5948965;"08/845,899";"Solid State Carbon Monoxide Sensor";"04/28/2017"
-"NASA Langley Research Center";"Issued";"LAR-15637-1";6015272;"08/673,627";"Magnetically Suspended Miniature Fluid Pump And Method Of Making Same";"06/26/2016"
-"NASA Langley Research Center";"Issued";"LAR-15637-2";6447265;"09/398,878";"Magnetically Suspended Miniature Fluid Pump And Method Of Designing The Same";"06/26/2019"
-"NASA Langley Research Center";"Issued";"LAR-15652-1-CU";6132694;"08/991,075";"Catalyst For Oxidation Of Hydro-Carbons And Volatile Organic Compounds";"12/16/2017"
-"NASA Langley Research Center";"Application";"LAR-15665-1-CU";0;"08/838,596";"Catalyst For Carbon Monoxide Oxidation";
-"NASA Langley Research Center";"Issued";"LAR-15745-1";6222007;"09/093,826";"Prepreg And Composites Made From Polyimide Salt-Like Solution";"05/29/2018"
-"NASA Langley Research Center";"Issued";"LAR-15747-1-CU";6200539;"09/357,403";"One-Atmosphere Uniform Glow Discharge Plasma Gas Flow Acceleration";"07/20/2019"
-"NASA Langley Research Center";"Issued";"LAR-15767-1";6180746;"09/316,428";"Polyimide Foam From Ether-Containing Monomeric Solutions";"05/21/2019"
-"NASA Langley Research Center";"Issued";"LAR-15816-1";6629341;"09/430,677";"Macro-Fiber Composite Actuator With Interdigitated Electrodes";"10/29/2019"
-"NASA Langley Research Center";"Issued";"LAR-15816-2";7197798;"10/653,824";"A Method For Fabricating A Piezoelectric Composite Apparatus";"06/30/2020"
-"NASA Langley Research Center";"Issued";"LAR-15817-1";6450820;"09/612,412";"A Method Of Encouraging Physiological Self-Regulation Through Modulation Of An Operator's Control Input To A Video Game Or Training Simulator";"07/12/2020"
-"NASA Langley Research Center";"Issued";"LAR-15818-3";6922242;"10/465,386";"Optical Path Switching Based Differential Absorption Radiometry For Substance Detection";"06/21/2019"
-"NASA Langley Research Center";"Issued";"LAR-15831-1";5994418;"09/316,865";"Hollow Polyimide Microspheres";"05/21/2019"
-"NASA Langley Research Center";"Issued";"LAR-15831-2";6235803;"09/408,652";"Hollow Polyimide Microspheres";"05/21/2019"
-"NASA Langley Research Center";"Issued";"LAR-15831-3";6084000;"09/394,534";"Hollow Polyimide Microsphere";"05/21/2019"
-"NASA Langley Research Center";"Issued";"LAR-15834-1";6359107;"09/575,826";"High Performance / High Temperature Resins For Infusion And Transfer Molding Processes";"05/18/2020"
-"NASA Langley Research Center";"Issued";"LAR-15851-1-CU";6753293;"09/607,211";"Process For Coating Substrates With Catalyst Materials";"05/11/2021"
-"NASA Langley Research Center";"Issued";"LAR-15854-1";6761695;"10/94,023";"Technique For Non-Invasive Absolute Measurement Of Intra-Cranial Pressure In Humans";"07/28/2022"
-"NASA Langley Research Center";"Issued";"LAR-15927-1";6584848;"10/263,292";"Dielectric Electrostatic Ultrasonic Transducer (DEUT)";"09/30/2022"
-"NASA Langley Research Center";"Issued";"LAR-15934-1";6566648;"09/535,661";"Edge Triggered Apparatus And Method For Measuring Strain In Bragg Gratings";"03/24/2020"
-"NASA Langley Research Center";"Issued";"LAR-15943-1";6746410;"10/121,932";"Transducer Assembly To Measure Changes In Circumferential Expansion Of The Human Skull Due To Changes In Intracranial Pressure";"11/16/2022"
-"NASA Langley Research Center";"Issued";"LAR-15954-1";6376830;"09/606,120";"Single Laser Sweep Full S-Parameter Characterization Of Fiber Bragg Gratings";"06/15/2020"
-"NASA Langley Research Center";"Issued";"LAR-15959-1";7019621;"09/753,370";"Structural Tailored High Displacement Ferro-Electric Sensors And Actuators";"01/02/2021"
-"NASA Langley Research Center";"Issued";"LAR-15977-1";6133330;"09/337,475";"Polyimide Foam From Monomeric Solutions";"05/21/2019"
-"NASA Langley Research Center";"Issued";"LAR-15990-1";6551251;"09/784,413";"Dual Transmission Interface For Passive Fetal Heart Monitoring";"02/13/2021"
-"NASA Langley Research Center";"Issued";"LAR-16001-1";7371358;"10/975,117";"Catalyst For Treatment And Control Of Post-Combustion Emissions";"10/25/2024"
-"NASA Langley Research Center";"Issued";"LAR-16005-1";6426496;"09/648,529";"High Precision Solid State Wavelength Monitor";"11/26/2020"
-"NASA Langley Research Center";"Issued";"LAR-16012-1-CU";6834125;"09/888,701";"Improvement To The Multiscale Retinex With Color Restoration";"06/25/2021"
-"NASA Langley Research Center";"Issued";"LAR-16020-1";6629446;"09/758,115";"Single Vector Force Balance Calibration System";"01/26/2022"
-"NASA Langley Research Center";"Issued";"LAR-16079-1";6939940;"09/757,398";"Liquid Crystalline Thermosets From Oligo-Esters, Ester-Imides And Ester-Amides";"01/05/2021"
-"NASA Langley Research Center";"Issued";"LAR-16083-1";8062129;"11/536,811";"A Method And System For Multi-Player Game Playing Where Physiological Characteristics Of The Players Modulate Their Relative Advantage Over Opponents Or Competitors";"05/22/2030"
-"NASA Langley Research Center";"Issued";"LAR-16116-1";6888346;"10/21,683";"Giant Magnetoresistive Based Self-Nulling Probe For Deep Flaw Detection";"11/28/2021"
-"NASA Langley Research Center";"Issued";"LAR-16176-2";7109287;"10/988,407";"Space Environmentally Durable Polyimides And Copolyimides";"03/03/2025"
-"NASA Langley Research Center";"Issued";"LAR-16220-1";6867533;"09/696,527";"Shaping, Tuning, And Positioning Membrane Structures Using Electroactive Polymer Actuators";"10/23/2020"
-"NASA Langley Research Center";"Issued";"LAR-16231-1-CU";7092539;"09/997,113";"MEMS Based Acoustic Array";"11/28/2021"
-"NASA Langley Research Center";"Issued";"LAR-16256-1";8628333;"11/129,756";"Method And System For Training Psychophysiological Skills Conducive To Optimal Performance Through Perturbation Of Training Tasks, Environments And Devices";"08/27/2029"
-"NASA Langley Research Center";"Application";"LAR-16256-1-CON";0;"14/153,434";"Method And System For Training Psychophysiological Skills Conducive To Optimal Performance Through Perturbation Of Training Tasks, Environments And Devices";"05/13/2025"
-"NASA Langley Research Center";"Issued";"LAR-16299-1";7871682;"10/956,520";"Composite Roll Press And Processes";"12/07/2025"
-"NASA Langley Research Center";"Issued";"LAR-16307-1-SB";7390768;"10/056,845";"Methodology For The Effective Stabilization Of Tin-Oxide-Based Oxidation/Reduction Catalysts";"01/22/2022"
-"NASA Langley Research Center";"Issued";"LAR-16307-2";7985709;"10/956,515";"Methodology For The Effective Stabilization Of Tin-Oxide-Based Oxidation/Reduction Catalysts";"04/16/2027"
-"NASA Langley Research Center";"Application";"LAR-16308-2";0;"12/726,403";"Catalyst For Decomposition Of Nitrogen Oxides (Divisional of LAR 16308-1-CU)";
-"NASA Langley Research Center";"Issued";"LAR-16311-1";6777525;"10/115,812";"Heat, Moisture, Chemical Resistant Polyimide Compositions And Methods For Making And Using The Same";"04/01/2022"
-"NASA Langley Research Center";"Issued";"LAR-16323-1";7253903;"11/27,930";"Method To Linearize Non-Linear Physical Measurements";"06/24/2025"
-"NASA Langley Research Center";"Issued";"LAR-16324-1";6714132;"10/011,229";"Proximity Sensor";"11/27/2021"
-"NASA Langley Research Center";"Issued";"LAR-16324-2";7106203;"10/783,486";"Self-Activating System And Method For Alerting When An Object Or Person Is Left Unattended";"11/27/2021"
-"NASA Langley Research Center";"Issued";"LAR-16326-1";7060991;"10/410,605";"Method For Measuring Thickness Of Small Radius Of Curvature Structures Using A Thermal Line Scanner";"04/10/2023"
-"NASA Langley Research Center";"Issued";"LAR-16332-1-CU";6842543;"09/888,816";"Method Of Improving A Digital Image Having White Zones";"06/25/2021"
-"NASA Langley Research Center";"Issued";"LAR-16363-1";6856073;"10/390,675";"Radial Electric Field Piezo-Diaphragm Fluidic Control Systems";"03/13/2023"
-"NASA Langley Research Center";"Issued";"LAR-16383-1-NP";7588699;"10/288,797";"Electrically Conductive, Optically Transparent Polymer/Carbon Nanotube Composites And Process For Preparation Thereof";"07/02/2023"
-"NASA Langley Research Center";"Issued";"LAR-16383-2";7972536;"12/546,724";"Electrically Conductive, Optically Transparent Polymer/Carbon Nanotube Composites And Process For Preparation Thereof";"10/12/2029"
-"NASA Langley Research Center";"Issued";"LAR-16390-1-SB";7318915;"10/342,660";"Ruthenium Stabilization Mechanism For Next Generation Oxidation And Reduction Catalyst Systems";"01/13/2023"
-"NASA Langley Research Center";"Issued";"LAR-16393-1";6919669;"10/392,491";"Sonic Transducers And Sensors Using Radial Field Diaphragms";"05/31/2023"
-"NASA Langley Research Center";"Issued";"LAR-16406-1-CU";7491169;"10/805,816";"Ultrasonic Method And Means To Assess Compartment Syndrome (Hyper Pressure States In Arm, Leg Muscle/Tendon Compartments)";"09/20/2025"
-"NASA Langley Research Center";"Issued";"LAR-16409-1";8015819;"11/536,790";"Wet Active Chevron Nozzle For Controllable Jet Noise Reduction";"09/17/2028"
-"NASA Langley Research Center";"Issued";"LAR-16432-1";7692116;"10/188,525";"Synthesis Of Carbon Nanotubes Using High Average Power Ultrafast Laser Ablation";"07/03/2022"
-"NASA Langley Research Center";"Issued";"LAR-16437-1-NP";7169374;"11/129,751";"Templated Growth Of Carbon Nanotubes";"05/11/2025"
-"NASA Langley Research Center";"Issued";"LAR-16440-1";6740048;"10/263,285";"Method Of Determining Intracranial Pressure From Skull Expansion Measurements";"09/25/2022"
-"NASA Langley Research Center";"Issued";"LAR-16475-1";7194912;"10/890,843";"Carbon Nanotube-Based Structural Health Monitoring Sensor";"08/07/2024"
-"NASA Langley Research Center";"Issued";"LAR-16496-1";7104498;"10/867,114";"Blown Channel-Wing System For Thrust Deflection And Force/Moment Generation";"10/03/2024"
-"NASA Langley Research Center";"Issued";"LAR-16499-1";7491428;"10/730,188";"Method for the controlled deposition and alignment of single walled carbon nanotubes";"11/15/2025"
-"NASA Langley Research Center";"Issued";"LAR-16510-1";6773407;"10/263,286";"Non-Invasive Method Of Determining Absolute Intracranial Pressure";"12/25/2022"
-"NASA Langley Research Center";"Issued";"LAR-16516-1";6879893;"10/675,502";"Autonomous Health Monitoring Architecture Hardware";"09/30/2023"
-"NASA Langley Research Center";"Issued";"LAR-16517-1";7048228;"10/678,474";"Partial-Span Slotted Wing For Transonic Aircraft";"10/03/2023"
-"NASA Langley Research Center";"Issued";"LAR-16532-1";7334998;"11/5,624";"Low-Noise Fan Exit Guide Vanes";"12/06/2024"
-"NASA Langley Research Center";"Issued";"LAR-16538-1";7675619;"12/129,967";"Micro-LiDAR For In-Flight Flow Velocimetry And Boundary Layer Control";"11/11/2028"
-"NASA Langley Research Center";"Issued";"LAR-16549-1";7262543;"10/943,655";"Inductor (L)-Capacitor ( C ) (aka, LC) Sensor Circuit For Piezo Material Monitoring";"04/17/2025"
-"NASA Langley Research Center";"Application";"LAR-16565-1";0;"13/020,025";"e-Sensor: Quantitative Imaging of Electric Fields and Electric Potentials";
-"NASA Langley Research Center";"Issued";"LAR-16566-1";7285932;"10/975,119";"Method And Apparatus For Loss Of Control Inhibitor Systems";"10/27/2024"
-"NASA Langley Research Center";"Issued";"LAR-16571-1";7075295;"10/839,448";"LC Sensing Element For Closed Cavities Having Low Radio Frequency Transmissivity";"04/30/2024"
-"NASA Langley Research Center";"Issued";"LAR-16571-2";7589525;"11/421,886";"Magnetic Field Response Sensor For Conductive Media";"09/26/2024"
-"NASA Langley Research Center";"Issued";"LAR-16571-3";7759932;"12/533,520";"Magnetic Field Response Sensor For Conductive Media";"07/31/2029"
-"NASA Langley Research Center";"Issued";"LAR-16573-1";7129467;"10/943,831";"Carbon Nanotube Based Light Sensor";"09/29/2024"
-"NASA Langley Research Center";"Issued";"LAR-16575-1";7181942;"10/943,649";"Instrumented Crimping Tool For Critical Wiring Applications";"11/24/2024"
-"NASA Langley Research Center";"Issued";"LAR-16605-1";7623993;"10/731,742";"Energy-extraction-based active noise control system";"11/27/2026"
-"NASA Langley Research Center";"Issued";"LAR-16615-1";6956066;"10/779,552";"Polyimide Foams";"02/11/2024"
-"NASA Langley Research Center";"Issued";"LAR-16615-2";7541388;"11/124,640";"Polyimide Foams";"05/05/2025"
-"NASA Langley Research Center";"Issued";"LAR-16616-1";7758927;"10/956,704";"Laser-Induced Fabrication Of Metallic Interlayers And Patterns In Polyimide Films";"09/30/2024"
-"NASA Langley Research Center";"Issued";"LAR-16640-1";8089677;"12/135,180";"Programmable Smart Grating Device With Quantum Aperture Array";"08/05/2029"
-"NASA Langley Research Center";"Issued";"LAR-16696-1";7048235;"10/678,397";"Slotted Aircraft Wing (a.k.a. Full Span Slotted Wing)";"10/03/2023"
-"NASA Langley Research Center";"Issued";"LAR-16698-1";7394181;"11/76,824";"High Performance High Efficiency Hybrid Actuator Systems (HYBAS)";"03/04/2025"
-"NASA Langley Research Center";"Issued";"LAR-16736-1";7962252;"11/422,984";"Semi Autonomous Flight System With Avionics Sensor Board, Processing Board, And Flight Control Board";"04/07/2027"
-"NASA Langley Research Center";"Issued";"LAR-16845-1";8083986;"12/315,520";"Advanced Thermo-Electric Materials with Nano-Voids";"12/04/2028"
-"NASA Langley Research Center";"Issued";"LAR-16854-1";7381186;"10/911,755";"Ultrasonic Method And Means To Assess Compartment Syndrome Part B";"08/02/2024"
-"NASA Langley Research Center";"Issued";"LAR-16858-1";7667847;"11/533,921";"Thin, High-Contrast Targets for Ultralightweight Structures";"12/15/2026"
-"NASA Langley Research Center";"Issued";"LAR-16867-1";7402264;"11/076,460";"Electroactive polymer-carbon nanotube-ceramic nanocomposites";"02/27/2026"
-"NASA Langley Research Center";"Issued";"LAR-17548-1";8236413;"12/166,852";"Fail Safe High-Temperature Composite Structure";"07/07/2030"
-"NASA Langley Research Center";"Issued";"LAR-16867-2";7527751;"12/109,490";"Sensing/Actuating Materials Made From Carbon Nanotube Polymer Composites And Methods For Making Same";"04/25/2028"
-"NASA Langley Research Center";"Issued";"LAR-16868-1";7341883;"11/242,415";"Lattice Matched SiGe Layer On Single Crystalline Sapphire Substrate";"09/27/2025"
-"NASA Langley Research Center";"Issued";"LAR-16871-1";6413227;"09/459,384";"Optimization Of Ultrasonic Method For Assessment Of Changes In Intracranial Pressure Through Measurement Of Skull Expansion";"12/02/2019"
-"NASA Langley Research Center";"Issued";"LAR-16872-1";7514726;"11/387,086";"Graded Indexed SiGe Layers on Lattice Matched SiGe Layers on Sapphire";"06/10/2027"
-"NASA Langley Research Center";"Issued";"LAR-16874-1";7723464;"11/674,321";"Novel Aromatic/Aliphatic Diamine Derivatives For Advanced Compositions And Polymers";"02/13/2027"
-"NASA Langley Research Center";"Issued";"LAR-16877-1";7186367;"11/110,996";"Double-Vacuum Bag (DVB) Process For Volatile Management In Resin Matrix Composite Manufacturing";"07/08/2025"
-"NASA Langley Research Center";"Issued";"LAR-16885-1";7890311;"11/177,664";"Method Of Simulating Flow-Through Area Of A Pressure Regulator";"12/15/2029"
-"NASA Langley Research Center";"Issued";"LAR-16886-1";7375808;"11/536,120";"Dual Sensing Capable Germ Or Toxic Chemical (GTC) Sensor Using Quantum Aperture Array With Surface Plasmon Polariton (SPP)";"09/28/2026"
-"NASA Langley Research Center";"Issued";"LAR-16900-1";7278324;"11/155,923";"CNT based crack growth detector and strain field monitor";"08/07/2024"
-"NASA Langley Research Center";"Issued";"LAR-16906-1";8529825;"12/928,128";"Fabrication of Nanovoid-imbedded Bismuth Telluride with Low Dimensional System";"02/01/2028"
-"NASA Langley Research Center";"Issued";"LAR-16907-1";7783060;"11/126,518";"A Deconvolution Approach For The Mapping Of Acoustic Sources (DAMAS) Determined From Phased Microphone Arrays";"03/27/2029"
-"NASA Langley Research Center";"Issued";"LAR-16908-1";7086593;"10/839,445";"Magnetic Field Response Measurement Acquisition System (Includes LAR-16138-1, LAR-16554-1, LAR-16591-1, LAR-16614-1, LAR-16617-1, & LAR-16908-1)";"05/04/2024"
-"NASA Langley Research Center";"Issued";"LAR-16946-1";7484930;"11/169,256";"Blowing Flap Side Edge";"07/01/2025"
-"NASA Langley Research Center";"Issued";"LAR-16950-1";7379231;"11/470,771";"Ferroelectric Light Control Device";"09/07/2026"
-"NASA Langley Research Center";"Issued";"LAR-16958-1";7510802;"11/371,575";"Fabrication of Multilayer Ferritin Array for Bionanobattery";"08/24/2027"
-"NASA Langley Research Center";"Issued";"LAR-16970-1";7231832;"11/229,439";"Method For Determining Cracks On And Within Composite Panels";"12/02/2025"
-"NASA Langley Research Center";"Issued";"LAR-16974-1";7047807;"11/203,583";"Methods Of Mounting Erectable, Flexible And Fixed Magnetic Field Response Sensors";"08/08/2025"
-"NASA Langley Research Center";"Issued";"LAR-17003-1";7467921;"11/239,436";"Rotor Blade Vortex Management Via Boundary Layer Separation Control";"09/22/2025"
-"NASA Langley Research Center";"Issued";"LAR-17013-1";7647771;"11/374,480";"Thermally Driven Miniature Piston Actuator";"11/12/2026"
-"NASA Langley Research Center";"Issued";"LAR-17017-1";7537182;"11/250,700";"Enhanced Separation Control Via Simultaneous Multiple-Location Forcing";"06/18/2027"
-"NASA Langley Research Center";"Issued";"LAR-17032-1";7321185;"11/370,377";"A New Concept For Active Bistable Twisting Structures";"03/06/2026"
-"NASA Langley Research Center";"Issued";"LAR-17044-1";7558371;"12/254,150";"Applications Of Twin-Detection XRD Methods On SiGe (111) Layers On Sapphire (0001) Substrate";"10/20/2028"
-"NASA Langley Research Center";"Issued";"LAR-17073-1";7580323;"11/419,818";"Interdigitated Electrode Actuators For Straining Optical Fibers (IDEAS)";"05/27/2026"
-"NASA Langley Research Center";"Application";"LAR-17088-1";0;"13/032,045";"Nanotubular Toughening Inclusions For Improved Mechanical Reinforcement";
-"NASA Langley Research Center";"Issued";"LAR-17112-1";7507472;"11/81,888";"Multi-Layer Electroactive Devices";"09/08/2025"
-"NASA Langley Research Center";"Issued";"LAR-17116-1";7506541;"11/328,468";"Wireless Fuel Volume Measurement Techniques";"10/18/2026"
-"NASA Langley Research Center";"Issued";"LAR-17126-1";7666939;"11/432,201";"A Method For Producing Stable Dispersions Of Single Walled Carbon Nanotubes In Polymer Matrices Using Noncovalent Interactions";"05/11/2026"
-"NASA Langley Research Center";"Issued";"LAR-17128-1";7285933;"11/188,227";"Method And Apparatus For Loss Of Control Inhibitor Systems";"07/20/2025"
-"NASA Langley Research Center";"Issued";"LAR-17135-1";8217143;"11/827,567";"Fabrication of Metal Nanoshells Derived by a Biotemplate";"11/17/2030"
-"NASA Langley Research Center";"Issued";"LAR-17149-2";8608993;"13/053,633";"A Method For Producing Multifunctional Structural Thermally Stable Nanocomposites With Aligned Carbon Nanotubes";"05/20/2026"
-"NASA Langley Research Center";"Issued";"LAR-17154-1";7655595;"11/421,924";"Sprayable Low Temperature Oxidation Catalyst Coating Based on Sol-Gel Technology";"08/11/2027"
-"NASA Langley Research Center";"Issued";"LAR-17154-2";7781366;"12/369,932";"Sol-Gel Based Oxidation Catalyst And Coating System Using Same (Divisional of -1)";"02/12/2029"
-"NASA Langley Research Center";"Issued";"LAR-17155-1";7255004;"11/229,438";"Wireless Fluid-Lead Measuring Dipstick Assembly (Broken Out Of LAR-16974-1)";"03/22/2026"
-"NASA Langley Research Center";"Issued";"LAR-17157-1";7507784;"11/124,508";"Liquid Crystalline Thermosets From Ester, Ester-Imide, And Ester-Amide Oligomers";"01/05/2021"
-"NASA Langley Research Center";"Issued";"LAR-17163-1";7467536;"11/428,017";"Multi-axis Accelerometer Calibration System Using a Cuboidal Attitude Positioning Device";"08/18/2027"
-"NASA Langley Research Center";"Issued";"LAR-17165-1";7595112;"11/461,150";"Method To Prepare Hybrid Metal/Composite Laminates By Resin Infusion";"02/01/2028"
-"NASA Langley Research Center";"Issued";"LAR-17168-1";7732998;"11/462,114";"Cylindrical Shaped Micro Fiber Composite (CMFC) Actuators";"09/24/2027"
-"NASA Langley Research Center";"Issued";"LAR-17169-1";7446459;"11/486,200";"Hybrid Force/Stress Amplified Piezoelectric Energy Harvesting Transducer System";"07/13/2026"
-"NASA Langley Research Center";"Application";"LAR-17211-1";0;"13/557,250";"Floating Ultrasonic Transducer Inspection System For Nondestructive Evaluation";
-"NASA Langley Research Center";"Issued";"LAR-17213-1";8020805;"11/831,233";"New Configuration and Power Technology for Application-Specific Scenarios of High Altitude Airships";"03/25/2030"
-"NASA Langley Research Center";"Issued";"LAR-17224-1";7998368;"12/272,826";"Effective Dispersion of Carbon Nanotubes in an Aqueous Solution and Their Application on Bionanotechnology";"06/04/2029"
-"NASA Langley Research Center";"Issued";"LAR-17229-1";7760778;"11/670,044";"Thin-film evaporative cooling concept for a solid-state laser diode crystal";"02/01/2027"
-"NASA Langley Research Center";"Issued";"LAR-17235-1";7414708;"11/461,569";"Multi-Point, Multi-Component Interferometric Rayleigh/Mie Doppler Velocimeter";"08/01/2026"
-"NASA Langley Research Center";"Issued";"LAR-17237-1";8294989;"12/512,344";"Photonic DART (Densely Accumulated Ray-point by micro-zone-plaTe)";"04/25/2031"
-"NASA Langley Research Center";"Issued";"LAR-17240-1";8111943;"12/423,907";"Computational Visual Servo:Automatic Measurement and Control for Smart Image Enhancement";"09/14/2030"
-"NASA Langley Research Center";"Issued";"LAR-17241-1";8018815;"12/490,747";"Optical Data Storage System with Micro Zone Plate";"12/05/2029"
-"NASA Langley Research Center";"Issued";"LAR-17242-1";8174695;"12/508,018";"MICRO-RING THIN-FILM SPECTROMETER ARRAY";"09/03/2030"
-"NASA Langley Research Center";"Issued";"LAR-17243-1";8411214;"12/144,937";"Variable Visibility Glasses for Flight Training";"02/01/2032"
-"NASA Langley Research Center";"Issued";"LAR-17245-1";8344281;"12/751,075";"Use of Beam Deflection to Control Electron Beam Wire Deposition Processes";"04/26/2031"
-"NASA Langley Research Center";"Issued";"LAR-17257-1";7590904;"11/531,703";"Detecting the loss of configuration access of reprogrammable Field Programmable Gate Array (FPGA) without external circuitry";"10/07/2027"
-"NASA Langley Research Center";"Issued";"LAR-17267-1";7704553;"11/710,386";"Method of Depositing Metals onto Carbon Allotropes and Compositions Therefrom";"06/26/2028"
-"NASA Langley Research Center";"Issued";"LAR-17268-1";7647543;"11/535,574";"Integrated mitigation for single event upset (SEU) of reprogrammable field programmable gate arrays (FPGA) operating in radiation environments";"09/27/2026"
-"NASA Langley Research Center";"Issued";"LAR-17280-1";7159774;"11/305,854";"Magnetic Field Response Measurement Acquisition System";"04/30/2024"
-"NASA Langley Research Center";"Issued";"LAR-17286-1";8081734;"12/628,446";"Miniature, Low-Power X-Ray Tube Using A Microchannel Electron Generator Electron Source";"02/26/2030"
-"NASA Langley Research Center";"Issued";"LAR-17290-1";7737867;"11/696,333";"Advance Display Media for Improved Airport Surface Operations";"06/11/2028"
-"NASA Langley Research Center";"Issued";"LAR-17293-1";7991491;"11/559,420";"Control Device And Method For Generating Control Signals For Technical Devices";"03/04/2030"
-"NASA Langley Research Center";"Issued";"LAR-17294-1";8430327;"11/671,089";"Low Profile Sensors Using Self-Resonating Inductors";"08/22/2028"
-"NASA Langley Research Center";"Issued";"LAR-17295-1";7683797;"11/671,131";"System For Providing Damage Detection And Thermal Protection";"02/15/2028"
-"NASA Langley Research Center";"Issued";"LAR-17300-1";7538860;"11/840,363";"A Method and Apparatus for Determination of the Reflection Wavelength of Multiple Low-Reflectivity Bragg Gratings in a Single Fiber";"12/31/2027"
-"NASA Langley Research Center";"Application";"LAR-17307-1";0;"11/466,569";"Low Mass Free Piston Space Radiator";
-"NASA Langley Research Center";"Issued";"LAR-17317-1";8401217;"11/780,500";"Extreme Low Frequency Acoustic Measurement Portable System";"11/29/2030"
-"NASA Langley Research Center";"Application";"LAR-17317-2";;"13/771,735";"Extreme Low Frequency Acoustic Measurement System";"07/20/2027"
-"NASA Langley Research Center";"Application";"LAR-17318-1";0;"13/082,734";"Preparation of Metal Nanowire Decorated Carbon Allotropes";"08/29/2027"
-"NASA Langley Research Center";"Issued";"LAR-17321-1";8545986;"12/043,276";"Ultra High-Temperature, Lightweight Insulation Material Compositions And Methods For Making And Using Them";"06/27/2030"
-"NASA Langley Research Center";"Application";"LAR-17323-1";0;"11/757,780";"Concept And Design Of Oxygen Band Radar For Surface Air Pressure Remote Sensing";
-"NASA Langley Research Center";"Issued";"LAR-17325-1";8060350;"12/56,686";"Unsteady aerodynamic reduced-order models (ROMs) for efficient aeroelastic analysis";"03/04/2030"
-"NASA Langley Research Center";"Issued";"LAR-17327-1";8117013;"12/002,857";"Standardized Radiation Shield Design Method: 2005 HZETRN";"07/05/2030"
-"NASA Langley Research Center";"Application";"LAR-17330-1";0;"11/946,207";"Multi Functional Composite And Honeycomb Panels";
-"NASA Langley Research Center";"Issued";"LAR-17332-1";7958733;"11/762,827";"Active Flow Effectors by Embedded Shape Memory Alloy Actuation";"11/04/2029"
-"NASA Langley Research Center";"Application";"LAR-17332-2";;"13/096,305";"Jet Engine Exhaust Nozzle Flow Effector";"07/05/2027"
-"NASA Langley Research Center";"Issued";"LAR-17335-1";8170234;"12/108,562";"Extension Of DAMAS Phased Array Processing For Spatial Coherence Determination (DAMAS-C)";"03/02/2031"
-"NASA Langley Research Center";"Issued";"LAR-17346-1";7649439;"11/465,503";"Thermoelectric Devices From Thin Metal System To Include Flexible Substrate And Method Of Making Same";"04/28/2027"
-"NASA Langley Research Center";"Issued";"LAR-17355-1";8164485;"11/863,964";"A Method of Providing a Synthetic Vision System Flight Management Visualization Display for Aiding Pilot Preview, Rehearsal and/or Review and Real-Time Visual Acquisition of Flight Mission Progress";"06/24/2029"
-"NASA Langley Research Center";"Application";"LAR-17361-1";0;"12/138,709";"Airfoil/ Wing Flow Control Using Flexible Extended Trailing Edge";
-"NASA Langley Research Center";"Issued";"LAR-17365-1";7784732;"11/958,673";"Boundary-Layer-Ingesting S-Duct Diffusing Inlet Flow Control Using Hybrid Vane/Jet Approach at Transonic Flow Conditions";"04/26/2029"
-"NASA Langley Research Center";"Issued";"LAR-17381-1";8044294;"12/254,016";"Thermoelectric material made with highly oriented twinned alloy of Si, Ge, C, and Sn on the basal plane of trigonal substrate and thermoelectric device made with the same material";"10/11/2029"
-"NASA Langley Research Center";"Issued";"LAR-17382-1";8052069;"12/393,238";"Advanced High Performance Vertical Hybrid Electroactive Synthetic Jet Actuator (ASJA-V)";"10/18/2029"
-"NASA Langley Research Center";"Issued";"LAR-17384-1";8662412;"12/354,808";"Advanced Modified High Performance Synthetic Jet Actuator With Optimized Curvature Shape Chamber (ASJA-M)";"10/27/2031"
-"NASA Langley Research Center";"Issued";"LAR-17385-1";7671306;"11/589,011";"Apparatus For Free Electron Laser Ablative Synthesis Of Carbon Nanotubes";"03/10/2028"
-"NASA Langley Research Center";"Application";"LAR-17386-1";0;"12/851,584";"Fine-Grained Targets For Free Electron Laser Synthesis Of Carbon Nanotubes";
-"NASA Langley Research Center";"Issued";"LAR-17387-1";7663077;"11/589,010";"Process For Optimizing The Yield And Production Rate Of Single-Walled Carbon Nanotubes Using Free Electron Laser Synthesis";"01/23/2028"
-"NASA Langley Research Center";"Issued";"LAR-17390-1";8235309;"12/355,782";"Advanced High Performance Horizontal Piezoelectric Hybrid Synthetic Jet Actuator (ASJA-H)";"04/02/2031"
-"NASA Langley Research Center";"Issued";"LAR-17391-1";7792015;"12/187,458";"A Byzantine-Fault Tolerant Self-Stabilizing Protocol for Distributed Clock Synchronization Systems";"08/14/2028"
-"NASA Langley Research Center";"Issued";"LAR-17402-1";7964698;"11/935,036";"Wholly Aromatic Liquid Crystalline Polyetherimide (LC-PEI) Resin for manufacturing high modulus fibers, films, injection molded articles and foams";"09/27/2029"
-"NASA Langley Research Center";"Issued";"LAR-17405-1";8226767;"12/254,134";"Hybrid Bandgap Engineering for Rhombohedral Super-Hetero-Epitaxy";"05/11/2031"
-"NASA Langley Research Center";"Application";"LAR-17413-2";0;"12/641,603";"Nanoparticle-Containing Thermoplastic Composites and Methods of Preparing Same";
-"NASA Langley Research Center";"Issued";"LAR-17425-1";8059273;"12/496,788";"Micro Spectrometer for Parallel Light";"08/19/2029"
-"NASA Langley Research Center";"Application";"LAR-17427-1";0;"12/174,360";"Tailorable Dielectric Materials with Complex Permittivity Characteristics providing High Dielectric Constants and Low Loss Factors";
-"NASA Langley Research Center";"Issued";"LAR-17432-1";8112243;"12/118,172";"Forward Voltage Short Pulse (FVSP) Technique for Measuring High Power Laser Diode Array (LDA) Junction Temperature";"11/27/2030"
-"NASA Langley Research Center";"Issued";"LAR-17433-1";7902815;"11/856,807";"A Multi-Measurement Wheel Sensor";"06/19/2029"
-"NASA Langley Research Center";"Issued";"LAR-17440-1";7845215;"11/844,571";"Resonant Difference-Frequency Atomic Force Ultrasonic Microscope";"02/03/2029"
-"NASA Langley Research Center";"Issued";"LAR-17444-1";8042739;"11/864,012";"Wireless Tamper Detection Sensor Requiring No Electrical Connection";"11/08/2029"
-"NASA Langley Research Center";"Issued";"LAR-17447-1";8002219;"11/941,119";"Multifunctional Boost Protective Cover (MBPC) For A Launch Abort System (LAS)";"01/16/2030"
-"NASA Langley Research Center";"Application";"LAR-17455-3";;"13/938,622";"A Nanotube Film Electrode and an Electroactive Device Fabricated with the Nanotube Film Electrode and Methods for Making Same";"10/28/2031"
-"NASA Langley Research Center";"Issued";"LAR-17469-1";8094306;"12/487,735";"Micro Ring Grating Spectrometer with Moveable Aperture Slit";"08/27/2030"
-"NASA Langley Research Center";"Issued";"LAR-17477-1";7993567;"12/131,420";"Auxiliary Electrode For Electrospinning Process";"10/02/2029"
-"NASA Langley Research Center";"Issued";"LAR-17478-1";7883052;"11/954,452";"Integration Of A Turbo-Fan Engine Above An Aircraft's Wing Which Reduces Drag And Community Noise";"09/24/2029"
-"NASA Langley Research Center";"Issued";"LAR-17480-1";7711509;"11/930,222";"A Method To Calibrate Magnetic Response Fluid-Level Sensors Using Complete Sensor Immersion In Fluid";"03/18/2028"
-"NASA Langley Research Center";"Issued";"LAR-17485-1";7851062;"12/124,273";"Composition of and Method to Prepare Hybrid Laminates from Metal Plasma Coated Fibers and Polymer Matrix Resins";"09/09/2028"
-"NASA Langley Research Center";"Issued";"LAR-17485-2";8017190;"12/906,633";"Metal/Fiber Laminate and Fabrication Using A Porous Metal/Fiber Preform";"05/21/2028"
-"NASA Langley Research Center";"Issued";"LAR-17487-1";8157207;"11/836,517";"Jet Engine Nozzle Exit Configurations And Associated Systems And Methods";"04/15/2029"
-"NASA Langley Research Center";"Issued";"LAR-17488-1";7814786;"12/015,626";"Thin-Film Sensor For Measuring Liquid-Level And Temperature Having No Electrical Connections";"08/26/2028"
-"NASA Langley Research Center";"Issued";"LAR-17493-1";8424200;"12/098,000";"Conducting Nanotubes Or Nanostructures Based Composites, Method Of Making Them And Applications";"05/16/2031"
-"NASA Langley Research Center";"Issued";"LAR-17502-1";8529249;"11/860,703";"Quick Change Ceramic Flame Holder for High Output Torch";"03/14/2030"
-"NASA Langley Research Center";"Application";"LAR-17502-1-CON";;"14/021,325";"Flame Holder System";"09/25/2027"
-"NASA Langley Research Center";"Issued";"LAR-17514-1";8196858;"12/721,833";"Mars Airplane";"02/15/2031"
-"NASA Langley Research Center";"Issued";"LAR-17526-1";7991595;"12/138,768";"Adaptive Refinement Tools (ARTs) for Tetrahedral Unstructured Grids";"06/07/2029"
-"NASA Langley Research Center";"Issued";"LAR-17528-1";7878348;"12/248,339";"Lightweight Lunar Surface Remote Manipulator System (LSRMS)";"10/09/2028"
-"NASA Langley Research Center";"Issued";"LAR-17535-1";8206674;"12/152,414";"High Pressure Boron Vaporization Synthesis Of Few-Walled Boron Nitride Nanotube Fibers";"04/13/2030"
-"NASA Langley Research Center";"Issued";"LAR-17539-1";8164328;"12/493,573";"Development Of Eddy Current Techniques For The Detection Of Stress Corrosion Cracking In Space Shuttle Primary Reaction Control Thrusters";"01/08/2030"
-"NASA Langley Research Center";"Issued";"LAR-17547-1";7848381;"12/366,722";"Line Tunable Visible and Ultraviolet Laser";"07/05/2029"
-"NASA Langley Research Center";"Issued";"LAR-17553-1";8257491;"12/288,379";"NEW RHOMBOHEDRAL ALIGNMENT OF CUBIC SEMICONDUCTOR ON TRIGONAL SUBSTRATE AT A HIGH TEMPERATURE";"07/06/2031"
-"NASA Langley Research Center";"Issued";"LAR-17554-1";7769135;"12/288,380";"X-ray Diffraction Wafer Mapping Method for Rhombohedral Super-Hetero-Epitaxy";"10/20/2028"
-"NASA Langley Research Center";"Application";"LAR-17555-1";0;"13/020,194";"Front-Flight-Path Turbulence & Vortex Detection System";
-"NASA Langley Research Center";"Issued";"LAR-17573-1";7855368;"12/178,173";"Air Coupled Acoustic Thermography Nondestructive Evaluation System And Method";"10/09/2028"
-"NASA Langley Research Center";"Issued";"LAR-17576-1";7742663;"12/261,376";"Innovative Structural Design And Materials For Transmission To And Protection Of Ultraviolet And Infrared Radiation Sensors";"10/30/2028"
-"NASA Langley Research Center";"Issued";"LAR-17579-1";8673649;"12/463,475";"Wireless Chemical Sensing Using Changes To An Electrically Conductive Reactant Within Sensor's Magnetic Field";"01/04/2031"
-"NASA Langley Research Center";"Issued";"LAR-17593-1";8167204;"12/253,422";"Open Circuit Damage Location Sensor Having No Electrical Connections";"10/30/2030"
-"NASA Langley Research Center";"Issued";"LAR-17608-1";7901611;"12/274,652";"Methodology for calculating fiber distribution during electrospinning";"01/12/2029"
-"NASA Langley Research Center";"Issued";"LAR-17609-1";8255732;"12/429,603";"A Self-Stabilizing Byzantine-Fault-Tolerant Clock Synchronization Protocol";"12/30/2030"
-"NASA Langley Research Center";"Issued";"LAR-17629-1";7813599;"12/390,606";"A Method for Shape Determination of Multi-Core Optical Fiber";"02/23/2029"
-"NASA Langley Research Center";"Issued";"LAR-17634-1";7893602;"12/328,162";"Distributed transducer capable of generating or sensing a transverse point load";"03/14/2029"
-"NASA Langley Research Center";"Application";"LAR-17636-1";0;"13/752,495";"PICA on Edge: Edgewise strips of PICA ablator to eliminate gaps in capsule heat shield";"01/29/2033"
-"NASA Langley Research Center";"Issued";"LAR-17638-1";8508413;"13/082,839";"Fractal Dielectric Microstrip Antenna using Patterned Substrate Material Geometries";"03/02/2032"
-"NASA Langley Research Center";"Issued";"LAR-17651-1";8259104;"12/493,666";"Domain Decomposition By the Advancing-Partition Method for Parallel Unstructured Grid Generation";"03/09/2031"
-"NASA Langley Research Center";"Issued";"LAR-17655-1";8111832;"12/424,793";"Local Intelligence Based Impedance Optimization Scheme for Adaptive Noise Reduction";"06/25/2030"
-"NASA Langley Research Center";"Issued";"LAR-17656-1";8108178;"12/467,475";"DIRECTED DESIGN OF EXPERIMENTS FOR VALIDATING PROBABILITY OF DETECTION CAPABILITY OF NDE SYSTEMS (DOEPOD)";"05/05/2030"
-"NASA Langley Research Center";"Application";"LAR-17668-1";0;"12/322,591";"Device for the Large-Scale synthesis of High-Quality Boron Nitride Nanotubes";"02/04/2029"
-"NASA Langley Research Center";"Issued";"LAR-17681-1";8347479;"12/849,906";"Thermally-Activated Crack Healing Mechanism for Metallic Materials";"04/30/2031"
-"NASA Langley Research Center";"Application";"LAR-17681-2";;"13/719,740";"System for Repairing Cracks in Structures";"08/04/2030"
-"NASA Langley Research Center";"Issued";"LAR-17681-3";8679642;"14/037,850";"System for Repairing Cracks in Structures";"08/04/2030"
-"NASA Langley Research Center";"Application";"LAR-17689-1";0;"12/393,289";"Negative Dielectric Constant Material Based on Ion Conducting Materials";"08/20/2031"
-"NASA Langley Research Center";"Application";"LAR-17694-1";0;"12/974,359";"A Synthetic Quadrature Phase Detector/Demodulator for Fourier Transform Spectrometers";"03/09/2032"
-"NASA Langley Research Center";"Issued";"LAR-17695-1";8658004;"12/470,689";"Vapor-Barrier Vacuum Isolation System";"08/01/2032"
-"NASA Langley Research Center";"Application";"LAR-17696-1";0;"12/543,686";"Asymmetric Dielectric Elastomer Composite Material";"03/16/2031"
-"NASA Langley Research Center";"Issued";"LAR-17705-1";8672107;"13/042,655";"Tunable damper capable of tailoring the structural damping for individual modes of vibration using minimal space and minimal impact on the system frequencies and mode shapes.";"11/28/2031"
-"NASA Langley Research Center";"Issued";"LAR-17709-1";7912101;"12/628,423";"Increased Efficiency Nonlinear Optical Interactions";"12/01/2029"
-"NASA Langley Research Center";"Issued";"LAR-17711-1";8179203;"12/569,984";"Wireless Electrical Applications/Devices Using floating Electrodes Electromagnetically Coupled to Open-Circuit Devices";"07/09/2030"
-"NASA Langley Research Center";"Application";"LAR-17723-1";0;"12/699,334";"Novel material for wound healing applications.";
-"NASA Langley Research Center";"Issued";"LAR-17724-1";8378659;"12/703,221";"Electroactive polymer fibers for structural health monitoring.";"01/22/2031"
-"NASA Langley Research Center";"Issued";"LAR-17735-1";8490463;"12/881,431";"Assessment and Calibration of Crimp Tool Equipped with Ultrasonic Analysis, including Phantom Construction";"10/22/2031"
-"NASA Langley Research Center";"Issued";"LAR-17736-1";8147920;"12/370,755";"Controlled Deposition And Alignment Of Carbon Nanotubes (Continuation of LAR 16499-1)";"02/13/2029"
-"NASA Langley Research Center";"Application";"LAR-17738-1";0;"12/685,280";"Sensory Metallic Materials";
-"NASA Langley Research Center";"Issued";"LAR-17743-1";8473663;"13/011,198";"Reconfigurable Peripheral Component Interconnect local bus controller and target design.";"10/07/2031"
-"NASA Langley Research Center";"Issued";"LAR-17745-1";7906043;"12/550,431";"Electrically Conductive, Optically Transparent Polymer/Carbon Nanotube Composites And Process For Preparation Thereof";"11/01/2022"
-"NASA Langley Research Center";"Application";"LAR-17877-1";;"13/277,859";"Autonomous Leading-Edge Slat Device for Reduction of Aeroacoustic Noise Associated with Aircraft Wings";
-"NASA Langley Research Center";"Application";"LAR-17747-1";0;"13/029,471";"Temperature Sensing Using Temperature Sensitive Dielectric Material in Proximity to Open-Circuit Sensors Having No Electrical Connections";
-"NASA Langley Research Center";"Application";"LAR-18090-1";;"13/786,608";"No Moving Part - Variable Frequency Fluidic Oscillator";"03/06/2033"
-"NASA Langley Research Center";"Application";"LAR-17747-1-CON";;"14/193,861";"Wireless Temperature Sensor Having No Electrical Connections and Sensing Method for Use Therewith";"02/17/2031"
-"NASA Langley Research Center";"Issued";"LAR-17748-1";8303922;"12/546,185";"Exfoliation of Hexagonal Boron Nitride";"11/19/2030"
-"NASA Langley Research Center";"Issued";"LAR-17759-1";7935414;"12/406,315";"Multilayer Electroactive Polymer Composite Material (Continuation of LAR 17112-1)";"03/18/2029"
-"NASA Langley Research Center";"Issued";"LAR-17766-1";8452073;"12/750,991";"Method for Closed Loop Process Control for Electron Beam Freeform Fabrication and Deposition Processes";"10/02/2031"
-"NASA Langley Research Center";"Application";"LAR-17769-1";0;"12/894,279";"Modifying Surface Energy via Laser Ablative Surface Patterning";
-"NASA Langley Research Center";"Application";"LAR-17777-1";;"13/443,940";"Process to Fabricate Specific Sized Monodisperse Polystryene Microparticles";
-"NASA Langley Research Center";"Application";"LAR-17780-1";0;"12/387,703";"Boron Nitride Nanotube Fibrils and Yarns (Filed by JLabs, their ref: ID 1248/Docket 2025(JSA)";
-"NASA Langley Research Center";"Application";"LAR-17786-1";0;"12/964,381";"Smart Optics Material Characterization System";
-"NASA Langley Research Center";"Application";"LAR-17789-1";0;"12/969,076";"Electroactive scaffold";
-"NASA Langley Research Center";"Application";"LAR-17791-1";0;"13/070,552";"Apparatus and Method for Selective Enhancement of Surface Plasmon Polaritons to Initiate and Sustain Low Energy Nuclear Reactions in Metal Hydride Systems";
-"NASA Langley Research Center";"Issued";"LAR-17799-1";8655513;"13/046,030";"Realtime 3-D Image Processing and Enhancement";"05/25/2031"
-"NASA Langley Research Center";"Application";"LAR-17800-1";0;"13/527,638";"Method for generating laser linear frequency modulation waveform";
-"NASA Langley Research Center";"Application";"LAR-17801-1";0;"13/566,077";"Coherent Doppler lidar for measuring altitude, ground velocity, and air velocity of aircraft and spaceborne vehicles";"08/03/2032"
-"NASA Langley Research Center";"Application";"LAR-17813-1";0;"13/198,817";"Durable Joining Technology for Uniformly-Curved Composite Sandwich Structures";"08/17/2032"
-"NASA Langley Research Center";"Application";"LAR-17813-1-CON";;"14/200,708";"Systems, Apparatuses, and Methods for Using Durable Adhesively Bonded Joints for Sandwich Structures";"08/05/2031"
-"NASA Langley Research Center";"Application";"LAR-17830-1";0;"12/925,047";"Actuators and Sensors Fabricated with Boron Nitride Nanotubes (BNNTs) and BNNT Polymer Composites";
-"NASA Langley Research Center";"Issued";"LAR-17831-1";8651429;"13/214,453";"Blended Cutout Flap Design for the Reduction of Jet-Flap Interaction Noise";"08/22/2031"
-"NASA Langley Research Center";"Application";"LAR-17832-1";0;"13/214,469";"Aircraft Engine Nozzle Systems for Jet Noise Reduction by Acoustic Shielding";
-"NASA Langley Research Center";"Application";"LAR-17833-1";0;"13/214,481";"Active Aircraft Pylon Noise Control System";
-"NASA Langley Research Center";"Issued";"LAR-17836-1";8671763;"12/850,708";"Sub-Surface Windscreen for Outdoor Measurement of Infrasound";"02/18/2031"
-"NASA Langley Research Center";"Application";"LAR-17841-1";0;" 14/202,699";"High Mobility Transport Layer Structures for Rhombohedral Si/Ge/SiGe Devices";"03/10/2034"
-"NASA Langley Research Center";"Application";"LAR-17848-1";0;"13/796,626";"Spectroscopy using Electric Permittivity, Magnetic Permeability and Electrical Conductivity Spatial Profiles";"03/12/2033"
-"NASA Langley Research Center";"Issued";"LAR-17856-1";8198976;"12/688,309";"Flexible Thin Metal Film Thermal Sensing System (CIP of LAR 17346-1)";"09/20/2030"
-"NASA Langley Research Center";"Application";"LAR-17857-1";0;"12/967,690";"A GPS-Based Pitot-Static Calibration Method Using Global Output-Error Optimization";
-"NASA Langley Research Center";"Application";"LAR-17869-1";;"13/166,226";"Team Electronic Gameplay Combining Different Means of Control";
-"NASA Langley Research Center";"Application";"LAR-17886-1";;"13/324,527";"Method and Apparatus to Detect Wire Pathologies Near Crimped Connector";
-"NASA Langley Research Center";"Application";"LAR-17887-1";;"13/743,750";"Interrogations Leading to Recertification of Wire Crimps and Other Joining Technologies.";"01/17/2033"
-"NASA Langley Research Center";"Issued";"LAR-17888-1";8605262;"13/167,093";"Time Shifted PN Codes for CW LIDAR, RADAR, and SONAR";"12/28/2031"
-"NASA Langley Research Center";"Issued";"LAR-17894-1";8494687;"13/166,121";"3-D Super Resolution Algorithm for Flash LIDAR Image Enhancement";"12/11/2031"
-"NASA Langley Research Center";"Application";"LAR-17895-1";;"13/166,166";"Method and System for Physiologically Modulating Videogames or Simulations Which Use Motion-Sensing Input Devices";
-"NASA Langley Research Center";"Application";"LAR-17902-1";;"13/068,329";"Neutron and Ultraviolet Radiation Shielding Films Fabricated Using Boron Nitride Nanotubes and Boron Nitride Nanotube Composites";
-"NASA Langley Research Center";"Application";"LAR-17906-1";;"13/272,027";"Abnormal Grain Growth Suppression in Aluminum Alloys";
-"NASA Langley Research Center";"Issued";"LAR-17908-1";8655094;"13/105,004";"New Photogrammetry System to Measure Relative 6-Degree-of-Freedom Motion Between Two Bodies Using Heterogeneous Cameras Having Arbitrary Wide-Angle Lenses with Non-Overlapping Fields of View";"04/23/2032"
-"NASA Langley Research Center";"Application";"LAR-17918-1";;"13/136,216";"High Kinetic Energy Penetrator Shielding and High Wear Resistance Materials Fabricated with Boron Nitride Nanotubes (BNNTs) and BNNT Polymer Composites";
-"NASA Langley Research Center";"Issued";"LAR-17919-1";8661653;"13/191,882";"Z-Shields from Fiber Metal Laminate";"07/27/2031"
-"NASA Langley Research Center";"Application";"LAR-17919-2";;"13/963,484";"Z-Shields from Fiber Metal Laminate";"07/27/2031"
-"NASA Langley Research Center";"Application";"LAR-18097-1";;"13/591,320";"Arbitrary Shape Initialization of Fiber Optic Shape Sensing Systems";"08/22/2032"
-"NASA Langley Research Center";"Application";"LAR-17923-1";;"13/411,793";"A Method of Creating Micro-scale Silver Telluride Grains Covered with Bismuth Nanospheres as Nano-bridges for Thermoelectric Application";"11/14/2032"
-"NASA Langley Research Center";"Application";"LAR-17947-1";;"13/775,809";"Linear Fresnel Spectrometer Chip with Gradient Line Grating";"02/25/2033"
-"NASA Langley Research Center";"Application";"LAR-17952-1";;"13/411,891";"Multi-Point Interferometric Phase Change Detection Algorithm";
-"NASA Langley Research Center";"Application";"LAR-17958-1";;"13/195,251";"Wireless Open-Circuit In-Plane Strain and Displacement Sensors Having No Electrical Connections";"07/16/2032"
-"NASA Langley Research Center";"Issued";"LAR-17959-1";8087494;"12/894,326";"Method of Making a Composite Panel Having Subsonic Transverse Wave Speed Characteristics (Continuation of LAR 16535-1)";"09/30/2030"
-"NASA Langley Research Center";"Application";"LAR-17966-1";;"13/457,687";"Wide Bandwidth Magneto-Resistive Sensor Based Eddy Current Probe";
-"NASA Langley Research Center";"Application";"LAR-17967-1";;"13/293,846";"Relaxor Piezoelectric Single Crystal Multilayer Stacks for Energy Harvesting Transducers (RPSEHT)";
-"NASA Langley Research Center";"Application";"LAR-17972-1";;"13/200,314";"BxCyNz Nanotube Formation via the Pressurized Vapor/Condenser";
-"NASA Langley Research Center";"Application";"LAR-17973-1";;"13/200,316";"Efficient Boron Nitride Nanotube (BNNT) and BxCyNz Nanotube Formation via Combined Laser-Gas Flow Levitation (JLab's ref: 2010-09-13-RRW)";
-"NASA Langley Research Center";"Application";"LAR-17977-1";;"13/447,513";"Variable Stiffness Shape Adaptive Multi-Layered Polymer Composite";
-"NASA Langley Research Center";"Application";"LAR-17980-1";;"13/457,540";"Space Utilization Optimization Tools";
-"NASA Langley Research Center";"Application";"LAR-17984-1";;"13/326,779";"FLEXible Side Edge Link (FLEXSEL) for Trailing-Edge Flap Aeroacoustic Noise Reduction";"12/15/2031"
-"NASA Langley Research Center";"Application";"LAR-17985-1";;"13/231,386";"An Acoustic Beamforming Array Using Feedback-Controlled Microphones for Tuning and Self-Matching of Frequency Response (Michigan State University's ref: TEC2011-0045)";
-"NASA Langley Research Center";"Application";"LAR-17987-1";;"13/364,814";"A Self-Stabilizing Distributed Clock Synchronization Protocol For Arbitrary Digraphs";
-"NASA Langley Research Center";"Application";"LAR-17991-1";;"13/200,315";"Production Rig for the Synthesis of BNNTs via the PVC Method";
-"NASA Langley Research Center";"Issued";"LAR-17993-1";8662213;"13/342,264";"Locomotion of Amorphous Surface Robots";"05/06/2032"
-"NASA Langley Research Center";"Application";"LAR-17993-2";;"14/189,019";"Locomotion of Amorphous Surface Robots";"01/03/2033"
-"NASA Langley Research Center";"Application";"LAR-17994-1";;"13/273,516";"Manufacturing of Low Mass, Large-Scale Hierarchical Thin Film Structural Systems";
-"NASA Langley Research Center";"Application";"LAR-17996-1";;"14/202,289";"Nanostructure Neutron Converter Layer Development";"03/10/2034"
-"NASA Langley Research Center";"Issued";"LAR-18006-1";8671551;"13/363,413";"Crimp Quality Assessment from Jaw Position-Ultrasonic Transmission Analysis";"02/01/2032"
-"NASA Langley Research Center";"Application";"LAR-18006-2";;"14/193,086";"Crimp Quality Assessment from Jaw Position-Ultrasonic Transmission Analysis";"02/01/2032"
-"NASA Langley Research Center";"Issued";"LAR-18016-1";8636407;"13/029,426";"Wireless Temperature Sensor Having No Electrical Connections and Sensing Method For Use Therewith";"11/23/2031"
-"NASA Langley Research Center";"Application";"LAR-18021-1";;"13/417,347";"Flap Side Edge Liners for Airframe Noise Reduction";"07/31/2032"
-"NASA Langley Research Center";"Application";"LAR-18023-1";;"13/417,349";"Landing Gear Door Liners for Airframe Noise Reduction";"03/12/2032"
-"NASA Langley Research Center";"Application";"LAR-18024-1";;"13/417,351";"External Acoustic Liners for Multi-Functional Aircraft Noise Reduction";
-"NASA Langley Research Center";"Application";"LAR-18026-1";;"13/286,715";"Synthesis of Novel Copoly(imide oxetane)s with Unique Surface Properties";
-"NASA Langley Research Center";"Application";"LAR-18257-1";;"14/105,757";"A Structural Joint With Multi-Axis Load Carrying Capacity";"12/13/2033"
-"NASA Langley Research Center";"Issued";"LAR-18032-1";8229716;"12/981,432";"Fast Tracking Methods and Systems for Air Traffic Modeling Using a Monotonic Lagrangian Grid (US Naval Research Laboratory ref: 100148-US2)";"12/29/2030"
-"NASA Langley Research Center";"Application";"LAR-18034-1";;"13/291,372";"Compact Active Vibration Control System";
-"NASA Langley Research Center";"Application";"LAR-18037-1";;"13/453,717";"A Multifunctional Lightning Protection and Detection System for Aerospace Vehicles";
-"NASA Langley Research Center";"Application";"LAR-18040-1";;"13/986,089";"Multi-Functional BN-BN Composite";"03/29/2033"
-"NASA Langley Research Center";"Application";"LAR-18065-1";;"13/860,697";"Variable Acceleration Force Calibration System";"04/11/2033"
-"NASA Langley Research Center";"Application";"LAR-18070-1";;"13/923,307";"Transparent and Ubiquitous Sensing Technology";"06/20/2033"
-"NASA Langley Research Center";"Application";"LAR-18071-1";;"13/923,312";"Using Ubiquitous Conductor to Power and Interrogate Wireless Passive Sensors and Construct Sensor Network";
-"NASA Langley Research Center";"Application";"LAR-18073-1";;"13/941,441";"Doped Chiral Polymer Negative Index Materials (DCPNIM)";"07/12/2033"
-"NASA Langley Research Center";"Application";"LAR-18077-1";;"13/630,459";"Flight Deck Technology and Procedure for Pilots to Generate Flight-Optimizing Trajectory Requests that Avoid Nearby Traffic";"09/28/2032"
-"NASA Langley Research Center";"Application";"LAR-18089-1";;"13/786,713";"Synchronized Sweeping Jet Actuators";"03/06/2033"
-"NASA Langley Research Center";"Application";"LAR-18127-1";;"13/913,782";"Synergistic Chemical and Topographical Surface Modifications and Articles of Manufacture for Dynamic Insect Adhesion Mitigation";"06/10/2033"
-"NASA Langley Research Center";"Application";"LAR-18131-1";;"13/774,422";"Puncture- healing Thermoplastic Resin Carbon Fiber Reinforced Composites towards More Damage/Impact Tolerant Systems";
-"NASA Langley Research Center";"Application";"LAR-18132-1";;"13/673,360";"Modeling of Laser Ablation and Plume Chemistry in a Boron Nitride Nanotube Production Rig";"11/09/2032"
-"NASA Langley Research Center";"Application";"LAR-18143-1";;"13/694,286";"In-situ Mechanical Property Measurements of Amorphous Carbon-boron Nitride Nanotube";"11/15/2032"
-"NASA Langley Research Center";"Application";"LAR-18144-1";;"13/836,609";"Method and System for Physiologically Modulating Videogames and Simulations Which Use Gesture and Body Image Sensing Control Input Devices";"03/15/2033"
-"NASA Langley Research Center";"Application";"LAR-18160-1";;"13/864,396";"Tension Stiffened and Tendon Actuated Space Manipulators";"04/17/2033"
-"NASA Langley Research Center";"Application";"LAR-18166-1";;"13/764,062";"Reactive Orthotropic Lattice Diffuser (ROLD) for Reducing Aerodynamic Noise from Aircraft Flap Tips";"03/12/2032"
-"NASA Langley Research Center";"Application";"LAR-18179-1";;"13/792,489";"Extreme Reduced Instruction Set Computing (xRISC) for High Speed Execution of Computing Algorithms";"03/11/2033"
-"NASA Langley Research Center";"Application";"LAR-18183-1";;"13/834,294";"Height Control and Deposition Measurement for the Electron Beam Free Form Fabrication (EBF3) Process";"03/15/2033"
-"NASA Langley Research Center";"Application";"LAR-18184-1";;"13/987,706";"Conductive Polymer/Carbon Nanotube Structural Materials and Methods for Making Same";"08/23/2033"
-"NASA Langley Research Center";"Application";"LAR-18186-1";;"12/482,503";"Flexible Volumetric Structure";
-"NASA Langley Research Center";"Application";"LAR-18202-1";;"13/713,033";"Ground-to-Space Laser Calibration System";"12/13/2032"
-"NASA Langley Research Center";"Application";"LAR-18204-1";;"13/800,379";"Quasi-Static Electric Field Generator";"03/13/2033"
-"NASA Langley Research Center";"Application";"LAR-18211-1";;"13/781,918";"A Statistically Based Approach to Broadband Liner Design and Assessment";"03/01/2033"
-"NASA Langley Research Center";"Application";"LAR-18217-1";;"13/771,116";"A Graphical Acoustic Liner Design and Analysis Tool";"02/20/2033"
-"NASA Langley Research Center";"Application";"LAR-18246-1";;"13/765,714";"Tethered Vehicle Control and Tracking System";"02/13/2033"
-"NASA Langley Research Center";"Application";"LAR-18266-1";;"14/079,914";"Airborne Wind Profiling Algorithm for Doppler Wind Lidar (APOLO)";"11/14/2033"
-"NASA Langley Research Center";"Application";"LAR-18267-1";;"13/838,260";"Method and System for Physiologically Modulating Action Role-playing Open World Video Games and Simulations Which Use Gesture and Body Image Sensing Control Input Devices";
-"NASA Langley Research Center";"Application";"LAR-18270-1";;"14/079,965";"Airborne Doppler Wind Lidar Post Data Processing Software DAPS-LV";"11/14/2033"
-"NASA Langley Research Center";"Application";"LAR-18301-1";;"13/838,163";"Flap Edge Noise Reduction Fins (FENoRFins)";"03/15/2033"
-"NASA Langley Research Center";"Application";"LAR-18318-1";;"14/191,898";"In-Situ Load System (ILS) for Calibrating and Validating Aerodynamic Properties of Scaled Aircraft in Ground-based Aerospace Testing Applications";"02/27/2034"
-"NASA Langley Research Center";"Application";"LAR-18374-1";;"14/072,019";"Modulated Sine Waves for Differential Absorption Measurements Using a CW Laser System";"06/23/2031"
-"NASA Glenn Research Center";"Issued";"LEW-16183-1";5866518;"08/786,360";"PS300 - Self Lubricating Readily Polished High Temperature Composite";"01/16/2017"
-"NASA Glenn Research Center";"Issued";"LEW-16519-2";6291838;"09/448,406";"Gas Sensing Diode";"11/15/2019"
-"NASA Glenn Research Center";"Issued";"LEW-16901-1";7190741;"10/274,756";"A Real-Time Signal-To-Noise Ratio Estimation Technique For BPSK And QPSK Modulation Using The Active Communications Channel";"10/21/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17153-1";6550696;"09/794,794";"Lean Direct Injection Combustor/Multi Point Integrate Module Fuel-Air Mixer";"02/27/2021"
-"NASA Glenn Research Center";"Issued";"LEW-17157-1";6869480;"10/198,668";"Method For Production Of Atomic Scale Step Height Reference Specimens With Atomically Flat Surfaces";"07/17/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17166-1";7497443;"11/121,850";"Resilient, Flexible, Pressure-Activated Seal";"05/03/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17167-1";6667725;"10/196,391";"Radio Frequency (RF) Telemetry System For Sensors And Actuators";"07/11/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17170-1";6706549;"10/124,689";"Common-Layered Architecture For Semiconductor Silicon Carbide (CLASSiC) Bulk Fabrication";"04/12/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17182-1";7086648;"10/652,088";"Acoustic Seal";"08/22/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17240-1";7427428;"10/601,657";"Mechanically Improved Interphase Coating For Silicon-Carbide Fiber-Reinforced Silicon-Carbide Matrix Composites";"06/24/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17256-1";6845664;"10/263,980";"MEMS Direct Chip Attach (MEMS-DCA) Packaging Methodologies For Harsh Environments";"10/03/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17256-2";7518234;"10/926,206";"MEMS Direct Chip Attach Packaging Methodologies And Apparatus For Harsh Environments";"08/25/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17269-2";8212138;"11/696,441";"Reverse-Bias Protected Solar Array With Integrated ByPass Battery";"04/04/2027"
-"NASA Glenn Research Center";"Application";"LEW-17269-3";0;"13/482,493";"Reverse-Bias Protected Solar Array With Integrated ByPass Battery";
-"NASA Glenn Research Center";"Issued";"LEW-17291-1";6784276;"10/202,643";"Improved Processing For Polyimdes Via Concentrated Solid Monomer Reactants Approach";"07/25/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17293-1";7023118;"10/390,256";"A Comprehensive C++ Controller For A Magnetically Supported Vertical Rotor: Version 1.0";"03/12/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17293-2";6809450;"10/729,580";"Software For System For Controlling A Magnetically Levitated Rotor";"12/04/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17299-1";6881820;"10/147,477";"Polyimide Rod-Coil Block Copolymers As Membrane Materials For Ion Conduction";"05/13/2022"
-"NASA Glenn Research Center";"Issued";"LEW-17317-1";7687016;"10/777,630";"Process For Improving Properties Of Silicon Carbide (SiC) Fibers And SiC Fiber-Reinforced Ceramic Matrix Composites";"02/13/2024"
-"NASA Glenn Research Center";"Application";"LEW-17317-2";0;"12/709,086";"Process For Improving Properties Of Silicon Carbide (SiC) Fibers And SiC Fiber-Reinforced Ceramic Matrix Composites";
-"NASA Glenn Research Center";"Issued";"LEW-17345-2";7813406;"11/402,997";"Temporal Laser Pulse Manipulation Using Multiple Optical Ring Cavities";"04/13/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17383-1";6967462;"10/455,139";"Wireless Consumer Power";"06/05/2023"
-"NASA Glenn Research Center";"Application";"LEW-17458-2";0;"13/113,458";"Compact Solid-state Entangled Photon Source";
-"NASA Glenn Research Center";"Issued";"LEW-17483-1";7191013;"10/983,230";"Hand Held Device For Wireless Powering And Interrogation Of BioMEMS Sensors And Actuators";"11/08/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17484-5";7268939;"11/363,300";"Tracking Of Cells With A Compact Microscope Imaging System Using Intelligent Controls";"02/24/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17494-1";7458221;"10/693,850";"Self-Sealing, Smart, Variable Area Nozzle (S3VAN) For Dynamic Flow Control In Gas Turbine Engines";"10/23/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17498-1";7187835;"11/44,063";"Selective Wavelength Filtering";"01/28/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17510-1";7416062;"10/693,853";"Torsional Magnetorheological Fluid Resistant Device (TMRFRD)";"10/23/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17517-1";7326027;"10/856,361";"Flow-Field Control-Rods To Stabilize Flow In A Centrifugal Compressor";"05/25/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17520-1";7259692;"10/931,205";"Hybrid Power Management (HPM) Upgrade";"09/01/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17551-1";7410714;"10/891,599";"Unitized Regenerative Fuel Cell System";"07/15/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17561-1";7400096;"10/894,225";"Large Area Permanent Magnet ECR Plasma Source";"07/19/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17589-1";7305935;"10/925,499";"Slotted Antenna Rectangular Waveguide Plasma Source For Ion Beam And Electron Beam Production";"08/25/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17592-1";7704622;"10/926,457";"New Ion Conducting Organic/Inorganic Hybrid Polymers";"08/26/2024"
-"NASA Glenn Research Center";"Application";"LEW-17595-1";0;"13/018,611";"A Method Of Improving The Thermo-Mechanical Properties Of Fiber-Reinforced Silicon Carbide Matrix Composites";
-"NASA Glenn Research Center";"Issued";"LEW-17605-1";8394492;"10/974,991";"Skin Modified Aerogel Monoliths For Improved Ruggedness And Lower Hydrophylicity";"10/28/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17618-1";7015304;"10/897,279";"High Tg Polyimides For Resin Transfer Molding (RTM)";"07/23/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17618-1-REIS";"RE43,880";"11/429,639";"Solvent-Free Low Melt Viscosity Imide Oligomers and Thermosetting Polyimide Composites";"05/08/2026"
-"NASA Glenn Research Center";"Application";"LEW-17618-3";;"13/952,872";"High Tg Polyimides For Resin Transfer Molding (RTM)";"07/29/2033"
-"NASA Glenn Research Center";"Issued";"LEW-17630-1";7534519;"11/228,185";"Bi-Electrode Supported Cell For High Power Density Solid Oxide Fuel Cells";"09/16/2025"
-"NASA Glenn Research Center";"Application";"LEW-17634-1";0;"11/228,184";"Solid Oxide Fuel Cell Stack Design With Bi-Electrode Supported Cells";
-"NASA Glenn Research Center";"Application";"LEW-17634-2";0;"12/860,210";"Solid Oxide Fuel Cell Stack Design With Bi-Electrode Supported Cells";
-"NASA Glenn Research Center";"Issued";"LEW-17642-2";7308164;"11/398,734";"Energetic Atomic And Ionic Oxygen Textured Optical Surfaces For Blood Glucose Monitoring";"03/23/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17642-4";7305154;"11/483,887";"Energetic Atomic And Ionic Oxygen Textured Optical Surfaces For Blood Glucose Monitoring";"07/11/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17661-1 with LEW-17765-1";7438030;"11/213,604";"Method of Fabricating Silicon Carbide Corrugated Diaphragms and Modular Actuator";"08/26/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17664-1";7500350;"11/44,471";"Elimination Of Lifetime Limiting Mechanism Of Hall Thrusters";"01/28/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17671-1";7493869;"11/311,183";"Very Large Area/Volume Microwave ECR Plasma And Ion Source";"12/16/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17672-1";7261783;"10/946,286";"Low Density High Creep Resistant Single Crystal Superalloy For Turbine Airfoils";"09/22/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17678-1";7624566;"11/40,304";"Magnetic Circuit For Hall Effect Plasma Accelerator";"01/18/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17694-1";7397978;"11/180,990";"Carrier Structure For Packaging Microphotonic Millimeter-Wave Receiver Based On Lithium Niobate Electro-Optic Resonator Disk Technology";"07/13/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17704-1";7250723;"11/16,735";"Cathode Luminescence Light Source For Broad Band Application In The Visible";"12/21/2024"
-"NASA Glenn Research Center";"Issued";"LEW-17765-1 with LEW-17661-1";7438030;"11/213,604";"Side Sliding Microactuator";"10/21/2025"
-"NASA Glenn Research Center";"Issued";"LEW-17786-1";8197249;"11/412,935";"Fully-Premixed Low-Emissions High-Pressure Multi-fuel Burner";"04/28/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17814-1";7574137;"11/418,304";"Multi-wavelength Time-coincident Optical Communications System";"05/05/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17820-1";7755292;"11/625,545";"Method For Ultraminiature Fiber Light Source";"01/22/2027"
-"NASA Glenn Research Center";"Issued";"LEW-17820-2";8264134;"12/795,356";"Method For Ultraminiature Fiber Light Source";"09/11/2032"
-"NASA Glenn Research Center";"Issued";"LEW-17825-1";8163243;"11/517,555";"Zero G Condensing Heat Exchanger With Integral Disinfection";"09/07/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17826-1";7385692;"11/412,924";"Method And System For Fiber Optic Determination Of Nitrogen And Oxygen Concentrations In Ullage Of Liquid Fuel Tanks";"04/28/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17859-1";7389675;"11/434,578";"Miniaturized Metal (Metal Alloy)/PdOx/SiC Schottky Diode Gas Sensors For Hydrogen And Hydrocarbons Detection At High Temperatures";"05/12/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17859-2";8001828;"12/143,139";"Miniaturized Metal (Metal Alloy) PdOx/Sic Hydrogen And Hydrocarbon Gas Sensors";"06/20/2028"
-"NASA Glenn Research Center";"Issued";"LEW-17877-1";7876276;"11/499,982";"Antenna Near-Field Probe Station Scanner";"08/02/2026"
-"NASA Glenn Research Center";"Application";"LEW-17877-2";;"12/857,004";"Antenna Near-Field Probe Station Scanner";
-"NASA Glenn Research Center";"Issued";"LEW-17904-1";7425650;"11/378,553";"Syntheis Of Asymmetric Dianhydrides";"03/15/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17904-2";7381849;"11/890,104";"Synthesis Of Asymmetrical Benzophenone Dianhydride And Asymmetrical 6F-Dianhydride And Polyimides Therefrom (ALSO See LEW 18236-1)";"07/19/2027"
-"NASA Glenn Research Center";"Application";"LEW-17915-1";0;"12/536,969";"Secure Optical Communications Using Quantum Two-Photon Transparency Modulation Spectroscopy";
-"NASA Glenn Research Center";"Issued";"LEW-17916-1";8052854;"11/754,255";"Miniature Amperometric Solid Electrolyte Carbon Dioxide Sensor";"05/25/2027"
-"NASA Glenn Research Center";"Application";"LEW-17916-2";;"13/267,978";"Miniature Amperometric Solid Electrolyte Carbon Dioxide Sensor";
-"NASA Glenn Research Center";"Application";"LEW-17945-1";0;"11/677,654";"Portable Unit For Metabolic Analysis PUMA";
-"NASA Glenn Research Center";"Issued";"LEW-17951-1";8545786;"10/621,752";"Manufacture Of Porous Net-Shaped Materials Comprising Alpha Or Beta Tricalcium Phosphate Or Mixtures Thereof";"07/16/2023"
-"NASA Glenn Research Center";"Issued";"LEW-17954-1";8016543;"11/695,435";"Composite Case Armor";"04/02/2027"
-"NASA Glenn Research Center";"Application";"LEW-17963-1";0;"11/860,661";"Passive Gas/Liquid Separation Within a Fuel Cell or Electrolysis Cell Using A Conductive Porous Separator";
-"NASA Glenn Research Center";"Issued";"LEW-17975-1";7382944;"11/489,813";"Aluminization And Hyperthermal Atomic Oxygen Texturing Of Polymethylmethacralate Optical Fibers For Blood Glucose Monitoring";"07/14/2026"
-"NASA Glenn Research Center";"Issued";"LEW-17991-1";7390161;"/0";"Toughened Composite Structures";"06/24/2025"
-"NASA Glenn Research Center";"Issued";"LEW-18003-1";7583169;"11/689,770";"RF MEMS Switches Utilizing Non-Metallic Thin Film Cantilevers/Bridges With Controlled Stress And Conductivity";"03/22/2027"
-"NASA Glenn Research Center";"Issued";"LEW-18042-1";8067478;"11/582,693";"A Method of Crosslinking Aerogels Using a One-pot Reaction Scheme";"10/16/2026"
-"NASA Glenn Research Center";"Application";"LEW-18042-2";0;"13/242,425";"A Method of Crosslinking Aerogels Using a One-pot Reaction Scheme";
-"NASA Glenn Research Center";"Application";"LEW-18043-1";7341040;"11/486,460";"Supercharged Two-Cycle Engines Employing Novel Single Element Reciprocating Shuttle Inlet Valve Mechanisms And With A Variable Compression Ratio";"07/14/2026"
-"NASA Glenn Research Center";"Application";"LEW-18048-1";0;"12/285,157";"Two And Three Dimensional Near Infrared Subcutaneous Structure Imager Using Adaptive Nonlinear Video Processing";
-"NASA Glenn Research Center";"Issued";"LEW-18049-1";7909897;"11/946,079";"Direct Fuel Impingement Planar-Array-Microreactor";"11/28/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18054-1";7501032;"11/364,283";"High Work Output Ni-Ti-Pt High Temperature Shape Memory Alloys And Associated Processing Methods";"02/28/2026"
-"NASA Glenn Research Center";"Issued";"LEW-18059-1";8242162;"11/956,848";"Fluorescent On-Off Chemical Sensors";"11/30/2019"
-"NASA Glenn Research Center";"Issued";"LEW-18076-1";7999173;"11/689,431";"Dust removal from solar cells";"03/21/2027"
-"NASA Glenn Research Center";"Application";"LEW-18076-2";;"13/198,896";"Dust Removal from Solar Cells";
-"NASA Glenn Research Center";"Issued";"LEW-18089-1";8077103;"11/774,574";"Cup Cylindrical Waveguide Antenna";"07/06/2027"
-"NASA Glenn Research Center";"Issued";"LEW-18138-1";7904282;"11/689,874";"In-Flight Fault Accommodation Through Automated Control Parameter Changes";"03/22/2027"
-"NASA Glenn Research Center";"Application";"LEW-18205-1";0;"12/317,232";"Branched Rod-Coil Polyimide-poly(ethylene Oxide) (PEO) Copolymers That Are Cured In The Solid State At Ambient Temperatures";
-"NASA Glenn Research Center";"Application";"LEW-18207-1";0;"11/759,570";"Circuit For Communication Over DC Power Line Using High Temperature Electronics";
-"NASA Glenn Research Center";"Issued";"LEW-18221-1";7763325;"11/864,607";"A Method For Thermal Spraying Of Coatings Using Resonant Pulsed Combustion";"09/28/2027"
-"NASA Glenn Research Center";"Application";"LEW-18221-2";;"12/835,345";"A Method For Thermal Spraying Of Coatings Using Resonant Pulsed Combustion";
-"NASA Glenn Research Center";"Issued";"LEW-18236-1";8093348;"11/894,290";"Synthesis Of Asymmetrical Benzophenone Dianhydride And Asymmetrical 6F-Dianhydride And Polyimides Therefrom";"08/22/2027"
-"NASA Glenn Research Center";"Application";"LEW-18236-2";0;"13/325,626";"Synthesis Of Asymmetrical Benzophenone Dianhydride And Asymmetrical 6F-Dianhydride And Polyimides Therefrom";
-"NASA Glenn Research Center";"Issued";"LEW-18248-1";7791552;"11/871,237";"Cellular Reflectarray Antenna";"10/12/2027"
-"NASA Glenn Research Center";"Issued";"LEW-18248-2";7990327;"12/874,370";"Cellular Reflectarray Antenna";"09/02/2030"
-"NASA Glenn Research Center";"Issued";"LEW-18253-1";8191426;"12/133,743";"Low TCR Nanocomposite Strain Gages";"06/05/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18254-1";7876423;"12/163,382";"Simultaneous Non-Contact Precision Measurement Of Microstructual And Thickness Variation In Dielectric Materials Using Terahertz Energy";"06/27/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18255-1";7630736;"11/541,102";"Autonomous Wireless Sensor Transceiver";"05/09/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18256-1";7688117;"12/081,762";"An N Channel JFET Based Digital Logic Gate Structure Using Resistive Level Shifters And Having Direct Application To High Temperature Silicon Carbide Electronics";"04/21/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18261-1";7933027;"12/326,436";"A Software Platform For Post-Processing Waveform-Based NDE";"12/02/2028"
-"NASA Glenn Research Center";"Application";"LEW-18291-1";0;"12/214,114";"Adaptive Morphological Feature-Based Object Classifier For A Color Imaging System";
-"NASA Glenn Research Center";"Application";"LEW-18296-1";0;"13/193,160";"Modular Battery Charge Controller";
-"NASA Glenn Research Center";"Issued";"LEW-18313-1";7923715;"12/336,503";"A Novel Nanoionics-based Switch For Radiofrequency (RF) Applications";"12/06/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18313-2";8410469;"13/050,229";"A Novel Nanoionics-based Switch For Radiofrequency (RF) Applications";"03/17/2031"
-"NASA Glenn Research Center";"Application";"LEW-18324-1";0;"12/195,358";"Semiconductor Metal Oxide Modified Solid Electrolyte Carbon Dioxide Microsensors With Reduced Operation Temperature";
-"NASA Glenn Research Center";"Issued";"LEW-18325-1";8415839;"12/319,617";"External Magnetic Field Reduction Techniquie For Advanced Stirling Radioisotope Generator";"01/09/2029"
-"NASA Glenn Research Center";"Application";"LEW-18325-2";;"13/859,179";"External Magnetic Field Reduction Techniquie For Advanced Stirling Radioisotope Generator";"01/09/2029"
-"NASA Glenn Research Center";"Issued";"LEW-18338-1";8506787;"12/533/258";"Advancd Lightweight, High-Strength Electrochemical Cell Design and Structures";"07/31/2029"
-"NASA Glenn Research Center";"Issued";"LEW-18340-1";8091445;"12/431,456";"Offset Compound Gear Inline Two-Speed Drive";"04/28/2029"
-"NASA Glenn Research Center";"Issued";"LEW-18340-2";8668613;"13/346,959";"Offset Compound Gear Inline Two-Speed Drive";"01/10/2032"
-"NASA Glenn Research Center";"Issued";"LEW-18356-1";8220989;"12/571,215";"Device for Measuring the Thermal Conductivity of Small, Highly Insulating Materials";"09/30/2029"
-"NASA Glenn Research Center";"Issued";"LEW-18356-2";8573835;"13/492,181";"Device for Measuring the Thermal Conductivity of Small, Highly Insulating Materials";"06/08/2032"
-"NASA Glenn Research Center";"Issued";"LEW-18362-1";7872750;"12/285,173";"Space Radiation Detector with Spherical Geometry";"09/30/2028"
-"NASA Glenn Research Center";"Issued";"LEW-18362-2";8159669;"12/972,624";"Space Radiation Detector with Spherical Geometry";"12/20/2030"
-"NASA Glenn Research Center";"Issued";"LEW-18373-1";8353209;"12/570,841";"A Radio Frequency Tank Eigenmode Sensor For Propellant Quantity Gauging";"02/04/2031"
-"NASA Glenn Research Center";"Issued";"LEW-18426-1";8484980;"12/894,346";"A Free-Jet Dual-Mode Combustor Concept for Wide Operating Range Ramjet Propulsion";"09/30/2030"
-"NASA Glenn Research Center";"Application";"LEW-18426-2";0;"13/941,987";"A Free-Jet Dual-Mode Combustor Concept for Wide Operating Range Ramjet Propulsion";"07/15/2033"
-"NASA Glenn Research Center";"Issued";"LEW-18432-1";7935601;"12/584,497";"Addendum of Self-Aligned Ion Implant to Design and Processing of SiC High Temperature Transistors for Durable Operation Above 400 C";"09/04/2029"
-"NASA Glenn Research Center";"Application";"LEW-18432-2";0;"13/078,510";"Addendum of Self-Aligned Ion Implant to Design and Processing of SiC High Temperature Transistors for Durable Operation Above 400 C";
-"NASA Glenn Research Center";"Issued";"LEW-18458-1";8386121;"12/791,907";"Optimal Tuner Selection For Kalman Filter-Based Aircraft Engine Performance Estimation";"06/02/2030"
-"NASA Glenn Research Center";"Issued";"LEW-18461-1";8159238;"12/570,742";"Method and Circuit for In-Situ Health Monitoring of Solar Cells in Space";"09/30/2029"
-"NASA Glenn Research Center";"Application";"LEW-18461-2";;"13/448,801";"Method and Circuit for In-Situ Health Monitoring of Solar Cells in Space";
-"NASA Glenn Research Center";"Application";"LEW-18466-1";0;"12/616,952";"Spring Tire";
-"NASA Glenn Research Center";"Application";"LEW-18473-1";0;"12/879,713";"Ka-Band Waveguide 2-Way Hybrid Combiner for MMIC Amplifiers With Unequal and Arbitrary Power Output Ratio";
-"NASA Glenn Research Center";"Issued";"LEW-18474-1";8609750;"12/792,380";"Selective Clay Placement Within A Silicate Clay-Epoxy Blend Nanocomposite";"06/02/2030"
-"NASA Glenn Research Center";"Issued";"LEW-18476-1";8182741;"12/544,742";"Ball Bearings Comprising Nickel-Titanium And Methods Of Manufacture Thereof";"08/20/2029"
-"NASA Glenn Research Center";"Application";"LEW-18476-2";0;"12/544,674";"Ball Bearings Comprising Nickel-Titanium And Methods Of Manufacture Thereof";
-"NASA Glenn Research Center";"Application";"LEW-18477-1";0;"13/242,300";"Graphene Based Reversible Nano-Switch/Sensor Schottky Diode (nanoSSSD) Device";
-"NASA Glenn Research Center";"Issued";"LEW-18483-1";8310671;"12/893,627";"Frame-Transfer Gating (FTG) Raman Spectroscopy for Time-Resolved Multiscalar Combustion Diagnostics";"09/29/2030"
-"NASA Glenn Research Center";"Application";"LEW-18486-2";0;"14/168,830";"Polyimide Aerogels With Three Dimensional Cross-Linked Structure";"01/30/2034"
-"NASA Glenn Research Center";"Issued";"LEW-18491-1";8209976;"12/323,091";"Shape Memory Based Actuators and Release Mechanisms";"11/25/2028"
-"NASA Glenn Research Center";"Application";"LEW-18492-1";0;"13/036,887";"Synthesis Methods, Microscopy Characterization and Device Integration of Nanoscale Metal Oxide Semiconductors for Gas Sensing in Aerospace Applications";
-"NASA Glenn Research Center";"Issued";"LEW-18496-1";8283172;"12/711,465";"Process to Produce Iron Nanoparticles - Lunar Dust Simulant Composite";"02/24/2030"
-"NASA Glenn Research Center";"Application";"LEW-18500-1";0;"12/848,903";"Precision Time Protocol Base Trilateration for Planetary Navigation";
-"NASA Glenn Research Center";"Application";"LEW-18516-1";0;"13/542,163";"Hybrid Gear";
-"NASA Glenn Research Center";"Issued";"LEW-18538-1";8373175;"12/791,276";"Ohmic Contact to N- and P-type Silicon Carbide";"06/01/2030"
-"NASA Glenn Research Center";"Application";"LEW-18542-1";0;"12/870,475";"Functionalization of Single Wall Carbon Nanotubes (SWCNTs) by Photooxidation";
-"NASA Glenn Research Center";"Application";"LEW-18554-1";0;"12/845,998";"Internal Limit Sensor (ILS)";
-"NASA Glenn Research Center";"Application";"LEW-18561-1";0;"12/726,926";"NASA PS400: A New High Temperature Solid Lubricant Coating for High Temperature Wear Applications";
-"NASA Glenn Research Center";"Application";"LEW-18565-1";0;"13/646,100";"Catalytic Microtube Rocket Igniter";"10/05/2032"
-"NASA Glenn Research Center";"Application";"LEW-18566-1";0;"12/829,663";"Low Density, High Creep Resistant Single Crystal Superalloy with Lower Manufacturing Cost";
-"NASA Glenn Research Center";"Application";"LEW-18586-1";;"13/030,342";"Shock Sensing Apparatus";
-"NASA Glenn Research Center";"Issued";"LEW-18593-1";8653693;"13/014,849";"Integrated Exciter/Igniter";"01/27/2031"
-"NASA Glenn Research Center";"Issued";"LEW-18594-1";8409372;"12/874,523";"Thermomechanical Methodology for Stabilizing Shape Memory Alloy (SMA) Response";"09/02/2030"
-"NASA Glenn Research Center";"Application";"LEW-18594-2";;"13/845,526";"Thermomechanical Methodology for Stabilizing Shape Memory Alloy (SMA) Response";
-"NASA Glenn Research Center";"Issued";"LEW-18601-1";8577504;"12/954,009";"Inductive Power Device (IDP)";"11/24/2030"
-"NASA Glenn Research Center";"Application";"LEW-18604-1";;"12/894,444";"Shock Resistant, Debris Tolerant, Lightweight, Corrosion Proof Bearings, Mechanical Components and Mechanisms Made From Hard, Highly Elastic Materials";
-"NASA Glenn Research Center";"Issued";"LEW-18605-1";8468794;"12/894,565";"Dual-Mode Hybrid-Engine (DMH-Engine): A Next-Generation Electric Propulsion Thruster";"09/30/2030"
-"NASA Glenn Research Center";"Application";"LEW-18605-2";;"13/713,907";"Dual-Mode Hybrid-Engine (DMH-Engine): A Next-Generation Electric Propulsion Thruster";
-"NASA Glenn Research Center";"Application";"LEW-18605-3";;"14/152,125";"Dual-Mode Hybrid-Engine (DMH-Engine): A Next-Generation Electric Propulsion Thruster";
-"NASA Glenn Research Center";"Application";"LEW-18608-1";;"12/892,339";"Liquid Tin Electrodes for Directo Conversion of JP-8 Fuel using the NASA BSC Solid Oxide Fuel Cell";
-"NASA Glenn Research Center";"Application";"LEW-18614-1";;"13/303,292";"High-Temperature Thermometer Using Cr-Doped GdAlO3 Broadband Luminescence";
-"NASA Glenn Research Center";"Application";"LEW-18615-1";;"12/892,278";"Purify Nanomaterials By Dissolving Excess Reactants And Catalysts In Ferric Chloride";
-"NASA Glenn Research Center";"Application";"LEW-18629-1";;"13/731,314";"Electrospray Collection of Lunar Dust";
-"NASA Glenn Research Center";"Application";"LEW-18631-1";;"13/218,847";"Circuit for Communication Over Power Lines";
-"NASA Glenn Research Center";"Application";"LEW-18632-1";;"13/311,987";"Method For Fabricating Diamond-Dispersed Fiber-Reinforced Composite Coating On Low Temperature Sliding Thrust Bearing Interfaces";
-"NASA Glenn Research Center";"Application";"LEW-18634-1";;"13/134,959";"Multi-Parameter Aerosol Scattering Sensor";
-"NASA Glenn Research Center";"Issued";"LEW-18636-1";8416007;"13/098,918";"A Source Coupled N Channel JFET Based Digital Logic Gate Structure Using Resistive Level Shifters and Having Direct Application to High Temperature Silicon Carbide Electronics";"05/02/2031"
-"NASA Glenn Research Center";"Application";"LEW-18639-1";;"13/112,293";"Atomic Oxygen Fluence Monitor";
-"NASA Glenn Research Center";"Application";"LEW-18649-1";;"12/870,443";"Ultracapacitor Based Uninterruptible Power Supply (UPS) System";
-"NASA Glenn Research Center";"Application";"LEW-18652-1";;"13/476,470";"Polarization Dependent Whispering Gallery Modes in Microspheres";
-"NASA Glenn Research Center";"Application";"LEW-18658-1";;"13/250,300";"Levitated Ducted Fan (LDF) Aircraft Auxiliary Generator";
-"NASA Glenn Research Center";"Application";"LEW-18674-1";;"13/552,760";"Polymer Electrolyte Based Ambient Temperature Oxygen Microsensors with Extremely Low Power Consumption for Enviromental Monitoring Applications";
-"NASA Johnson Space Center";"Application";"MSC-25349-1";0;"13/922036";"Robonaut Teleoperation System";
-"NASA Glenn Research Center";"Issued";"LEW-18691-1";7588746;"11/431,815";"Process and Apparatus for Hydrogen and Carbon Production via Carbon Aerosol-Catalyzed Dissociation of Hydrocarbons";"05/10/2026"
-"NASA Glenn Research Center";"Issued";"LEW-18692-1";7332146;"11/148,778";"Method For Zero Emission Liquid Hydrogen Production From Methane & Landfill Gas";"06/08/2025"
-"NASA Glenn Research Center";"Application";"LEW-18693-1";;"/";"Process For Hydrogen Production via Integrated Processing of Landfill Gas and Biomass";
-"NASA Glenn Research Center";"Application";"LEW-18694-1";;"13/075,879";"Discrete Data Qualification System and Method Comprising Noise Series Fault Detection";
-"NASA Glenn Research Center";"Application";"LEW-18704-1";;"13/531,763";"A Hybrid Power Management (HPM) Based Vehicle Architecture";
-"NASA Glenn Research Center";"Application";"LEW-18714-1";;"13/361,220";"High Strength Nanocomposite Glass Fibers";
-"NASA Glenn Research Center";"Issued";"LEW-18717-1";8476979;"13/178,101";"A Novel Wideband GaN MMIC Distributed Amplifier Based Microwave Power Module for Space Communications, Navigation, and Radar";"07/07/2031"
-"NASA Glenn Research Center";"Application";"LEW-18717-2";;"13/847,779";"A Novel Wideband GaN MMIC Distributed Amplifier Based Microwave Power Module for Space Communications, Navigation, and Radar";
-"NASA Glenn Research Center";"Application";"LEW-18724-1";;"13/339,521";"VESGEN Software for Mapping and Quantification of Vascular Remodeling in Botanical Plant Leaves";
-"NASA Glenn Research Center";"Application";"LEW-18732-1";;"13/514,582";"Water Purification by High Voltage, Nanosecond, Non-Equilibrium Plasma: Applications to Human Spaceflight and Terrestrial Point-of-Use";"08/16/2032"
-"NASA Glenn Research Center";"Application";"LEW-18736-1";;"13/534,745";"Iridium Interfacial Stack (IrIS) Final";
-"NASA Glenn Research Center";"Application";"LEW-18738-1";;"13/474,948";"Atmospheric Turbulence Modeling for Aero Vehicles";
-"NASA Glenn Research Center";"Application";"LEW-18752-1";;"13/686,000";"Large Strain Transparent Magneto-active Polymer Nanocomposites";"11/28/2031"
-"NASA Glenn Research Center";"Application";"LEW-18754-1";;"13/534,870";"Method For Making Measurements Of The Post-Combustion Residence Time In A Gas Turbine Engine";
-"NASA Glenn Research Center";"Application";"LEW-18761-1";;"13/247,601";"Temperature Sensitive Coating Sensor Based On Hematite";
-"NASA Glenn Research Center";"Application";"LEW-18762-1";;"13/364691";"Selenium Interlayer for High-efficiency Multijunction Solar Cell";
-"NASA Glenn Research Center";"Application";"LEW-18768-1";;"13/788,041";"Processing of Nanosensors Using a Sacrificial Template Approach";"03/23/2032"
-"NASA Glenn Research Center";"Application";"LEW-18769-1";;"13/537,816";"Compact, Lightweight, CMC (Ceramic Matrix Composite)-Based Acoustic Liner for Subsonic Jet Aircraft Engines--Offering High Temperature Capability, Weight Reduction, and Broadband Acoustic Treatment";
-"NASA Glenn Research Center";"Application";"LEW-18771-1";;"13/301,249";"Integrated Temperature and Capacitive Ablation Recession Rate Sensors";
-"NASA Glenn Research Center";"Application";"LEW-18785-1";;"13/246,440";"Method to Pre-Stress Shock Resistant Mechanical Components and Mechanisms made from Hard, Highly Elastic Materials";
-"NASA Glenn Research Center";"Application";"LEW-18789-1";;"13/771,833";"Method to Increase Performance of Foil Bearings Through Passive Thermal Management";"02/27/2032"
-"NASA Glenn Research Center";"Application";"LEW-18797-1";;"13/714,906";"High Speed, Compliant, Planetary Flywheel Touchdown Bearing";"12/16/2031"
-"NASA Glenn Research Center";"Application";"LEW-18802-1";;"13/534,804";"Alpha-STREAM Convertor - A Stirling Engine with no moving parts, eliminated streaming losses, high efficiency, low cost fabrication, and electronic wave modulation.";
-"NASA Glenn Research Center";"Application";"LEW-18809-1";;"13/410,663";"Sampling and Control Circuit Board for an Inertial Measurement Unit";"08/03/2032"
-"NASA Glenn Research Center";"Application";"LEW-18816-1";;"13/749,773";"High Speed Edge Detecting Circuit For Use With Linear Image Sensor";"06/01/2032"
-"NASA Glenn Research Center";"Application";"LEW-18821-1";;"13/561,359";"Dopant Selective Reactive Ion Etching of Silicon Carbide";"07/30/2032"
-"NASA Glenn Research Center";"Application";"LEW-18822-1";;"13/524,327";"Planar Modular Package";
-"NASA Glenn Research Center";"Application";"LEW-18825-1";0;"13/804,546";"Porous Cross-Linked Polyimide-UREA Networks";"03/14/2033"
-"NASA Glenn Research Center";"Application";"LEW-18837-1";;"13/527,181";"In-Situ Solid Particle Generator";
-"NASA Glenn Research Center";"Application";"LEW-18844-1";;"13/918,333";"Electrospun Nanofiber Coating Of Fiber Materials: A Composite Toughening Approach";"06/14/2033"
-"NASA Glenn Research Center";"Application";"LEW-18849-1";;"13/906,521";"Paired Threaded Film Cooling Holes for Improved Turbine Film Cooling";"05/31/2033"
-"NASA Glenn Research Center";"Application";"LEW-18858-1";;"13/904,513";"V-Cess: A Novel Flow Control Method Using A Shaped Recess";"05/29/2033"
-"NASA Glenn Research Center";"Application";"LEW-18862-1";;"13/474,972";"Cascading TESLA oscillating flow diode for Stirling Engine Gas Bearings";
-"NASA Glenn Research Center";"Application";"LEW-18864-1";;"13/756,855";"Polyimide Aerogel Thin Films";"02/03/2032"
-"NASA Glenn Research Center";"Application";"LEW-18873-1";;"13/968,000";"High Temperature Single Crystal Preloader";"08/15/2033"
-"NASA Glenn Research Center";"Application";"LEW-18887-1";;"13/756,604";"Fuzzy Neuron: Method and Hardware Realization";"02/01/2033"
-"NASA Glenn Research Center";"Application";"LEW-18889-1";;"13/713,846";"High Speed Idle Engine Control Mode";"12/13/2032"
-"NASA Glenn Research Center";"Application";"LEW-18890-1";;"13/871,114";"Suppression Of Unwanted Noise And Howl In A Test Configuration Where A Jet Exhaust Is Discharged Into A Duct";
-"NASA Glenn Research Center";"Application";"LEW-18891-1 with LEW-18611-1 and LEW-18895-1";;"13/723,598";"G6 Flywheel Design";"12/23/2031"
-"NASA Glenn Research Center";"Application";"LEW-18893-1";;"13/653,027";"Novel Aerogel-Based Antennas (ABA) for Aerospace Applications";
-"NASA Glenn Research Center";"Application";"LEW-18900-1";;;"High Efficiency, High Temperature Titanium Heat Pipe Radiator for Space Power and Propulsion Systems";
-"NASA Glenn Research Center";"Application";"LEW-18902-1";;"14/094,006";"Analog Correlator Based on One Bit Digital Correlator";"12/02/2033"
-"NASA Glenn Research Center";"Application";"LEW-18903-1";;"13/923,441";"Modeling and Simulation of a Solar Electric Propulsion Vehicle in Near-Earth Vicinity Including Solar Array Degradation";"06/21/2033"
-"NASA Glenn Research Center";"Application";"LEW-18919-1";;"13/645,799";"Wireless Controlled Chalcogenide Nanoionic Radio Frequency Switch";"04/04/2032"
-"NASA Glenn Research Center";"Application";"LEW-18923-1";;"13/963,060";"New Power Source For Deep Space Missions- Utilizing The Doubly Exothermic Reaction Between Deuterium And Palladium To Produce Electrical Power";"08/09/2033"
-"NASA Glenn Research Center";"Application";"LEW-18928-1";;;"Pt-Ti-Si Simultaneous Ohmic Contacts to N- and P-Type Silicon Carbide";
-"NASA Glenn Research Center";"Application";"LEW-18934-1";;"13/900,642";"Conditionally Active Min-Max Limit Regulators";"05/23/2033"
-"NASA Glenn Research Center";"Application";"LEW-18939-1";;"13/916,797";"Magnetostrictive Alternator - Low cost, No moving part, High Efficiency, Oscillating Acoustic Pressure Wave to Electric Power Transducer";"06/13/2033"
-"NASA Glenn Research Center";"Application";"LEW-18942-1";;"13/771,920";"Adaptive Phase Delay Generator";"02/20/2033"
-"NASA Glenn Research Center";"Application";"LEW-18949-1";;"13/923,450";"Advanced High Temperature and Fatigue Resistant Environmental Barrier Coating Bond Coat Systems for SiC/SiC Ceramic Matrix Composites";"06/21/2033"
-"NASA Glenn Research Center";"Application";"LEW-18952-1";;;"A Novel Real Time Adaptive Filter For The Reduction Of Artifacts In Functional Near Infrared Spectroscopy Signals";
-"NASA Glenn Research Center";"Application";"LEW-18957-1";;"14/048,895";"Dynamic Range Enhancement Of High-Speed Data Acquisition Systems By Reversible Non-Linear Amplitude Compression";"10/08/2033"
-"NASA Glenn Research Center";"Application";"LEW-18960-1";;"13/891,461";"Dry Snorkel Cold Immersion Suit for Hypothermia Prevention";"05/11/2032"
-"NASA Glenn Research Center";"Application";"LEW-18963-1";;"13/853,308";"Flywheel Pulse & Glide System for Vehicles";
-"NASA Glenn Research Center";"Application";"LEW-18964-1";;"13/905,333";"High Temperature Lightweight Self-Healing Ceramic Composites for Aircraft Engine Applications";"05/30/2033"
-"NASA Glenn Research Center";"Application";"LEW-18970-1";;"14/158,080";"Methods for Intercalating and Exfoliating Hexagonal Boron Nitride";"01/17/2034"
-"NASA Glenn Research Center";"Application";"LEW-18986-1";;;"Generation Of High Pressure Oxygen Via Electrochemical Pumping In A Multi-Stage Electrolysis Stack";
-"NASA Glenn Research Center";"Application";"LEW-19013-1";;"14/095,442";"Spoked Wheel Assembly With Two Rotational Modes";"12/03/2033"
-"NASA Glenn Research Center";"Application";"LEW-19029-1";;"14/191,708";"Superelastic Ternary Ordered Intermetallic Compounds";"02/27/2034"
-"NASA Glenn Research Center";"Application";"LEW-19040-1";;"14/193,024";"Fast, Large Area, Wide Band Gap UV Photodetector for Cherenkov Light Detection";"02/28/2034"
-"NASA Glenn Research Center";"Application";"LEW-19045-1";;"13/968,531";"Multimode Directional Coupler for Measurement and Utilization of Harmonic Frequencies from Traveling Wave Tube Amplifiers";"08/16/2033"
-"NASA Glenn Research Center";"Application";"LEW-19053-1";;"14/193,719";"Process for Preparing Aerogels from Polyamides";"02/28/2034"
-"NASA Glenn Research Center";"Application";"LEW-19067-1";;;"Plasma Spray-Physical Vapor Deposition (PS-PVD) of Advanced Environmental Barrier Coatings";
-"NASA Glenn Research Center";"Application";"LEW-19077-1";;;"Improved Composite Damage Tolerance and Through Thickness Conductivity By Interleaving Carbon Fiber Veil Nanocomposites";
-"NASA Glenn Research Center";"Application";"LEW-19080-1";;;"Crosslinked Polyethylene Aerogels from Low Density Polyethylene, Linear Low Density Polyethylene, and Repurposed Polyethylene";
-"NASA Glenn Research Center";"Application";"LEW-19098-1";;"61/866,585";"High Temperature, Flexible Composite Seals for Aeronautics and Space Environments Incorporating Aerogel Insulation";
-"NASA Glenn Research Center";"Application";"LEW-19171-1";;"61/931,189";"Low Power Charged Particle Counter for Space Radiation Monitoring";
-"NASA Marshall Space Flight Center";"Issued";"MFS-28402-2";5780594;"08/448,196";"Biologically Active Protein Fragments Containing Specific Binding Regions Of Serum Albumin Or Related Proteins";"07/14/2015"
-"NASA Marshall Space Flight Center";"Issued";"MFS-28985-1";5641681;"08/422,963";"Device And Method For Screening Crystallization Conditions In Solution Crystal Growth";"04/17/2015"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31175-2-CIP";6578851;"09/693,098";"Gasket Assembly For Sealing Mating Surfaces";"10/16/2020"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31243-1";6459822;" 09/364,919";"Video Image Stabilization And Registration (VISAR)";"07/26/2019"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31243-2-CON";6560375;"10/143,539";"Video Image Stabilization And Registration";"05/10/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31258-1";6135255;"09/207,710";"Releasable Conical Roller Clutch";"12/09/2018"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31294-2-CIP2";6592687;"10/196,389";"Aluminum Alloy And Article Cast Therefrom";"07/11/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31294-5-CIP";6399020;"09/688,729";"Aluminum-Silicon Alloy Having Improved Properties At Elevated Temperatures And Articles Cast Therefrom";"10/11/2020"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31294-6-CIP";6419769;"09/749,503";"Aluminum-Silicon Alloy Having Improved Properties At Elevated Temperatures And Process For Producing Cast Articles Therefrom";"12/22/2020"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31294-7-CIP";6669792;"09/800,312";"Process For Producing A Cast Article From A Hypereutectic Aluminum-Silicon Alloy";"03/02/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31303-1";6748349;"09/313,576";"Generalized Fluid System Simulation Program (GFSSP) Version 2.01c";"05/07/2019"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31387-1";6361961;"09/560,532";"GRAVITY RESPONSIVE NADH OXIDASE OF THE PLASMA MEMBRANE";"04/25/2020"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31399-1";6658329;"10/138,887";"Addition Of Rangefinder To The Video Guidance Sensor";"06/05/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31413-1";6497355;"09/690,035";"Precision Penetration Control System For The Friction Stir Welding (FSW) Retractable Pin Tool";"10/19/2020"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31475-1";6424470;"09/616,624";"Panoramic Refracting Optic (PRO)";"07/28/2020"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31475-2-DIV";6580567;"10/173,410";"Panoramic Refracting Conical Optic";"06/17/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31488-1";6028693;"09/7,124";"Microresonator And Associated Method For Producing And Controlling Photonic Signals With A Photonic Bandgap Delay Apparatus";"01/14/2018"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31490-1";7118074;"10/690,161";"Electrodynamic Tether System Design For Spacecraft Deorbit";"10/17/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31529-1";7081730;"10/857,375";"Micro-Commanding Servo Motor Controller With Greater Than Fifty Million To One Dynamic Rate Range";"06/19/2024"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31559-1-CON";8127977;"13/157,895";"Phase/Matrix Transformation Weld Process And Apparatus";"11/27/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31559-1-DIV";7980449;"10/385,168";"Phase/Matrix Transformation Weld Process And Apparatus";"11/27/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31559-2-DIV";8225984;"13/157988";"Phase/Matrix Transformation Weld Process And Apparatus";"11/27/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31565-1";6885779;"09/877,801";"Full-Cycle, Low Loss, Low Distortion Phase Modulation From Multi-Layered Dielectric Stack With Terahertz Optical Bandwidth";"08/17/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31584-1";6497091;"09/877,800";"Hypergolic Ignitor Assembly";"06/06/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31584-1-CIP";6845605;"10/288,800";"Hypergolic Ignitor";"01/26/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31593-1";6939610;"10/212,564";"Smart Thermal Management Coating";"09/20/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31596-1";6873762;"10/118,626";"Fabrication Of Fiber-Optic Gratings Over A Wide Range Of Bragg Wavelength And Bandwidth Using A Single Phase Mask";"10/12/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31616-1";6540426;"09/949,408";"Passive Ball Capture Latch Docking Mechanism";"09/04/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31646-1";6860099;"10/263,297";"Liquid Propellant Tracing Impingement Injector";"05/24/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31649-1";7446860;"11/527,648";"Nonintrusive, Remote, Micron Accuracy, Laser Fresnel Ranging System";"10/19/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31698-1";6802999;"10/173,536";"Method Of Fabricating A Protective Crucible Wall Coating Incorporating Designed Multi-Use Channels";"05/02/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31706-1";6886392;"10/622,174";"Single Ball Bearing Lubricant And Material Evaluator";"07/17/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31727-1";6953129;"10/231,428";"Impact And Fire Resistant Coating For Pressure Vessels";"11/07/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31761-1";6802488;"10/232,974";"Electro-Mechanically Actuated Propellant Valve";"01/29/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31768-1";6745942;"10/214,482";"Magnetic Symbology Reader";"08/05/2022"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31776-1";7735265;"11/780,610";"Foam-Rigidized Inflatable Tubular Space Booms";"07/20/2027"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31785-1";7006203;"10/646,000";"Integrated Rangefinding Measurement In Video Guidance Sensor";"08/21/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31789-1";7265476;"10/975,121";"MEMS- Micro-Translation Stage With Indefinite Linear Travel Capability";"11/01/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31807-1";7050161;"10/637,085";"Global Radius Of Curvature Estimation And Control System For Segmented Mirrors (GRoCECS)";"01/07/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31813-1";7802799;"11/527,653";"Joining Metallic To Composite Components";"07/29/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31815-1";7325749;"10/738,352";"Distributed Solid State Programmable Thermostat / Power Controller";"01/29/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31817-1";7515257;"11/14,455";"Short-Range / Long-Range Integrated Target (SLIT) For Video Guidance Sensor Rendezvous And Docking";"06/07/2027"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31823-1-DIV";7095000;"10/943,827";"Radio-Frequency Driven Dielectric Heaters For Non-Nuclear Testing In Nuclear Core Development";"11/27/2024"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31828-1";6918970;"10/120,226";"High Strength Aluminum Alloy For High Temperature Applications";"04/12/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31838-1";7641949;"10/857,379";"Improved Pressure Vessel Impact Resistance Utilizing Filament Wound Hybrid Fibers";"10/15/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31842-1";7347089;"11/215,749";"Gas Volume Contents Within A Container, Smart Volume Instrument";"11/26/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31843-1";7174077;"10/631,220";"Fiber-Coupled Laser Diodes With Even Illumination Pattern";"07/30/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31852-1";7106457;"10/857,372";"Achromatic Shearing Phase Sensor For Phase Alignment Of A Segmented Telescope";"01/21/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31865-1";6888476;"10/615,369";"Advanced Video Guidance Sensor Software";"07/21/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31886-1";6850592;"10/321,873";"Digital Equivalent System (DEDS) For X-Ray Flourescent Spectral Output";"01/08/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31891-1";7375801;"11/108,140";"Video Sensor With Range Measurement Capability";"11/06/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31918-1";7275675;"10/928,876";"Optimal Design Geometry For All Friction Stir Weld Tools";"01/15/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-31944-1";7017812;"10/730,191";"Variable Distance Angular Symbology Reader";"11/26/2023"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32024-1";8297468;"10/857,380";"Liquefied Natural Gas Fuel Tank";"07/13/2021"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32031-1";7738084;"11/543,284";"Fiber Optic Liquid Mass Flow Sensor - Improved Prototype Design";"09/29/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32099-1-CON";8561829;"13/544,066";"Composite Pressure Vessel Including Crack Arresting Barrier";"10/23/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32102-1";7540143;"11/172,665";"Heated Pressure Balls Monopropellant Thermal Rocket Engine Cycle";"12/12/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32105-1-DIV";7568608;"11/700,972";"Ultrasonic Stir Welding Process And Apparatus";"01/29/2027"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32115-1";7686202;"11/543,287";"Gimbling Shoulder For Friction Stir Welding";"06/18/2027"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32136-1";7595841;"11/174,210";"Video Image Stabilization And Registration - Plus (VISAR+)";"12/03/2027"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32137-1";7177164;"11/376,632";"Multi-loop High Voltage Power Supply with Fast Rise/Fall Time";"03/10/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32175-1";7228241;"11/152,810";"An Extended Lee-Kesler Equation-of-State (ELK-EoS) For The Volumetric And Thermodynamic Properties Of Propellant Fluids, Including The Non-Polar Quantum And Polar Fluids";"06/13/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32192-1";7116098;"11/357,454";"Absolute Limit Sensor (ALS)";"02/16/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32208-1";7259981;"11/296,719";"Analog Nonvolatile Computer Memory";"12/14/2025"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32214-1";7418814;"11/172,666";"Dual Expander Cycle Rocket Engine Cycle with an Intermediate Brayton Cycle Heat Exchanger";"12/19/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32228-1";8290435;"12/241,322";"Short Range Antenna / Close Proximity Transmitter and Receiver";"08/17/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32253-1";7469878;"11/518,733";"Magnetorestrictive Valves";"10/17/2026"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32307-1";7908079;"11/527,658";"Portable Runway Intersection Display And Monitoring System";"01/13/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32311-1";7623621;"12/47,686";"Identification And Authentication System Using Integrated Optical And X-ray Fluorescene Spectral Methods";"03/13/2028"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32318-1";8098060;"12/173,318";"SCAPS(Single Coil Absolute Position Sensor) GAPSYN (Inductive Gap Sensor) Digital Signal Conditioning Electronics";"09/29/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32323-1";8169620;"12/563,819";"Sub-Pixel Spatial Resolution Interferometry With Interlaced Stitching";"10/15/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32324-1";7594530;"11/942,322";"Orbital Foamed Metal Extruder";"06/09/2028"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32341-1";8550468;"12/210,843";"High Load Fully Retained Dynamic Cryogenic Seal";"01/09/2032"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32364-1";7808353;"11/513,433";"Plasmoid Thruster for Electrode-less, High Specific Impulse Propulsion";"07/22/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32390-1";7867589;"11/780,561";"Hybrid composite cryogenic tank structure";"10/14/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32400-1";7900436;"11/780,626";"Gas Generator Augmented Expander Cycle Rocket Engine";"01/04/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32402-1";7911174;"12/39,506";"Inexpensive, Rate Insensitive, Linear, Load Compensating System for Hybrid Stepper Motors";"01/25/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32429-1";7807097;"12/123,170";"Orbital Batch Process Foamed Aluminum Facility";"07/11/2028"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32438-1";8004364;"11/828,563";"16-Kilowatt (KW) 2-30MHz Solid State Power Amplifier using innovative combining methods";"11/03/2028"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32439-1";7831225;"11/828,590";"H2O-NaCl based radio frequency power load";"04/07/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32497-1";7848606;"12/047,805";"Reprocessing Non-Oxide Optical Fiber Preforms Utilizing an Axial Magnetic Field";"05/26/2029"
-"NASA Marshall Space Flight Center";"Application";"MFS-32518-1-CIP";;"13/452,303";"Liquid Propellant Injection Elements with Self-Adjusted Inlet Area for Rocket and Other Combustor-Type Engines Applications";"10/03/2028"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32521-1";7804600;"12/44,740";"Dispersive Filter For Enhancement Of Laser Gyroscopes";"06/10/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32548-1";7409875;"11/862,793";"Optical Hotspot Conductive Fluid Flow Sensor";"09/27/2027"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32558-1";8490470;"12/569,555";"True Shear Parallel Plate Viscometer";"12/04/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32584-1";7929144;"12/336,260";"Local Leak Detection and Health Monitoring of Pressurized Tanks in a Space Environment";"11/17/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32588-1";8052860;"11/957,051";"ELECTROCHEMICALLY-ENHANCED MECHANICAL POLISHING OF OPTICS";"09/06/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32605-1";8309944;"12/240,626";"Grazing Incidence Optics for Neutron Analysis and Imaging";"12/07/2030"
-"NASA Marshall Space Flight Center";"Application";"MFS-32605-1-CIP";0;"12/717,450";"Novel Grazing Incidence Neutron Optics";"09/29/2028"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32605-1-DIV";8575577;"13/534,951";"Novel Grazing Incidence Neutron Optics";"09/29/2028"
-"NASA Marshall Space Flight Center";"Application";"MFS-32612-1-CIP";;"13/796,693";"Protective Safety Cover for Pool and Spa Drains";"03/24/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32614-1";464750;"12/826,887";"Magnetostrictive Regulator";"04/03/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32615-1";8132772;"12/567,451";"Avionics/Electronics Box Rail Mount System";"11/27/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32638-1";8291776;"12/827,515";"Magnetostrictive Force-to-Angle Sensor";"03/12/2031"
-"NASA Marshall Space Flight Center";"Application";"MFS-32642-1";0;"12/827,598";"Cryogenic and Non-Cryogenic Optical Liquid Level Instrument for Stratified Conditions";"04/05/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32651-1";8090484;"12/403,096";"A Planar Translation Device for Solar Sail Spacecraft Attitude Control and Maneuvering";"07/03/2030"
-"NASA Marshall Space Flight Center";"Application";"MFS-32655-1";0;"12/862,510";"AEROSPACE LASER IGNITION/ABLATION VARIABLE, HIGH PRECISION THRUSTER";
-"NASA Marshall Space Flight Center";"Issued";"MFS-32667-1";8357884;"12/839,848";"Extraction of Water from the Soil of Space Bodies Using Microwave processes";"04/22/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32697-1";8252734;"12/634,502";"Multi Layered or Mixed Element Aqueous Ionic Fluids As Fuel or Lubrication Friction Modifiers";"08/26/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32697-1-CIP";8563487;"13/525,623";"Multi Layered or Mixed Element Aqueous Ionic Fluids As Fuel or Lubrication Friction Modifiers";"12/09/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32715-1";8535440;"12/758169";"Improvement of Crystalline Quality during Melt Growth of Semiconductors by Mechanically Induced Nucleation";"07/18/2032"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32719-1";8564770;"13/150832";"Field-Deployable Spectral Estimator of Trichloroacetic Acid (TCAA) in Plants";"05/18/2032"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32733-1";7621670;"12/392,867";"Unbalanced Flow Distribution Mixer with Flow Metering Capability";"02/25/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32737-1";8448498;"12/870,468";"Hermetic Seal Leak Detection Apparatus";"06/06/2031"
-"NASA Marshall Space Flight Center";"Application";"MFS-32737-1-CIP";;"13/874182";"Hermetic Seal Leak Detection Apparatus";"08/27/2030"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32748-1";8132961;"12/397,973";"Optimized Length-to-Diameter Ratio Flow Meter";"08/16/2030"
-"NASA Marshall Space Flight Center";"Application";"MFS-32757-1";0;"13/118086";"Compliant Mechanical Motor";
-"NASA Marshall Space Flight Center";"Application";"MFS-32761-1-CIP";;"13/673,309";"Multi-Channel Flow Plug with Eddy Current Minimization for Metering, Mixing, and Conditioning";"07/23/2029"
-"NASA Marshall Space Flight Center";"Application";"MFS-32761-1-CON";;"13/729,861";"Multi-Channel Flow Plug with Eddy Current Minimization for Meeting, Mixing, and Conditioning";"07/23/2029"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32777-1";8425751;"13/020144";"Electrodeposited Nickel-Cobalt Alloy Development";"05/31/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32797-1";8330961;"12/837,173";"A compact sensor for in-situ measurements of gas leaks";"08/24/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32803-1";8133768;"12/560,371";"Method of Manufacturing Light Emmitting, Photovoltaic or other Electronic Apparatus";"05/31/2027"
-"NASA Marshall Space Flight Center";"Application";"MFS-32809-1";0;"13/369,704";"Telemetry encoder/decoder";
-"NASA Marshall Space Flight Center";"Issued";"MFS-32817-1";8290006;"13/281,025";"Variable Power Handheld Laser Torch for Joining Processes";"10/25/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32826-1";8316884;"12/846,429";"Drain System for Pools, Spas, and Tanks. (Reference MFS 32612-1)";"03/23/2031"
-"NASA Marshall Space Flight Center";"Application";"MFS-33054-1";;"14/020,326";"Multi-spacecraft Autonomous Positioning System / Network-Based Navigation";"09/06/2033"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32830-1";8420582;"13/027472";"FRICTION MANAGEMENT USING SOLVENT PARTITIONING OF SINGLE ELEMENT AND MULTI-ELEMENT HYDROPHILIC SURFACE-INTERACTIVE CHEMICALS CONTAINED IN HYDROPHILIC TARGETED EMULSIONS";"02/15/2031"
-"NASA Marshall Space Flight Center";"Application";"MFS-32830-1-CIP";;"13/900,452";"Friction and Wear Management Using Solvent Partioning of Hydrophilic Surface-Interactive Chemicals contains in Boundary Layer-Targeted Emulsions";"03/07/2033"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32840-1";8322685;"12/842,218";"Non-collinear Valve Actuator";"04/02/2031"
-"NASA Marshall Space Flight Center";"Application";"MFS-32841-1";;"13/424,754";"DUPLICATE of Telemetry encoder/decoder";
-"NASA Marshall Space Flight Center";"Application";"MFS-32853-1";;"14/196,203";"Particle Damping for Vibration Mitigation of Circuit Cards";"03/04/2034"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32857-1";8668168;"13/326,513";"Rocket Vent Design with Variable Flow Control and Rain Protection";"01/21/2032"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32859-1";8393520;"13/240,075";"Variably Pulsed High Power Ultrasonic (HPU) Energy for Ultrasonic Stir Welding (USW)";"11/07/2031"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32859-1-DIV";8393523;"13/523,310";"Pulsed Ultrasonic Stir Welding Method";"09/22/2031"
-"NASA Marshall Space Flight Center";"Application";"MFS-32865-1";;"13/302,734";"Easily Installed, In-situ Adaptable Flow Measurement Device and Method.";
-"NASA Marshall Space Flight Center";"Issued";"MFS-32865-2";8555731;"13/302,773";"Easily Installed, In-situ Adaptable Flow Measurement Device and Method.";"06/04/2032"
-"NASA Marshall Space Flight Center";"Application";"MFS-32865-3";;"13/302,817";"Easily Installed, In-situ Adaptable Flow Measurement Device and Method.";
-"NASA Marshall Space Flight Center";"Application";"MFS-32865-4";;"13/302,845";"Easily Installed, In-situ Adaptable Flow Measurement Device and Method.";"08/23/2032"
-"NASA Marshall Space Flight Center";"Issued";"MFS-32871-1";8577519;"13/424,898";"Low Cost Telemetry System for Small/micro satellites";"06/13/2032"
-"NASA Marshall Space Flight Center";"Application";"MFS-32873-1";;"13/523210";"High-current, high-voltage switch using non-hazardous liquid metals";"11/29/2032"
-"NASA Marshall Space Flight Center";"Application";"MFS-32889-1";;"13/174,084";"Pyrotechnic Pipe Plug and Variable Area Flow Meter";
-"NASA Marshall Space Flight Center";"Application";"MFS-32895-1";;"13/242,734";"High Powered Ultrasonically Assisted Thermal Stir Welding";
-"NASA Marshall Space Flight Center";"Application";"MFS-32912-1";;"13/299,930";"Salt Water Power Load - Part II";
-"NASA Marshall Space Flight Center";"Application";"MFS-32916-1";;"13/333283";"Improved Impact Toughness and Heat Treatment for Cast Aluminum Wheels";
-"NASA Marshall Space Flight Center";"Application";"MFS-32924-1";;"13/312,481";"Partial Automated Alignment & Integration System";"07/09/2032"
-"NASA Marshall Space Flight Center";"Application";"MFS-32934-1";;"12/833,894";"Methods, Devices, and Systems Relating to a Sensing Device";
-"NASA Marshall Space Flight Center";"Issued";"MFS-32940-1";8657179;"13/430,268";"Closed Loop Temperature Control for the Thermal Stir Welding Process";"03/26/2032"
-"NASA Marshall Space Flight Center";"Application";"MFS-32944-1";;"13/896,137";"Mitigation of Sonic Boom from Supersonic Vehicles by means of Long Penetration Mode (LPM) Counter-Flowing Cold Gas Jets";"05/16/2033"
-"NASA Marshall Space Flight Center";"Application";"MFS-32945-1";;"14/082,956";"Piezoelectric Gravity Gradient and Multiple Purpose Sensor Detection System";"11/18/2033"
-"NASA Marshall Space Flight Center";"Application";"MFS-32986-1";;"13/961,573";"Non-Explosively-Actuated Pressurization Start Valve";"08/07/2033"
-"NASA Marshall Space Flight Center";"Application";"MFS-33007-1";;"14/192,350";"Carbon Nanotube Tape Vibrating Gyroscope Update";"02/27/2034"
-"NASA Marshall Space Flight Center";"Application";"MFS-33022-1";;"14/192,395";"A Design Technology to Eliminate Dribble Volume in Rocket Engine Manifolds for Swirl-Coaxial Injectors";"02/27/2034"
-"NASA Marshall Space Flight Center";"Application";"MFS-33031-1";;"13/949,361";"An aerodynamic design concept for rocket nozzle side load reduction";"07/24/2033"
-"NASA Marshall Space Flight Center";"Application";"MFS-33060-1";;"14/104,881";"Carbon Nanotube Tape Single Axis Accelerometer";"12/12/2033"
-"NASA Johnson Space Center";"Issued";"MSC-21715-2";5869238;"08/390,904";"Quantitative Method Of Measuring Cancer Cell Urokinase And Metastatic Potential";"02/09/2016"
-"NASA Johnson Space Center";"Issued";"MSC-21947-1";7541159;"10/828,531";"MOLECULAR SPECIFIC ANTIBODIES AGAINST UROKINASE";"08/28/2025"
-"NASA Johnson Space Center";"Issued";"MSC-22119-1";5851816;"08/172,962";"A PROCESS FOR DEVELOPING HIGH-FIDELITY THREE-DIMENSIONAL TUMOR MODELS OF HUMAN PROSTATE CARCINOMA";"12/22/2015"
-"NASA Johnson Space Center";"Issued";"MSC-22122-1";6117674;"08/366,065";"HORIZONTAL ROTATING-WALL VESSEL PROPAGATION IN IN VITRO HUMAN TISSUE MODELS";"09/12/2017"
-"NASA Johnson Space Center";"Issued";"MSC-22489-1";5827531;"08/349,169";"Multi-Lamellar, Immiscible-Phase Microencapsulation of Drugs";"10/27/2015"
-"NASA Johnson Space Center";"Issued";"MSC-22616-2";6133036;"09/7,239";"Preservation Of Liquid Biological Samples";"12/12/2015"
-"NASA Johnson Space Center";"Issued";"MSC-22616-3";6716392;"09/630,979";"Preservation Of Liquid Biological Samples";"01/14/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22633-1";6485963;"09/587,028";"Electrically Potentiated Growth Of Mammalian Neuronal Tissue Facilitated By Rotating Wall Vessel Culture";"06/02/2020"
-"NASA Johnson Space Center";"Issued";"MSC-22633-2";6673597;"09/798,854";"Growth Stimulation Of Biological Cells And Tissue By Electromagnetic Fields And Uses Thereof";"02/28/2021"
-"NASA Johnson Space Center";"Issued";"MSC-22695-1";6261844;"09/213,988";"A Unique Urine Preservative With Combined Antibacterial And Antioxidant Properties";"12/17/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22721-2";6254359;"09/354,915";"Blood Pump Bearing System";"07/09/2019"
-"NASA Johnson Space Center";"Issued";"MSC-22724-1";6047216;"09/129,832";"Millimeter Wave/Microwave Ablation For Treatment Of Atherosclerotic Lesions";"08/05/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22724-2";6226553;"09/501,150";"Endothelium Preserving Microwave Treatment For Atherosclerosis";"02/09/2020"
-"NASA Johnson Space Center";"Issued";"MSC-22724-3";6223086;"09/504,768";"Endothelium Preserving Microwave Treatment For Atherosclerosis";"02/09/2020"
-"NASA Johnson Space Center";"Issued";"MSC-22724-5";6496736;"09/500,538";"Endothelium Preserving Microwave Treatment For Atherosclerosis";"02/09/2020"
-"NASA Johnson Space Center";"Issued";"MSC-22757-1";5879079;"08/917,581";"Automated Propellant Blending Machine";"08/20/2017"
-"NASA Johnson Space Center";"Issued";"MSC-22797-1";6312398;"08/786,842";"A Method Of Applying External Power To Assist In The Operation Of Joints In Pressure Suits And Inflatable Structures2283";"12/19/2016"
-"NASA Johnson Space Center";"Issued";"MSC-22839-1";6501414;"09/826,402";"Locating Concealed Objects Using Spectral Signatures";"04/02/2021"
-"NASA Johnson Space Center";"Issued";"MSC-22859-1";6730498;"09/56,363";"Production Of 1-25diOH Vitamin D3, Erythropoietin And Other Products By Epithelial And Interstitial Cells In Response To Shear Stress";"04/08/2017"
-"NASA Johnson Space Center";"Issued";"MSC-22859-2";6946246;"09/532,001";"Production Of Functional Proteins: Balance Of Shear Stress And Gravity";"03/21/2020"
-"NASA Johnson Space Center";"Issued";"MSC-22859-3";7198947;"10/734,759";"Production Of Functional Proteins: Balance Of Shear Stress And Gravity";"12/22/2023"
-"NASA Johnson Space Center";"Issued";"MSC-22859-5";7972821;"12/174,221";"Production of Functional Proteins: Balance of Shear Stress and Gravity";"02/11/2029"
-"NASA Johnson Space Center";"Issued";"MSC-22863-1";7122071;"10/263,280";"Centrifugal Adsorption Cartridge System (CACS)";"12/21/2022"
-"NASA Johnson Space Center";"Issued";"MSC-22866-1";6099864;"09/79,741";"INSITU Activation Of Microcapsules";"05/15/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22900-1";6231010;"09/236,785";"Advanced Structural/Inflatable Hybrid Spacecraft Habitation Module";"01/25/2019"
-"NASA Johnson Space Center";"Issued";"MSC-23563-2";8039099;"11/848,332";"Nanoencapsulated Aerogels Produced By Monomer Vapor Deposition And Polymerization";"08/13/2028"
-"NASA Johnson Space Center";"Issued";"MSC-22931-1";6354540;"09/405,301";"Electro-Mechanically Actuated Magnetic Ring With Load Sensing Feedback And Closed Loop Control Docking/Berthing System For Alignment And Mating Of Multiple Vehicles, Structures, And/or Assemblies";"09/20/2019"
-"NASA Johnson Space Center";"Issued";"MSC-22936-1";6387399;"09/79,766";"Protein Crystal Encapsulation Process";"05/15/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22936-2";6558698;"09/733,391";"Microencapsulated Bioactive Agents And Method Of Making";"12/06/2020"
-"NASA Johnson Space Center";"Issued";"MSC-22936-3";6676964;"09/774,168";"Method For Determining The Three-Dimensional Structure Of A Protein";"01/26/2021"
-"NASA Johnson Space Center";"Issued";"MSC-22936-4";6599449;"09/774,169";"X-Ray Crystallography Reagent";"01/24/2021"
-"NASA Johnson Space Center";"Issued";"MSC-22937-1";6214300;"09/79,833";"Microencapsulation And Electrostatic Processing Device (MEPS)";"05/15/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22938-1";6103271;"09/79,770";"Low-Shear Microencapsulation & Electrostatic Coating Process";"05/15/2018"
-"NASA Johnson Space Center";"Issued";"MSC-22939-4";7968117;"12/100,009";"Externally Triggered Microcapsules";"07/09/2029"
-"NASA Johnson Space Center";"Issued";"MSC-22970-1";6253563;"09/337,208";"Solar-Powered Refrigeration System";"06/03/2019"
-"NASA Johnson Space Center";"Issued";"MSC-22970-2";6469487;"09/838,679";"Solar Powered Refrigeration System";"06/03/2019"
-"NASA Johnson Space Center";"Issued";"MSC-22970-3";6453693;"09/838,680";"Solar Powered Refrigeration System";"06/03/2019"
-"NASA Johnson Space Center";"Issued";"MSC-23029-1";6651739;"09/793,817";"Medium Frequency Pseudo Noise Geological Radar";"07/20/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23037-1";6864473;"09/988,855";"Variable Shadow Screen For Optical Devices";"11/14/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23041-1";6334302;"09/351,152";"Variable Specific Impulse Magnetoplasma Rocket (VASIMR)";"06/28/2019"
-"NASA Johnson Space Center";"Issued";"MSC-23049-3";6592579;"09/746,542";"Method For Selective Thermal Ablation";"06/28/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23049-4";6675050;"09/746,533";"Computer Program For Microwave Antenna";"05/07/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23076-1";6321746;"09/574,758";"Collapsable, Light, Portable Human Hyperbaric Chamber/Airlock System";"05/17/2020"
-"NASA Johnson Space Center";"Issued";"MSC-23092-1";6547189;"09/826,403";"Advanced, Large Volume, Highly Loaded, Hybrid Inflatable Pressure Vessel";"05/26/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23153-1";6995572;"09/803,613";"Coplanar Waveguide Ice Detection Sensor";"11/04/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23154-1";7113820;"09/906,013";"A Real-Time, High Frequency QRS Electrocardiograph.";"05/03/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23154-2";7539535;"11/345,687";"A Real-Time, High Frequency QRS Electrocardiograph";"07/13/2027"
-"NASA Johnson Space Center";"Issued";"MSC-23178-1";6997637;"10/5,820";"Deceleration Limiting Safety Crash Wall";"05/19/2022"
-"NASA Johnson Space Center";"Issued";"MSC-23193-1";6618010;"09/994,989";"Passive Noncoherent Tracking Of A Data-Modulated Signal";"11/14/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23277-1";7295309;"10/734,753";"Microcapsule Flow Sensor";"11/12/2024"
-"NASA Johnson Space Center";"Issued";"MSC-23303-1";7397774;"10/446,283";"Downlink Data Multiplexer";"01/16/2026"
-"NASA Johnson Space Center";"Issued";"MSC-23307-1";6559645;"10/28,962";"Detection Of Subterranean Metal Objects Using Differential Spectral Processing";"11/17/2020"
-"NASA Johnson Space Center";"Issued";"MSC-23309-1";7040319;"10/87,866";"Oxygen Partial Pressure Monitoring Device For Aircraft Oxygen Masks.";"04/27/2022"
-"NASA Johnson Space Center";"Issued";"MSC-23311-1";6650280;"09/953,612";"Mass Measurement During Fluid Flow Using An Integrated Sonic/Microwave Detector.";"09/14/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23314-1";6899009;"09/892,355";"Flexshield (Flexible Multi-Shock Shield Technology)";"06/26/2021"
-"NASA Johnson Space Center";"Issued";"MSC-23349-1";7415005;"10/283,354";"MCC Voice Over Internet Protocol (VOIP)";"08/08/2026"
-"NASA Johnson Space Center";"Application";"MSC-23349-2-SB";0;"12/170,614";"Ad Hoc Selection of Voice Over Internet Streams";
-"NASA Johnson Space Center";"Issued";"MSC-23424-1";6985606;"10/212,579";"Global Distribution Of Large Fluvial Fans/Potential Hydrocarbon Exploration Guide";"06/12/2024"
-"NASA Johnson Space Center";"Issued";"MSC-23427-1";6944504;"10/302,323";"Microwave Ablation Of Prostatic Cells Using A Separated Antenna Array";"07/23/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23436-1";7126553;"10/679,688";"Tri-Sector Deployable Array Antenna";"08/11/2024"
-"NASA Johnson Space Center";"Issued";"MSC-23443-1";6647855;"10/263,293";"Method And Apparatus For Deploying A Hypervelocity Shield";"09/30/2022"
-"NASA Johnson Space Center";"Issued";"MSC-23444-1";6932090;"10/361,046";"A Simple Countermeasure For Management Of Motion Sickness And Vestibular/Sensory-Motor Problems Associated With Space Flight And Terrestial Motion Sickness";"07/01/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23449-1";7386340;"10/402,866";"Method For Diagnosis Of Coronary Artery Disease And Related Conditions Using 12-Lead High Frequency QRS Electrocardiography";"12/30/2025"
-"NASA Johnson Space Center";"Issued";"MSC-23510-1";6851647;"10/417,377";"Portable Catapult Launcher For Small Aircraft";"04/03/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23518-1";7168935;"10/637,086";"Low Voltage Electron Beam Solid Freeform Fabrication System";"09/29/2024"
-"NASA Johnson Space Center";"Issued";"MSC-23538-1";6943619;"10/443,233";"Practical Active Capacitor Filter";"05/21/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23539-1";6943621;"10/443,234";"Auto-Routable, Configurable, Daisy Chainable Data Acquisition System";"08/16/2023"
-"NASA Johnson Space Center";"Issued";"MSC-23563-1";7270851;"10/985,081";"Nano-Encapsulated Aerogel";"05/14/2025"
-"NASA Johnson Space Center";"Issued";"MSC-23594-1";7125370;"10/845,608";"Articulating Subject Support For Resistive Exercise In The Horizontal Position";"02/22/2025"
-"NASA Johnson Space Center";"Issued";"MSC-23623-1";7212934;"11/370,379";"String Resistance Detector Concept";"03/06/2026"
-"NASA Johnson Space Center";"Issued";"MSC-23659-1";7094045;"10/734,754";"Pulse-Flow Microencapsulation System";"06/09/2024"
-"NASA Johnson Space Center";"Issued";"MSC-23659-2";7588703;"11/428,465";"Microencapsulation System And Method";"03/14/2027"
-"NASA Johnson Space Center";"Issued";"MSC-23668-1";7250075;"10/874,004";"Water Outlet Control Mechanism For Fuel Cell System Operation In Variable Gravity Environments";"11/04/2025"
-"NASA Johnson Space Center";"Issued";"MSC-23695-1";7249540;"11/177,652";"Torquing Tool Attachment For Round Connectors With Attached Cables";"08/27/2025"
-"NASA Johnson Space Center";"Issued";"MSC-23781-1";7410485;"11/40,613";"Directional Microwave Applicator/Antenna";"10/16/2026"
-"NASA Johnson Space Center";"Issued";"MSC-23805-1";7462141;"11/31,942";"Advanced Resistive Exercise Device (ARED)";"01/10/2027"
-"NASA Johnson Space Center";"Issued";"MSC-23881-1";7686529;"11/958,908";"Low Friction, Low Profile, High Moment Two-Axis Joint";"12/18/2027"
-"NASA Johnson Space Center";"Application";"MSC-23882-1";0;"12/899654";"Analog Strain Gage Conditioning System for Space Environment";
-"NASA Johnson Space Center";"Issued";"MSC-23906-1";7295884;"11/158,354";"Method for the Design and Analysis of the Primary Load Bearing Layer of an Inflatable Vessel";"07/20/2026"
-"NASA Johnson Space Center";"Issued";"MSC-23933-1";7543779;"11/625,066";"Low Impact Docking System (LIDS) A.k.a, International Berthing Docking Mechanism (IBDM)";"02/22/2028"
-"NASA Johnson Space Center";"Issued";"MSC-23954-1";7357606;"11/357,461";"Self-Advancing Step-Tap Drill";"08/14/2026"
-"NASA Johnson Space Center";"Issued";"MSC-23988-1";8343740;"12/58,227";"Micro-Organ Device";"10/31/2031"
-"NASA Johnson Space Center";"Issued";"MSC-23988-2";8580546;"13/688982";"Micro-Organ Device";"11/29/2032"
-"NASA Johnson Space Center";"Issued";"MSC-23997-2";7815149;"12/388,345";"Magnetic Capture Docking Mechanism";"04/01/2025"
-"NASA Johnson Space Center";"Issued";"MSC-24000-1";8076136;"/0";"Development And Characterization Of A Three-Dimensional Tissue Culture Model Of Bone";"10/31/2021"
-"NASA Johnson Space Center";"Issued";"MSC-24042-1";7411198;"11/421,174";"New Architecture for Space Radiation Detection";"02/01/2027"
-"NASA Johnson Space Center";"Issued";"MSC-24106-1";7577482;"11/683,770";"Network System Plug And Play Through Positional And Functional Connectivity Identification";"04/21/2028"
-"NASA Johnson Space Center";"Issued";"MSC-24115-1";8022307;"11/772,999";"Method and Apparatus for Fabric Circuits and Antennas";"06/19/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24149-1";8122646;"12/402,986";"A Description Of An Improved Method For Folding, Assembling, And Weight Relief Of An Inflatable Shell";"02/04/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24149-2";8266866;"13/346137";"A Description Of An Improved Method For Folding, Assembling, And Weight Relief Of An Inflatable Shell";"03/12/2029"
-"NASA Johnson Space Center";"Issued";"MSC-24164-1";8338114;"11/789,117";"Methods For Growing Tissue-Like 3D Assemblies (TLA) Of Human Broncho-Epithelial Cells";"05/04/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24169-1";7862946;"11/671,210";"Self-Regulating Control of Parasitic Electric Loads in Fuel Cell Power Systems";"11/05/2029"
-"NASA Johnson Space Center";"Issued";"MSC-24180-1";7935259;"12/167,332";"Water Filtering Device, 100% Effective";"09/14/2029"
-"NASA Johnson Space Center";"Issued";"MSC-24184-1";8116350;"12/353,755";"Ultra-Wideband (UWB) Two-Cluster Angle Of Arrival (AOA) Passive Tracking System Design";"07/22/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24201-1";7509774;"11/610,295";"A Description Of An Improved Method For Attaching An Inflatable Shell To A Rigid Interface";"06/13/2027"
-"NASA Johnson Space Center";"Issued";"MSC-24207-1";7604782;"11/625,670";"X-38 Advanced Sublimator";"04/12/2028"
-"NASA Johnson Space Center";"Issued";"MSC-24215-1";8070105;"11/956,826";"A Description Of A Concentric Nested Torroidal Inflatable Habitat";"10/04/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24216-1";8047473;"12/240,537";"A Description Of An Octonode Connecting Node Concept And Method";"01/10/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24228-1";7521682;"11/421,196";"New Architecture For Space Radiation Detection";"03/07/2027"
-"NASA Johnson Space Center";"Issued";"MSC-24238-1";8388613;"12/757657";"Microwave Tissue Welding For Wound Closure";"11/17/2031"
-"NASA Johnson Space Center";"Issued";"MSC-24263-1";7805276;"11/958,937";"Impact Detection System";"02/12/2029"
-"NASA Johnson Space Center";"Issued";"MSC-24273-1";7840387;"11/778,858";"Method For The Design And Analysis Of The Primary Load Bearing Layer That Interfaces To The Structural Pass-through Of An Inflatable Vessel";"07/31/2029"
-"NASA Johnson Space Center";"Application";"MSC-24314-1";0;"12/880602";"HDSS - High Density Spot Seeding";
-"NASA Johnson Space Center";"Issued";"MSC-24346-1";8466776;"12/828558";"Extended Range RFID and Sensor Tag";"09/05/2031"
-"NASA Johnson Space Center";"Issued";"MSC-24387-1";8011229;"12/323,912";"Artificial Intelligence Algorithm For Assessing Postural Stability During Normal Daily Activities Using Shoe Insert Pressure Sensors";"11/26/2028"
-"NASA Johnson Space Center";"Issued";"MSC-24441-1";7905946;"12/190,364";"A Capillary-based Static Phase Separator For Highly Variable Wetting Conditions";"07/02/2029"
-"NASA Johnson Space Center";"Issued";"MSC-24444-1";8577120;"12/900644";"Flash Infrared (IR) Thermography Contrast Computer Simulation And Data Analysis Software";"04/22/2031"
-"NASA Johnson Space Center";"Application";"MSC-24451-1";0;"13/057399";"Rapid Detection Of The Varicella Zoster Virus (VZV) In Saliva Samples";
-"NASA Johnson Space Center";"Issued";"MSC-24464-1";7859292;"12/502,575";"Reconfigurable SEU/SET Tolerance for FPGAs";"07/14/2029"
-"NASA Johnson Space Center";"Issued";"MSC-24466-1";8183870;"12/370,021";"Battery cell voltage sensing and balancing using addressable transformers with electrical isolation and minimal additional connector pins and circuitry.";"07/01/2030"
-"NASA Johnson Space Center";"Application";"MSC-24490-1";0;"12/612,171";"High Altitude Hydration System";
-"NASA Johnson Space Center";"Application";"MSC-24506-1";0;"12/971919";"A Method to Measure and Estimate Normalized contrast In Infrared Flash Thermography";"01/08/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24508-1";8343403;"12/174,380";"METHOD FOR MAKING A MICROPOROUS MEMBRANE";"12/31/2030"
-"NASA Johnson Space Center";"Issued";"MSC-24509-1";8570047;"12/855384";"Battery Fault Detection with Saturating Transformers";"02/02/2032"
-"NASA Johnson Space Center";"Issued";"MSC-24525-1";8384614;"12/894749";"Deployable Fresnel Rings";"10/11/2031"
-"NASA Johnson Space Center";"Application";"MSC-24541-1";0;"12/899815";"Electromagnetic Time-Variance Magnetic Fields (TVMF) to generate, and re-grow Cartilage Cells by a Noninvasive Method";
-"NASA Johnson Space Center";"Issued";"MSC-24569-1";8176809;"12/331844";"Planar Torsion Spring";
-"NASA Johnson Space Center";"Issued";"MSC-24570-1";8276958;"12/269579";"Bidirectional Tendon Terminator";
-"NASA Johnson Space Center";"Issued";"MSC-24571-1";8371177;"12/241309";"Tendon Tension Sensor";
-"NASA Johnson Space Center";"Application";"MSC-24685-1";8056423;"12/269,552";"Sensing the Tendon Tension through the Conduit Reaction Forces";"11/12/2028"
-"NASA Johnson Space Center";"Application";"MSC-24686-1";8060250;"12/335,153";"Joint Space Impedance Control for Tendon-Driven Manipulators";"12/15/2028"
-"NASA Johnson Space Center";"Issued";"MSC-24687-1";8170718;"12/338697";"Multiple Priority Operational Space Impedance Control";
-"NASA Johnson Space Center";"Issued";"MSC-24688-1";8280837;"12/474068";"CONTACT STATE ESTIMATION FOR MULTI-FINGER ROBOT HANDS USING PARTICLE FILTERS";
-"NASA Johnson Space Center";"Issued";"MSC-24689-1";7784363;"12/241320";"PHALANGE TACTILE LOAD CELL";"09/30/2028"
-"NASA Johnson Space Center";"Issued";"MSC-24732-1";8364314;"12/624445";"METHOD AND APPARATUS FOR AUTOMATIC CONTROL OF A HUMANOID ROBOT";
-"NASA Johnson Space Center";"Application";"MSC-24733-1";0;"13/349265";"Pyrometer";
-"NASA Johnson Space Center";"Application";"MSC-24734-1";8498741;"12/564088";"Dexterous Humanoid Robotic Wrist";
-"NASA Johnson Space Center";"Application";"MSC-24735-1";8467903;"12/564086";"Tendon Driven Finger Actuation System";
-"NASA Johnson Space Center";"Issued";"MSC-24736-1";8291788;"12/564090";"Rotary Series Elastic Actuator";
-"NASA Johnson Space Center";"Issued";"MSC-24737-1";8401700;"12/564124";"ACTUATOR AND ELECTRONICS PACKAGING FOR EXTRINSIC HUMANOID HAND";
-"NASA Johnson Space Center";"Application";"MSC-24738-1";0;"12/564094";"FRAMEWORK AND METHOD FOR CONTROLLING A ROBOTIC SYSTEM USING A DISTRIBUTED COMPUTER NETWORK";
-"NASA Johnson Space Center";"Application";"MSC-24739-1";8511964;"12/564084";"Dexterous Humanoid Robot";
-"NASA Johnson Space Center";"Application";"MSC-24740-1";0;"12/564078";"Dexterous Humanoid Robotic Finger";
-"NASA Johnson Space Center";"Issued";"MSC-24741-1";8255079;"12/564095";"Human Grasp Assist";"09/23/2029"
-"NASA Johnson Space Center";"Application";"MSC-24742-1";8442684;"12/564076";"Integrated High Speed FPGA Based Torque Controller";
-"NASA Johnson Space Center";"Application";"MSC-24743-1";8250901;"12/564092";"Rotary Absolute Position Sensor Calibration";
-"NASA Johnson Space Center";"Application";"MSC-24744-1";8369992;"12/564083";"Diagnostics, prognostics & health management for humanoid robotics and method thereof";
-"NASA Johnson Space Center";"GM";"MSC-24745-1";8424941;"12/564085";"ROBOTIC THUMB ASSEMBLY";
-"NASA Johnson Space Center";"Application";"MSC-24746-1";8260460;"12/564096";"Interactive Robot Control System";
-"NASA Johnson Space Center";"Issued";"MSC-24747-1";8244402;"12/564074";"VISUAL PERCEPTION SYSTEM AND METHOD FOR A HUMANOID ROBOT";
-"NASA Johnson Space Center";"Issued";"MSC-24750-1";8483882;"12/686512";"HIERARCHICAL ROBOT CONTROL SYSTEM AND METHOD FOR CONTROLLING SELECT DEGREES OF FREEDOM OF AN OBJECT USING MULTIPLE MANIPULATORS";
-"NASA Johnson Space Center";"Issued";"MSC-24751-1";8412376;"12/720725";"TENSION DISTRIBUTION IN A TENDON-DRIVEN ROBOTIC FINGER";
-"NASA Johnson Space Center";"Issued";"MSC-24752-1";8033876;"12/706744";"CONNECTOR PIN AND METHOD";
-"NASA Johnson Space Center";"Application";"MSC-24753-1";0;"12/720727";"UNDERACTUATED DESIGN AND CONTROL OF A TENDON-DRIVEN FINGER";
-"NASA Johnson Space Center";"Application";"MSC-24755-1";0;"12/698832";"Architecture For Robust Force and Impedance Control Of Series Elastic Actuators";
-"NASA Johnson Space Center";"Application";"MSC-24758-1";0;"14/184278";"RFID Cavity";"03/11/2033"
-"NASA Johnson Space Center";"Application";"MSC-24798-1";0;"13/789903";"Soft Decision Analyzer (SDA)";"03/08/2033"
-"NASA Johnson Space Center";"Application";"MSC-24811-1";0;"13/461,487";"Self-enclosed and pipette free DNA/RNA Isolation device";
-"NASA Johnson Space Center";"Application";"MSC-24813-1";0;"13/791290";"Pre-Polymerase Chain Reaction Preparation Kit";"08/06/2032"
-"NASA Johnson Space Center";"Application";"MSC-24817-1";8265792;"12/760954";"Method and Apparatus for Calibrating Multi-Axis Load Cells in a Dexterous Robot";
-"NASA Johnson Space Center";"Application";"MSC-24837-1";0;"12/787479";"Applying Workspace Limitations in a Velocity-Controlled Robotic Mechanism";
-"NASA Johnson Space Center";"Application";"MSC-24919-1";0;"13/790591";"RFID Waveguide, Antenna, and Cavity Sensors";"07/13/2032"
-"NASA Johnson Space Center";"Issued";"MSC-24926-1";8412378;"12/629637";"IN-VIVO TENSION CALIBRATION IN TENDON-DRIVEN MANIPULATORS";
-"NASA Johnson Space Center";"Issued";"MSC-24930-1";8489239;"12/916803";"ROBUST OPERATION OF TENDON-DRIVEN ROBOT FINGERS USING FORCE AND POSITION-BASED CONTROL LAWS";
-"NASA Johnson Space Center";"Application";"MSC-25026-1";0;"13/354552";"Battery Charge Equalizer with transformer array";
-"NASA Johnson Space Center";"Issued";"MSC-25053-1";"D628,609";"29/359105";"ROBOT";"04/06/2030"
-"NASA Johnson Space Center";"Application";"MSC-25056-1";0;"13/014901";"SYSTEM AND METHOD FOR TENSIONING A ROBOTICALLY ACTUATED TENDON";
-"NASA Johnson Space Center";"Issued";"MSC-25084-1";8067909;"12/474430";"METHOD AND APPARATUS FOR ELECTROMAGNETICALLY BRAKING A MOTOR";"05/29/2029"
-"NASA Johnson Space Center";"Application";"MSC-25084-DE";0;"12/474430";"Method and Apparatus for Electromagnetically Braking a Motor";
-"NASA Johnson Space Center";"Application";"MSC-25084-JP";0;"12/474430";"Method and Apparatus for Electromagnetically Braking a Motor";
-"NASA Johnson Space Center";"Application";"MSC-25091-1";0;"13/199484";"FRET-Aptamer Assays for C-Telopeptide, Creatinine and Vitamin D";"08/31/2031"
-"NASA Johnson Space Center";"Issued";"MSC-25121-1";8483877;"12/875254";"WORKSPACE SAFE OPERATION OF A FORCE- OR IMPEDANCE-CONTROLLED ROBOT";
-"NASA Johnson Space Center";"Application";"MSC-25149-1";0;"13/196252";"Controlling Execution Sequence Using Tactile-Classification during manipulation by a humanoid robot";
-"NASA Johnson Space Center";"Application";"MSC-25216-1";0;"13/439,546";"METHOD AND COMPOSITION FOR AMELIORATING THE EFFECTS FOR A SUBJECT EXPOSED TO RADIATION OR OTHER SOURCES OF OXIDATIVE STRESS";
-"NASA Johnson Space Center";"Application";"MSC-25217-1";0;"13/272442";"METHOD FOR DYNAMIC OPTIMIZATION OF A ROBOT CONTROL INTERFACE";
-"NASA Johnson Space Center";"Application";"MSC-25219";0;"13/207911";"FAST GRASP CONTACT COMPUTATION FOR A SERIAL ROBOT";
-"NASA Johnson Space Center";"Application";"MSC-25265-1";0;"13/851778";"New method and device for digital to analog transformations and reconstructions of multichannel electrocardiograms";"10/30/2032"
-"NASA Johnson Space Center";"Application";"MSC-25286-1";0;"14/252660";"A chemical formulation to stabilize urine and minimize the precipitation potential of minerals during distillation of urine";"03/11/2033"
-"NASA Johnson Space Center";"Application";"MSC-25313-1";0;"13/774835";"Hydrostatic Hyperbaric Chamber";"02/22/2033"
-"NASA Johnson Space Center";"Application";"MSC-25318";0;"13/408668";"HUMAN GRASP ASSIST SOFT";
-"NASA Johnson Space Center";"Application";"MSC-25319";0;"13/408656";"HUMAN GRASP ASSIST ";
-"NASA Johnson Space Center";"Application";"MSC-25320";0;"13/408675";"HUMAN GRASP ASSIST CONTROLS";
-"NASA Johnson Space Center";"Application";"MSC-25327-1";0;"13/459557";"COMMUNICATION SYSTEM AND METHOD";
-"NASA Johnson Space Center";"Application";"MSC-25386-1";0;"13/951671";"Active Response Gravity Offload System - Vertical Software Release";"07/26/2033"
-"NASA Johnson Space Center";"Application";"MSC-25590-1";0;"13/790927";"Systems and Methods for RFID-Enabled Information Collection";
-"NASA Johnson Space Center";"Application";"MSC-25604-1";0;"13/791584";"Systems and Methods for RFID-Enabled Dispenser";
-"NASA Johnson Space Center";"Application";"MSC-25605-1";0;"13/790721";"Switch Using Radio Frequency Identification";
-"NASA Johnson Space Center";"Application";"MSC-25626-1";0;"14/200,122";"RFID Torque-Sensing Tag System for Fasteners";"03/07/2034"
-"NASA Johnson Space Center";"Application";"MSC-25632-1";0;"13/803017";"ROBOT TASK COMMANDER WITH EXTENSIBLE PROGRAMMING ENVIRONMENT
-";"03/14/2033"
-"NASA Johnson Space Center";"Application";"MSC-25758-1";0;"14/184303";"Methods, Systems and Apparatuses for Radio Frequency Identification";"03/11/2033"
-"NASA Johnson Space Center";"Application";"MSC-25759-1";0;"14/184337";"Methods, Systems and Apparatuses for Radio Frequency Identification";"03/11/2033"
-"NASA Johnson Space Center";"Application";"MSC-25760-1";0;"14/184365";"Methods, Systems and Apparatuses for Radio Frequency Identification";"03/11/2033"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-17734-1";0;"07/700,830";"Formation Of Self-Aligned Guard Ring For Silicide Schottky-Barrier Diodes Used For Infrared Detection";
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-19289-1";6513023;"09/412,199";"On-Chip Learning In VLSI Hardware";"10/01/2019"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-19769-1";0;"08/868,175";"Automated Cargo Inventory Identification Transponder";
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-19855-1";6374630;"09/853,931";"Champagne Heat Pump";"05/09/2021"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-20031-1";6828935;"10/176,761";"Receiver Controlled Phased Array Antenna";"07/19/2022"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-20837-1";6526556;"09/591,386";"MORPHING TECHNIQUE FOR ACCELERATED EVOLUTIONARY SYNTHESIS OF ELECTRONIC CIRCUITS";"06/07/2020"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-21136-1";0;"10/219,384";"A CMOS ACTIVE PIXEL SENSOR (APS) FOR READING COMPACT DISCS";
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-30703-1";7240208;"10/424,287";"ENCRYPTING DIGITAL CAMERA";"04/23/2023"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-40040-1";7480984;"40/863,835";"A Concept For Suppressing Sublimation In Advanced Thermoelectric Devices";"06/07/2024"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-40407-1";7592747;"11/056,633";"Piezoelectrically Enhanced PhotoCathode (PEPC)";"02/09/2025"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-40827-1";7156189;"11/1,465";"SELF-MOUNTABLE AND EXTRACTABLE ULTRASONIC/SONIC ANCHOR (U/S-Anchor)";"12/01/2024"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-41446-1";8358723;"11/602,440";"Architecture Of An Autonomous Radio";"09/12/2031"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-41506-2";8492160;"12/720,103";"BIOMARKER SENSOR SYSTEM AND METHOD FOR MULTI-COLOR IMAGING AND PROCESSING OF SINGLE-MOLECULE LIFE SIGNATURES";"04/09/2031"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-41511-1";7385462;"11/376,638";"Wideband (31 To 36 GHz) 24-Way Radial Power Combiner/Divider Fed By A Marie Transducer";"03/14/2026"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-41982-1";8078309;"12/415,206";"Inverse Tomographic Approach To Create Arbitrary Sidewall Geometries In 3D Using LiGA Technologies";"03/03/2021"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-42131-1";7824247;"11/756,819";"PORTABLE RAPID AND QUIET DRILL (PRAQD)";"11/02/2027"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-42312-1";7184624;"11/422,147";"Slow light in chains of vertically coupled whispering gallery mode resonators";"06/05/2026"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-42466-1";7764384;"11/924,766";"Swept frequency laser metrology system";"10/26/2027"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-42563-1";7353768;"11/456,441";"Submersible Vehicle Propulsion and Power Generation";"07/10/2026"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-42672-1";7996112;"11/756,793";"Micro Robot Explorer (SpiderBot) Mesh Crawler";"06/08/2030"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-43213-1";7850861;"11/764,359";"Patterning packing materials for Fluidic Channels";"10/13/2029"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-43348-1";7809521;"12/40,459";"Precise delay measurement circuit on FPGAs";"01/31/2029"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-43361-1";7773121;"11/741,213";"High Resolution, Continuous Field of View, Non-Rotating Imaging Sensor Head";"10/15/2028"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-43524-1";7773362;"11/683,007";"Dusty Plasma Thruster";"01/03/2029"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-44079-1";8022860;"11/781,022";"Enhanced Interference Cancellation and Telemetry Reception with a Single Parabolic Dish Antenna using a Focal Plane Array";"04/30/2030"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-44765-1";7740088;"11/928,069";"Ultrasonic/Sonic Rotary-Hammer Drill (USRoHD)";"04/15/2028"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-44914-1";8407979;"11/926,279";"Magnetically-Conformed, Variable Area Discharge Chamber for Hall Thruster Plasma Accelerators";"06/08/2031"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-45053-1";8057283;"12/119,989";"The process of significant improving of optical quality factor of whispering gallery mode resonator.";"09/15/2030"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-45911-1";8163094;"12/508,006";"Method to Improve Indium Bump Bonding Via Indium Oxide Removal Using a Two Step Plasma Process";"08/16/2030"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-45948-1";7843650;"12/490,422";"Monolithic Afocal Telescope";"06/24/2029"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-46253-1";0;"12/237,159";"Generation of optical combs in a whispering gallery mode resonator from a bichromatic pump";
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-46843-1";8169371;"12/541,725";"A single-layer, all-metal patch antenna element with wide bandwidth";"09/25/2030"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-46938-1";8026768;"12/691,070";"A 201Hg+ co-magnetometer for 199Hg+ trapped ion space atomic clocks";"04/03/2030"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-47300-1";0;"13/017,174";"Textured Si Anode for High Capacity, Rapid Charge Rate Li Ion Batteries";
-"NASA Jet Propulsion Laboratory";"Application";"NPO-47300-2";0;"13/895,499";"Textured Si Anode for High Capacity, Rapid Charge Rate Li Ion Batteries";"01/31/2031"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-47310-1";8502987;"13/018,672";"Coherent Detector for Near-Angle Scattering and Polarization Characterization of Telescope Mirror Coatings";"03/24/2032"
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-47604-1";8649000;"13/277,954";"Surface Enhanced Raman Scattering using Silica Whispering-Gallery Mode Resonators";"07/10/2032"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-47717-1";;"13/281,683";"360-Degree Camera Head for Unmanned Surface Sea Vehicles";
-"NASA Jet Propulsion Laboratory";"Issued";"NPO-47869-1";8649609;"13/071,299";"FPGA Vision Data Architecture";"04/17/2032"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-47881-1";;"14/151,684";"Pulsed Plasma Lubricator (PPL) Technology for the In Situ Replenishment of Dry Lubricants in Extreme Environments";
-"NASA Jet Propulsion Laboratory";"Application";"NPO-48140-1";;"13/456,451";"Probabilistic Surface Characterization for Safe Landing Hazard Detection and Avoidance";
-"NASA Jet Propulsion Laboratory";"Application";"NPO-48413-1";;"13/757,929";"Simple Laser-Communications Terminal for Downlink from Earth-Orbit at Rates Exceeding 10 Gb/s";"02/04/2033"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-48539-1";;"13/858,267";"Neutral mounting of whispering gallery mode resonators for suppression of acceleration-induced frequency fluctuations";"04/08/2033"
-"NASA Jet Propulsion Laboratory";"Application";"NPO-49086-1";;"14/101,547";"Electride Mediated Surface Enhanced Raman Spectroscopy";"12/10/2033"
-"NASA Stennis Space Center";"Issued";"SSC-00040";5726632;"08/622,178";"HANDHELD HYDROGEN FIRE IMAGER";"03/14/2016"
-"NASA Stennis Space Center";"Issued";"SSC-00050";6020587;"09/3,212";"A HAND HELD PLANT STRESS DETECTION SYSTEM";"01/06/2018"
-"NASA Stennis Space Center";"Issued";"SSC-00247";8618933;"11/866,042";"Valve Health Monitoring System Utilizing Smart Instrumentation for Real Time and Historical Data Tracking";"05/03/2032"
-"NASA Stennis Space Center";"Issued";"SSC-00264";8336849;"12/704193";"Conical Seat Shut Off Valve";"01/13/2031"
-"NASA Stennis Space Center";"Issued";"SSC-00327";8401820;"12/566,111";"IN SITU HEALTH MONITORING OF PIEZOELECTRIC SENSORS";"07/31/2030"
+"Center"\"Status"\"Case Number"\"Patent Number"\"Application SN"\"Title"\"Patent Expiration Date"
+"NASA Kennedy Space Center"\"Application"\"KSC-12871"\0\"13/033,085"\"Polyimide Wire Insulation Repair System"\
+"NASA Ames Research Center"\"Issued"\"ARC-14048-1"\5694939\"08/543,093"\"Autogenic-Feedback Training Exercise Method & System"\"10/03/2015"
+"NASA Ames Research Center"\"Issued"\"ARC-14231-1"\6109270\"09/017,519"\"Multimodality Instrument For Tissue Characterization"\"02/04/2017"
+"NASA Ames Research Center"\"Issued"\"ARC-14231-2DIV"\6976013\"10/874,003"\"Metrics For Body Sensing System"\"06/16/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-14231-3"\6718196\"09/652,299"\"Multimodality Instrument For Tissue Characterization"\"02/04/2017"
+"NASA Ames Research Center"\"Issued"\"ARC-14275-1"\6445390\"09/226,673"\"Automated Triangle Geometry Processing For Surface Modeling And Cartesian Grid Generation (CART3D)"\"12/24/2018"
+"NASA Ames Research Center"\"Issued"\"ARC-14281-1"\6606612\"09/374,491"\"Aerodynamic Design Using Neural Networks"\"08/13/2019"
+"NASA Ames Research Center"\"Issued"\"ARC-14281-3"\7191161\"10/637,087"\"Method For Constructing Composite Response Surfaces By Combining Neural Networks With Polynomial Interpolation Or Estimation Techniques"\"11/18/2020"
+"NASA Ames Research Center"\"Issued"\"ARC-14359-1"\6314362\"09/498,123"\"A Direct-To Controller Tool (A Component Of The CTAS Software Suite)"\"02/02/2020"
+"NASA Ames Research Center"\"Issued"\"ARC-14494-1"\6720984\"09/606,107"\"Bio-Electric Keyboard/Mouse/Joystick Interface Software/Algorithm"\"06/13/2020"
+"NASA Ames Research Center"\"Issued"\"ARC-14512-1"\6823333\"09/800,309"\"Keyword-in-context Search Method And Software For Information Retrieval From Collections Of Text Documents (Quorum/Perilog)"\"03/02/2021"
+"NASA Ames Research Center"\"Issued"\"ARC-14513-1"\6741981\"09/800,311"\"Model-based Phrase Search Method And Software For Information Retrieval From Collections Of Text Documents (Quorum/Perilog)"\"09/14/2021"
+"NASA Ames Research Center"\"Issued"\"ARC-14514-1"\6697793\"09/800,313"\"Method And Software For Using Implicit Phrase Models To Generate Prominent Phrases Contained In Collections Of Text Documents (Quorum/Perilog)"\"03/02/2021"
+"NASA Ames Research Center"\"Issued"\"ARC-14515-1"\6721728\"09/800,310"\"Method And Software For Extracting And Distilling Topically And Situationally Relevant Phrases From Collections Of Text Documents (Quorum/Perilog)"\"07/26/2021"
+"NASA Ames Research Center"\"Issued"\"ARC-14556-1"\7346172\"09/822470"\"Spatially-modulated Auditory Alert Having Enhanced Detection"\"08/24/2022"
+"NASA Ames Research Center"\"Issued"\"ARC-14569-1"\7783130\"11/045,041"\"Spatial Standard Observer"\"03/26/2028"
+"NASA Ames Research Center"\"Issued"\"ARC-14569-2"\8139892\"12/807,375"\"Spatial Standard Observer"\"01/24/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-14586-1DIV"\7293001\"11/274,744"\"A Hybrid Neural Network And Support Vector Machine Method For Optimization"\"01/07/2022"
+"NASA Ames Research Center"\"Issued"\"ARC-14613-1"\6858197\"10/099,247"\"A Novel Technique That Allows For The Deposition And Patterning Of A Catalyst Onto A Surface For The Growth Of Single-Walled Carbon Nanotubes"\"11/30/2019"
+"NASA Ames Research Center"\"Issued"\"ARC-14652-1"\7375826\"10/956,517"\"3D Laser Scanner"\"03/25/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-14653-1"\7702427\"10/914,783"\"Future ATM (Air Traffic Management) Concepts Evaluation Tool (FACET)"\"07/30/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-14653-2"\8290696\"12/694,966"\"Future ATM (Air Traffic Management) Concepts Evaluation Tool (FACET)"\"07/30/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-14661-1"\7276266\"10/320,698"\"A Plasma Apparatus And Process For Functionalization Of Carbon Nanotubes"\"12/13/2022"
+"NASA Ames Research Center"\"Issued"\"ARC-14661-2"\7473436\"10/828,524"\"Improved Functionalization Of Carbon Nanotubes"\"12/13/2022"
+"NASA Ames Research Center"\"Issued"\"ARC-14661-3"\7767270\"11/387,503"\"Selective Functionalization Of Carbon Nanotubes Based Upon Distance Traveled"\"11/05/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-14662-1"\6968338\"10/232,975"\"Advanced XML Database Integration Technique For Managing Unstructured Documents (NETMARK) (Part of NTTS Suite)"\"07/18/2023"
+"NASA Ames Research Center"\"Issued"\"ARC-14682-2"\7333735\"10/885,533"\"Communication Using VCSEL Laser Array"\"11/03/2023"
+"NASA Ames Research Center"\"Issued"\"ARC-14710-1"\7231329\"10/706,478"\"Elimination Of Parameter Input Requirement For Elliptic Grid Generation Methods In Engineering"\"03/11/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-14733-1"\6972056\"10/135,013"\"An Environmentally Compatible Method To Purify Carbon Nanotubes"\"01/03/2023"
+"NASA Ames Research Center"\"Issued"\"ARC-14743-1"\7767305\"10/758611"\"High-Efficiency Tantalum-Based Ceramics (HETC)"\"01/14/2024"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-008-014"\8047472\"12/45,970"\"IMPROVED RAM BOOSTER"\"03/11/2028"
+"NASA Ames Research Center"\"Issued"\"ARC-14744-1US"\7816491\"10/494,853"\"Ordered Biological Nanostructures Formed From Chaperonin Polypeptides"\"05/06/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-14744-2"\7795388\"11/194,991"\"A Versatile Platform For Nanotechnology Based On Circular Permutations Of Chaperonin Protein"\"05/06/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-14940-1"\7135172\"10/238,515"\"Bucky Paper As An Artificial Support Membrane In Retinal Cell Transplantation"\"06/12/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-14941-1"\6755530\"10/198,672"\"Carbon Nanotubes As A Prototype Interface For Retinal Cell Recording And Stimulation (Vision Chip)"\"10/18/2022"
+"NASA Ames Research Center"\"Issued"\"ARC-14950-1"\7596416\"10/928,874"\"Program Management Tool (PMT) Also Known As Business Intelligence (BI)"\"07/22/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-14950-2"\8224472\"12/211,439"\"Enhanced Project Management Tool"\"10/20/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-14970-1"\7129857\"10/789,049"\"Intelligent Weather Agent"\"07/20/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15040-1"\8200486\"10/457,696"\"Sub Auditory Speech Recognition Based On Electromyographic Signals"\"09/14/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-15041-2"\7206674\"10/923,156"\"Information Display System For Atypical Flight Phase"\"05/21/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15042-2"\7217650\"10/816,576"\"Metallic Nanowire Interconnections For Integrated Circuit Fabrication"\"03/11/2023"
+"NASA Ames Research Center"\"Issued"\"ARC-15058-1"\7383238\"10/789,029"\"Inductive Monitoring System - System Health Monitoring Software That Learns System Behavior From Data (IMS)"\"03/12/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-15073-1"\7590606\"10/703,039"\"InvestigationOrganizer: Information Storage, Modeling And Visualization Support For Accident/Mishap Investigations (Part Of A Suite Of Software That Includes ARC-15069, ARC-15070 And ARC-15073) "\"04/30/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15088-1"\7070923\"10/608,884"\"Carbon Nanotube Bucky Paper Cages For Immune Shielding Of Cells And Tissue For Transplantation"\"09/20/2023"
+"NASA Ames Research Center"\"Issued"\"ARC-15101-1"\7113265\"10/808,704"\"Sample Handling Device For X-ray Diffraction Instruments"\"03/17/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15157-1"\7286573\"10/923,160"\"A Method Of Converting Quantum Wells From Type-II To Type-I And Of Enhancing Interband Optical Gain "\"03/11/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-15171-1"\7650232\"11/239,456"\"Trajectory Specification For High-Capacity Air Traffic Control"\"05/25/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15173-1"\7273095\"10/825,795"\"Embedded Carbon Nanotube Array As High Performance Thermal Conductors"\"03/11/2023"
+"NASA Ames Research Center"\"Issued"\"ARC-15173-2"\7784531\"11/900,131"\"Nanoengineered Thermal Materials Based On Carbon Nanotube Array Composites"\"02/16/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15201-1"\7381459\"10/779,504"\"Toughened Uni-piece Fibrous Reduced Oxidation Ceramic (TUFROC) Light-Weight Thermal Protection System For Use On Space Vehicles During Atmospheric Entry At Hypersonic Speed"\"02/12/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15201-2"\7314648\"10/911,747"\"Toughened Uni-piece Fibrous Reinforced Oxidation-Resistant Composite (TUFROC)"\"02/12/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15204-1"\7949472\"10/885,537"\"Nanopore Pipetts For Structural Characterization Of Single Polymeric Biomelecules"\"01/14/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15204-1DIV"\8494782\"13/092,048"\"Nanopore Pipetts For Structural Characterization Of Single Polymeric Biomelecules"\"06/24/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15205-1"\7939734\"10/873,996"\"The Electrochemical Biosensors Using Carbon Nanotube Nanoelectrode Arrays"\"06/14/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15312-1"\7672969\"11/513,429"\"Context Based Configuration Management Concept"\"08/25/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15314-1"\7718223\"11/007,913"\"Provision Of Carbon Nanotube Arrays Of Variable Density For IC Hot Spot Control"\"02/12/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15314-2"\7704547\"11/472,516"\"Carbon Nanotube Growth Density Control"\"12/07/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15315-1"\7378963\"11/239,449"\"Reconfigurable Auditory-visual Display For Multi-channel Control Center And Rescue Communications"\"01/06/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15356-2"\7161501\"11/66,650"\"Display Of Aircraft Energy State For Flight Operations Quality Assurance (FOQA) Programs"\"09/22/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15356-3"\7212135\"11/066649"\"Real-Time Analysis And Display Of Aircraft Approach Maneuvers "\"09/22/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15370-1"\7698274\"10/956,524"\"Selective Access And Editing In A Database (Part of NTTS Suite)"\"03/18/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15392-1"\7313475\"11/053,713"\"Delay Banking: Collaborative Decision Making For Airspace-user Priority In Tactical Flow Restrictions"\"04/04/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-15404-1"\7288490\"11/009,854"\"Use Of A Single Electrode To Orient Carbon Nanotube Growth"\"12/07/2024"
+"NASA Ames Research Center"\"Issued"\"ARC-15437-1"\7438422\"11/340,816"\"Low Cost Portable Planetarium Imaging System"\"05/14/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15443-1"\7531775\"11/251,006"\"A Tracking Sunphotometer Without Moving Parts "\"01/31/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15460-1"\7426848\"11/203,576"\"Discharge Based Gas Sensor Array Using Self-Oriented Regular Vertical Array Of Carbon Nanotubes"\"08/05/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-15462-1"\7574338\"11/340002"\"Finite-Difference Simulation And Visualization Of Elastodynamics In Time-Evolving Generalized Curvilinear Coordinates "\"07/29/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15487-1"\7796026\"11/111,620"\"Electronic Firefighter Escape Trail"\"06/04/2028"
+"NASA Ames Research Center"\"Issued"\"ARC-15506-1"\7529633\"11/203,589"\"Applications Of Carbon Nanotube Hold-Off Voltages"\"10/22/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15519-1"\7574357\"11/169,265"\"Security Applications For Subvocal Speech"\"11/09/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15566-1"\7801687\"11/178,079"\"Gas Sensors Based on Coated and Doped Carbon Nanotubes"\"05/26/2029"
+"NASA Ames Research Center"\"Issued"\"ARC-15566-2"\8000903\"11/416,505"\"Coated Or Doped Carbon Nanotube Network Sensors As Affected By Environmental Parameters And Elapsed Time"\"09/15/2029"
+"NASA Ames Research Center"\"Issued"\"ARC-15566-3"\7875455\"11/489,803"\"Nanotechnology Sensors For Determination Of Chemical Substances In An Oil Reservoir"\"12/17/2028"
+"NASA Ames Research Center"\"Issued"\"ARC-15566-5"\7623972\"11/591,630"\"Detection Of Presence Of Chemical Precursors"\"07/08/2025"
+"NASA Ames Research Center"\"Issued"\"ARC-15575-1"\7473930\"11/173,053"\"Use Of Carbon Nanotube Arrays For Display Purposes"\"10/24/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15578-2"\7873181\"11/525,600"\"Visual Signal Sensor Organ Replacement: Implementation"\"05/19/2028"
+"NASA Ames Research Center"\"Issued"\"ARC-15606-1"\7431242\"11/265,324"\"Aero Assist Capsule Vehicle Geometry For Atmospheric Entry"\"04/01/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15684-1"\7516890\"11/444,807"\"InterIssued Inventory Monitoring"\"05/25/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15714-1"\7869029\"11/398,733"\"Light Collimator And Monitor"\"11/11/2029"
+"NASA Ames Research Center"\"Issued"\"ARC-15782-1"\7549338\"11/973998"\"Nanotechnology Sensor Of Presence And Concentration Of A Target Molecule"\"09/28/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15796-1"\8675922\"13/444,777"\"Motion Blur Evaluation Techniques"\"08/31/1932"
+"NASA Ames Research Center"\"Issued"\"ARC-15870-1"\7655497\"11/513,431"\"Growth Method For Phase Change Nanostructures"\"08/16/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15890-1"\7655145\"11/543,275"\"Water Treatment Systems For Long Space Flight Use"\"11/05/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15900-1"\7490367\"11/526,175"\"Wet Waste Drying Bag"\"09/20/2026"
+"NASA Ames Research Center"\"Issued"\"ARC-15903-1DIV"\8409491\"13/215,206"\"In-situ Formation Of Reinforcement Phases In Ceramic Composites And Ultra High Temperature Ceramic Composites For Advanced TPS Applications"\"09/28/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15967-1"\7635420\"11/645,267"\"Dielectrophoresis-Based Particle Sensor Using Nanoelectrode Arrays"\"06/06/2028"
+"NASA Ames Research Center"\"Application"\"ARC-15977-1"\0\"12/100,378"\"Artificial Immune System Based Approach For Air Combat Maneuvering"\
+"NASA Ames Research Center"\"Application"\"ARC-15981-4"\\"13/463,780"\"Chaperonin-based Templates for Pseudo-cellulosomes with Multiple Enzymes Present"\"07/19/2027"
+"NASA Ames Research Center"\"Issued"\"ARC-15983-1"\7923709\"12/273,502"\"Radiation Shielding System Using A Composite Of Hydrogen-Rich Polymers Loaded With Carbon Nanotubes"\"09/30/2029"
+"NASA Ames Research Center"\"Application"\"ARC-16478-1"\\"14/191,246"\"Real Time PIREPs Using Audio Twitter"\"02/26/1934"
+"NASA Ames Research Center"\"Issued"\"ARC-15995-1"\8290246\"11/958,296"\"A Method To Measure The Recession Of Ablative Materials In Arc-jet Testing Using Digital Stereo-photogrammetry And Image Cross-correlation"\"07/01/1931"
+"NASA Ames Research Center"\"Issued"\"ARC-16013-1"\7968054\"11/715,785"\"Wireless Chemical Sensor Data Transmission System Based On Nanotechnology"\"10/03/2029"
+"NASA Ames Research Center"\"Issued"\"ARC-16018-1"\7662459\"12/175,379"\"Atmospheric Entry Heat Shield Employing Cured Thermal Protection Material Blocks Bonded In A Large-Cell Honeycomb Matrix"\"07/17/2028"
+"NASA Ames Research Center"\"Application"\"ARC-16132-1"\0\"14/091,250"\"Surface Densification Of Phenolic Impregnated Carbon Ablator (PICA)"\"11/26/1933"
+"NASA Ames Research Center"\"Issued"\"ARC-16133-1"\8069001\"12/319,918"\"Hollow AErothermal Ablation And Temperature (HEAT) Isotherm Sensor For Tracking Isotherm Through The TPS Material"\"10/09/2029"
+"NASA Ames Research Center"\"Application"\"ARC-16211-1"\0\"13/896,284"\"Low Cost Optical Fiber Solar Cell Configurations"\"05/16/1933"
+"NASA Ames Research Center"\"Issued"\"ARC-16235-1"\8285659\"12/543,411"\"Modeling-Error-Driven Performance-Seeking Direct Adaptive Control"\"11/18/1930"
+"NASA Ames Research Center"\"Application"\"ARC-16273-1"\0\"12/454,024"\"Decomposition Technique for Remaining Useful Life Prediction"\"11/18/1930"
+"NASA Ames Research Center"\"Issued"\"ARC-16280-1"\8409845\"12/316,557"\"Offshore membrane enclosures for dewatering Algae (OMEDA)"\"10/15/1931"
+"NASA Ames Research Center"\"Issued"\"ARC-16298-1"\8333810\"12/398,854"\"Nanotechnology-Based Supercapacitor"\"06/29/1930"
+"NASA Ames Research Center"\"Issued"\"ARC-16320-1"\8332342\"12/622,407"\"Battery Prognostics using Particle Filtering Techniques"\"02/05/1931"
+"NASA Ames Research Center"\"Issued"\"ARC-16331-1"\8408707\"12/428,441"\"System to estimate visual acuity from wavefront aberrations"\"05/29/2029"
+"NASA Ames Research Center"\"Issued"\"ARC-16334-1"\8244477\"12/478,667"\"Estimation of Growth Stage and Growth Rate for Algae"\"06/04/2029"
+"NASA Ames Research Center"\"Application"\"ARC-16337-1"\0\"13/793,998"\"Method and Device for Biometric Subject Verification and Identification Based Upon electrocardiographic signals"\"03/11/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16340-1"\0\"13/645,284"\"Method for formation and manufacture of carbon nanotube mesh bucky paper capsules for transplantation of cells and tissue and implantation of medical devices"\"10/04/1932"
+"NASA Ames Research Center"\"Issued"\"ARC-16342-1"\8412469\"12/698,996"\"Advanced Sensor Technology for Algal Biotechnology (ASTAB) "\"12/16/1930"
+"NASA Ames Research Center"\"Application"\"ARC-16348-1"\\"13/109,954"\"Co-Optimized Blunt-Body ReEntry Vehicle Aerothermodynamic Parametric Shape and Multi-Discipline Optimization Design Process"\
+"NASA Ames Research Center"\"Issued"\"ARC-16351-1"\8498756\"13/213,022"\"Hovercraft Landing System"\"12/07/1931"
+"NASA Ames Research Center"\"Issued"\"ARC-16370-1"\8375675\"12/574,493"\"Self Aligning Lug for adapting carbon fiber rods to a bolted metallic connection"\"05/07/1931"
+"NASA Ames Research Center"\"Application"\"ARC-16372-1"\0\"13/794,061"\"Inexpensive Cooling Systems for Devices"\"03/11/1933"
+"NASA Ames Research Center"\"Issued"\"ARC-16373-1"\8489181\"12/319,220"\"Heart Electrical Actions as Biometric Indicia"\"04/29/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16405-1"\0\"14/091,236"\"Nanowire based piezoelectric power generation"\"11/26/1933"
+"NASA Ames Research Center"\"Issued"\"ARC-16407-1"\8337208\"12/622,374"\"Content Analysis to Detect High Stress in Oral Interviews and Text Documents"\"05/26/1931"
+"NASA Ames Research Center"\"Application"\"ARC-16419-1"\0\"13/317,034"\"Strobing to Mitigate Vibration for Display Legibility"\"10/05/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16450-1CIP"\0\"13/720,898"\"Distributed Prognostics and Health Management with a Wireless Network Architecture "\"05/05/2029"
+"NASA Ames Research Center"\"Application"\"ARC-16456-1"\\"13/480,917"\"FABRICATION OF NANOPIPETTE ARRAY FOR BIOSENSING"\
+"NASA Ames Research Center"\"Application"\"ARC-16461-1"\\"13/956,218"\"Solar Powered CO2 Conversions with Thin Film Devices"\"07/31/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16466-1"\\"14/010,322"\"Combined HETC/ROCCI TPS Material for Temperatures Up To T=3200 F "\"08/26/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16467-1"\\"13/615,202"\"ODVEC: Outlier Detection Via Estimating Clusters"\
+"NASA Ames Research Center"\"Application"\"ARC-16607-1"\\"13/658,749"\"An Approach to Make Flexible Ablators that are Flexible Char Formers"\"10/23/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16621-1"\\"13/472,283"\"Transformable Hypersonic Aerodynamic Decelerator"\"12/04/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16644-1"\\"13/648,197"\"Variable Camber Continuous Aerodynamic Control Surfaces and Methods for Active Wing Shaping Control "\"10/09/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16646-1"\\"13/485,721"\"A method to produce copper nanowires for interconnect applications"\
+"NASA Ames Research Center"\"Application"\"ARC-16661-1"\\"13/444,789"\"Video acuity measurement system"\
+"NASA Ames Research Center"\"Application"\"ARC-16697-1"\\"13/956,929"\"NTTS Search and Reporting (Part of NTTS Suite)"\"08/01/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16707-1"\\"13/438,793"\"Ectomycorrhizal mediated remediaiton of phenolic-based contamination through use of specifically adapted ectomycorrhizal fungi and enzyme enhancement through partial defoliation of the host."\
+"NASA Ames Research Center"\"Application"\"ARC-16707-1CIP"\\"13/854,620"\"Ectomycorrhizal mediated remediaiton of phenolic-based contamination through use of specifically adapted ectomycorrhizal fungi and enzyme enhancement through partial defoliation of the host."\"04/03/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16732-1"\\"13/573,924"\"NanoSat Launch Adapter System (NLAS)"\"03/14/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16733-1"\\"13/535,884"\"Habitat Water Wall for Water, Solids, and Atmosphere Recycle and Reuse "\
+"NASA Ames Research Center"\"Application"\"ARC-16752-1"\\"14/179,401"\"Fuel-Efficient, Airport-Friendly, Multi-Speed Transport Aircraft Configuration with Novel Structural Approach"\"02/12/1934"
+"NASA Ames Research Center"\"Application"\"ARC-16811-1"\\"13/544,752"\"Compliant electrode and composite materials for piezoelectric wind and mechanical energy conversions"\
+"NASA Ames Research Center"\"Application"\"ARC-16812-1"\\"13/783,112"\"Graphene composite materials for supercapacitor electrodes "\"03/01/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16833-1"\\"13/747,875"\"Flight Deck Predictive Weather Display and Decision Support Interface "\"01/23/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16844-1"\\"13/662,346"\"Adaptive control and disturbance rejection of non-minimum phase plants using residual mode filters"\"10/26/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16846-1"\\"13/707,546"\"Dynamic Weather Routes Tool"\"12/06/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16892-1A"\\"13/929,646"\"The Surface-Adhering Bioreactor (SABR): A novel microbial cell cultivation platform"\"06/27/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16902-1"\\"13/725,475"\"Nanosensors for medical diagnosis"\"12/21/1932"
+"NASA Ames Research Center"\"Application"\"ARC-16916-1"\\"13/956,736"\"A Method for Improving Control Systems with Normalized Adaptation by Optimal Control Modification"\"08/01/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16924-1"\\"14/010,355"\"Aluminoborosilicate Supplement for Thermal Protection of a Re-entrant Vehicle"\"08/26/1933"
+"NASA Ames Research Center"\"Application"\"ARC-16942-2"\\"13/659,739"\"A new family of low density flexible ablators"\"10/24/1932"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-001-049"\7180943\"10/113,637"\"Adaptive Lossless Data Compression"\"03/26/2022"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-005-031"\7407131\"11/288,052"\"Sound Shield"\"10/31/2025"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-006-001"\7431243\"11/227,325"\"Algorithms For Autonomous Soaring"\"02/27/2026"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-006-002"\0\"11/422,554"\"Air Breathing,Reusable, Vertical Launch, Vertical Landing, First Stage Launch System with Off-the-Shelf Second Stage - Ram Booster"\
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-006-005"\7711455\"11/463,485"\"Propulsion Controlled Aircraft Computer (PCAC)"\"08/09/2026"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-006-024"\7520176\"11/567,118"\"Method for Real-Time Structure Shape Sensing"\"12/05/2026"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-006-045"\0\"11/682,969"\"METHOD FOR REDUCING THE REFRESH RATE OF FIBER BRAGG GRATING SENSORS"\
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-008-001"\8145366\"12/138,747"\"Real-time Interactive Sonic Boom Display"\"04/28/2030"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-008-023"\7715994\"12/191,734"\"IMPROVED PROCESS FOR USING SURFACE STRAIN MEASUREMENTS TO OBTAIN OPERATIONAL LOADS FOR COMPLEX STRUCTURES"\"08/14/2028"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-009-008"\0\"12/718034"\"Continental Digital Elevation Map Compression and Decompression Software"\
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-009-026"\8447443\"13/367990"\"A New Peak-Seeking Control Method"\"02/07/2032"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-010-042"\\"13/463246"\"An apparatus and a method to eliminate polarization-induced fading from multiple fiber-optics strain sensors via signal-processing under polarization diversity detection scheme"\
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-011-002"\\"13/759,847"\"OPTICAL WAVEGUIDE BRAGG GRATING WAVELENGTH SHIFT BY LIGHT INTERACTION WITH ACTIVE MATERIAL"\"02/05/2033"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-011-015"\\"14/106947"\"In-situ three-dimensional shape rendering from strain values obtained through optical fiber sensors"\"05/31/2032"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-012-005"\\"13/759210"\"Method and apparatus of multiplexing and acquiring data from multiple optical fibers using a single data channel of an optical frequency-domain reflectrometry (OFDR) system (Revised)"\"02/05/2033"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-012-006"\\"13/733364"\"A Novel Approach to Liquid Level Sensing Using Fiber Bragg Grating Technology"\"01/03/2033"
+"NASA Armstrong Flight Research Center"\"Application"\"DRC-012-011"\\"13/573920"\"Air Launch From A Towed Aircraft"\"07/05/2032"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-096-055"\6126111\"09/112,067"\"Emergency Flight Control System Using One Engine And Fuel Transfer"\"07/08/2018"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-097-021"\6102330\"08/905,777"\"Emergency Aircraft Lateral Controller Using Existing (non-modified) Digital Engine Computers During A System Failure For The Purpose Of Safe Landing"\"07/29/2017"
+"NASA Armstrong Flight Research Center"\"Issued"\"DRC-098-001"\6216063\"09/74,024"\"A Flutterometer Flight Test Tool"\"05/06/2018"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-13378-1"\0\"07/710,633"\"SPLINE-LOCKING PAYLOAD FASTENER"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13802-1"\6584874\"08/673,859"\"USING A 3-D SPRAG IN RACHETING TOOLS BASED ON PAT. NO. 5,482-144"\"07/02/2016"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13817-1"\5983162\"08/872,586"\"Empirical Mode Decomposition Method And Hilbert Spectral Analysis Algorithms"\"06/10/2017"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13817-2"\6631325\"09/82,523"\"COMPUTER IMPLEMENTED EMPIRICAL MODE DECOMPOSITION METHOD APPARATUS AND ARTICLE OF MANUFACTURE UTILIZING CURVATURE EXTREMA"\"06/10/2017"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13817-3"\6381559\"09/282,424"\"Empirical Mode Decomposition Apparatus, Method, And Article Of Manufacture For Analyzing Biological Signals And Performing Curve Fitting"\"03/31/2019"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13817-4"\6862558\"10/73,957"\"Empirical Mode Decomposition For Analyzing Acoustical Signals"\"02/13/2022"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13817-5"\6738734\"10/11,206"\"Empirical Mode Decomposition Apparatus, Method And Article Of Manufacture For Analyzing Biological Signals And Performing Curve Fitting"\"06/10/2017"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13905-1"\6640949\"10/95,343"\"1-Way Bearing"\"03/01/2022"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13909-1"\6311130\"09/150,671"\"Computer Implemented Empirical Mode Decomposition Method, Apparatus, And Article Of Manufacture For Two-Dimensional Signals"\"09/10/2018"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-13985-1"\6566854\"09/646,161"\"Active Antenna Combined With Non-Ferrous Current Probe."\"09/12/2020"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14064-1"\6648522\"09/804,646"\"Universal Fiber Optic Connector Polishing Fixture With Precision Alignment Capability"\"03/13/2021"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14207-1"\6626792\"09/799,872"\"Gear Bearings"\"03/03/2021"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14209-1"\6293803\"09/501,412"\"Stress Relieved Zee Electrical Interconnect"\"02/09/2020"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14213-1"\6760487\"09/550,254"\"Estimated Spectrum Adaptive Postfilter (ESAP) And The Iterative Prepost Filtering (IPF) Algorithms"\"04/14/2020"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14236-1"\6538796\"09/541,680"\"MEMS Devices For Spacecraft Thermal Control Applications"\"03/31/2020"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14302-1"\6782124\"09/729,138"\"Extension Of The Empirical Mode Decomposition Method To A Time Series Of 2-Dimensional Grid Maps"\"11/29/2020"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14305-1"\6895115\"09/839,147"\"Method For Recursive Implementation Of Hierarchical Segmentation"\"04/23/2021"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14389-1"\7543274\"10/789,028"\"Deriving Formal Specifications And Code From Scenarios"\"02/25/2024"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14393-1"\7145739\"10/385,166"\"Light Weight Optical Mirrors Formed In Single Crystal Silicon"\"03/06/2023"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14413-1"\7255483\"10/93,621"\"Thrust Rollers"\"03/01/2022"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14435-1"\6740224\"10/173,533"\"Innovative Manufacturing Procedure For Low Cost And High Quality Carbon Nanotubes"\"06/11/2022"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14480-2"\7762155\"11/444,808"\"Gear Bearings"\"05/25/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14561-1"\7207245\"11/174,454"\"Screw-Locking Wrench"\"06/30/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14562-1"\7504921\"11/543,278"\"Stepping Flextures"\"09/29/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14601-1"\7008605\"10/292,952"\"Method For Manufacturing High Quality Carbon Nanotubes"\"11/08/2022"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14603-1"\7544146\"11/122,201"\"Anti-Backlash Gear-Bearings"\"05/02/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14608-1"\6990436\"10/729,579"\"Time Frequency Analysis Based On Extrema Sifting"\"11/28/2023"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14616-1"\7248342\"10/730,195"\"Conceptual Design Of A 3D Imaging Lidar For High-Resolution Mapping Of The Surface Topography Of Moons Or Planets From Space"\"12/05/2023"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14657-1"\7512568\"11/109,400"\"Evolvable Neural Software System"\"04/08/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14666-1"\6775600\"10/267,092"\"Systems And Methods For Determining Spacecraft Orientation"\"10/07/2022"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14673-1"\6901353\"10/615,365"\"Normalized Amplitude Hilbert Transform (NAHT): A New Algorithm For Computing Instantaneous Frequency"\"07/08/2023"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14683-1"\8480826\"11/736,874"\"Specular Coatings For Composite Structures"\"04/18/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14762-1"\7769488\"11/108,627"\"SMART Solar Sail"\"04/08/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14777-1"\7341932\"11/251,531"\"Large Area Vacuum Ultra-Violet Sensors"\"09/30/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14793-1"\7548199\"11/239,458"\"Pivot 2.0: Radiation Hardened, Fast Acquisition/Weak Signal Tracking GPS Receiver"\"09/20/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14807-1"\7464006\"10/963,470"\"Application Of HHT To Financial Data Analysis For Define Volatility And Trend"\"10/07/2024"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14833-1"\7346461\"11/251,004"\"Stability Spectrum Through Hilbert-Huang Transform"\"09/30/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14845-1"\7290737\"11/251,537"\"Demiseable Reaction Wheel Assembly"\"09/29/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14871-1"\7935297\"11/370,396"\"Template For Deposition Of Micron And Sub-micron Pointed Structures"\"03/06/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14873-1"\8357211\"12/872,445 "\"ADR Salt Pill Design And Crystal Growth Process For Hydrated Magnetic Salts"\"08/31/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14879-1"\7635832\"11/469,105"\"Iterative-Transform Phase-Retrieval Utilizing Adaptive Diversity"\"08/31/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14941-1"\7739671\"11/203,590"\"A Method And System For Direct Implementation Of Formal Specifications Derived Mechanically From Informal Requirements"\"08/12/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14942-1"\7752608\"11/203,586"\"A Method And System For Formal Analysis, Simulation, And Verification Of Knowledge-Based Systems, Rule-Based Systems, And Expert Systems"\"08/12/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14952-1"\7513546\"11/689,161"\"Conformal Gripper"\"03/21/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14960-1"\7992760\"11/357,458"\"Hardware And Technique For Dead End Welding Of All Types Of Tubing"\"02/08/2026"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16700-1"\\"14/041407"\"SpaceCube v2.0 Flight Processor Card"\"09/30/2033"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14968-1"\7627538\"11/251,538"\"Apoptosis And Self-destruct: Mechanisms For Management Of Autonomic Systems"\"09/29/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14968-2"\7925600\"12/603,140"\"SWARM AUTONOMIC AGENTS WITH SELF-DESTRUCT CAPABILITY"\"10/21/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14979-1"\7601091\"11/426,134"\"Modular Gear Bearing"\"06/23/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-14994-1"\7697759\"11/251,530"\"A Split-Remerge Method For Eliminating Processing Window Artifacts In Recursive Hierarchical Segmentation"\"09/30/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15001-1"\7924415\"12/389,097"\"Light Direction Sensor"\"02/19/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15002-1"\7240879\"11/124,592"\"Space Robotic System For In Space Servicing Of Unmanned Spacecraft Applications"\"05/06/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15002-2"\7513459\"11/670,653"\"Method And Associated Apparatus For Capturing, Servicing, And De-Orbiting Earth Satellites Using Robotics"\"05/06/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15002-3"\7293743\"11/670,270"\"Method And Associated Apparatus For Capturing, Servicing, And De-Orbiting Earth Satellites Using Robotics"\"11/13/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15002-4"\7438264\"11/670,781"\"Method And Associated Apparatus For Capturing, Servicing And De-Orbiting Earth Satellites Using Robotics"\"05/06/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15002-5"\7513460\"11/671,062"\"Method And Associated Apparatus For Capturing, Servicing, And De-Orbiting Earth Satellites Using Robotics"\"05/06/2025"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15027-1"\7412175\"11/425,352"\"Millimeter Wave Polarization Transformer"\"06/20/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15027-2"\7609978\"12/056,964"\"INTERFEROMETRIC POLARIZATION CONTROL"\"03/27/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15027-3"\7616903\"12/057,060"\"INTERFEROMETRIC POLARIZATION CONTROL"\"03/27/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15030-1"\7907333\"11/460,482"\"A Pulsed, 1 Micron, Single Frequency, Diode-Seeded Ytterbium-doped Fiber Amplifier With Variable Output Parameters, P"\"07/27/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15038-1"\7765171\"11/426,853"\"SPAACE: Self Properties For An Autonomous & Autonomic Computing Environment"\"06/27/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15039-1"\7762523\"11/861,038"\"Miniaturized Double Latching Solenoid Valve"\"09/25/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15042-1"\7622907\"11/535,872"\"Driven Ground"\"09/27/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15055-1"\7746190\"11/748,969"\"Broadband High Spurious-suppression Microwave Waveguide Filter For Polarization-preserving And Transformer"\"05/15/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15077-1"\8068556\"12/147,100"\"Low Cost TDRSS Tranceiver (LCT2)"\"06/26/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15079-1"\7886273\"11/532,800"\"Generation And Verification Of Policies For Autonomic Systems"\"09/18/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15080-1"\7979848\"11/533,837"\"A Method Of Deriving Process Based Specifications From Scenarios Via Pattern Matching"\"09/21/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15115-1"\7465926\"11/537,280"\"Miniaturized Radiation Spectrometer Development"\"09/29/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15136-1"\8093094\"12/137,844"\"Blocking Contacts For N-Type Cadmium Zinc Cadmium Zinc Telluride (CdZnTe)"\"06/12/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15148-1"\7668796\"11/536,132"\"Enhancing R2D2C Requirements Based Programming With Automata Learning"\"09/28/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15162-1"\7796726\"11/706,693"\"Instrument And Method For X-Ray Diffraction, Fluorescence, And Crystal Texture Analysis Without Sample Preparation"\"02/14/2027"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15163-2"\0\"13/092198"\"AIGaN Ultraviolet Detectors For Dual Band UV Detection"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15176-1"\7899760\"11/533,855"\"Autonomic Quiescence"\"09/21/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15177-1"\8082538\"11/536378"\"A Method For Developing And Maintaining Evolving Systems With Software Product Lines"\"09/28/2026"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15177-2"\0\"13/305932"\"A Method For Developing And Maintaining Evolving Systems With Software Product Lines"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15178-1"\7992134\"11/536,969"\"Modeling, Specifying And Deploying Policies In Autonomous And Autonomic Systems Using An AOSE Methodology"\"09/29/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15179-1"\7904396\"11/533,895"\"An Autonomic Smoke Detector"\"09/21/2026"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15184-1"\7978312\"11/933,492"\"An Active, Solid-state, 3-Dimensional Range Imaging System"\"11/01/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15206-1"\8041655\"11/836,352"\"Otoacoustic Protection In Biologically-Inspired Systems"\"08/09/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15206-2"\8140452\"13/230915"\"Otoacoustic Protection In Biologically-Inspired Systems"\"09/13/2031"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15206-3"\8140453\"13/230922"\"Otoacoustic Protection In Biologically-Inspired Systems"\"09/13/2031"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15206-4"\8275725\"13/230920"\"Otoacoustic Protection In Biologically-Inspired Systems"\"09/13/2031"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15206-5"\8165976\"13/230922"\"Otoacoustic Protection In Biologically-Inspired Systems"\"09/13/2031"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15206-6"\8165977\"13/230923"\"Otoacoustic Protection In Biologically-Inspired Systems"\"09/13/2031"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15217-1"\8139674\"12/173,243"\"Spaceflight Ka-Band High Rate Rad Hard Modulator"\"07/15/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15301-1"\7673089\"11/935,572"\"An Extendibe USB Drive That Accepts External Media"\"11/06/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15302-1"\7673089\"11/935,572"\"An Double-Headed USB Drive"\"11/06/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15328-1"\8499779\"12/014,889"\"Non-Pyrotechnic Zero-Leak Normally-Closed Valve"\"01/16/2028"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15333-1"\0\"11/860,830"\"Improved, Flexure-Base Linear Bearing"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15341-1"\7922920\"11/862,550"\"Low Conductance Silicon Micro-leak for Mass Spectrometer Inlet"\"09/27/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15341-3"\8455926\"12/889,014 "\"Low Conductance Silicon Micro-leak for Mass Spectrometer Inlet"\"09/23/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15349-1"\7830527\"12/102,240"\"Method And Apparatus For Second Harmonic Generation And Other Frequency Convertion With Multiple Frequency Channels"\"04/14/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15353-1"\7830224\"11/877,102"\"Compact Low-loss Planar Magic-T With Broadband Phase And Amplitude Responses"\"10/23/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15357-1"\8041661\"11/861,687"\"Stability Algorithm For Neural Entities (SANE)"\"09/26/2027"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15364-1"\8155939\"12/170,683"\"Hughes Particle – Surface Interaction Model"\"07/10/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15377-1"\7811406\"12/249,265"\"Advanced Adhesive Bond Shape Tailoring for Large Composite Primary Structures Subjected to Cryogenic and Ambient Loading Environments"\"10/10/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15416-1"\7999427\"12/188,039"\"Directed Flux Motor Utilizing Concentric Magnets and Interwoven Flux Channels"\"08/07/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15417-1"\7735385\"12/187,562"\"Actuated Ball and Socket Joint"\"08/07/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15419-1"\8030873\"12/187,926"\"Improvements to the Walk and Roll Robot"\"08/07/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15421-1"\7968812\"12/353,009"\"Spring Joint Package with Overstrain Sensor ( OS Sensor Joint )"\"01/13/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15431-1"\7921731\"12/327,514"\"A two-axis direct fluid shear stress sensor suited for aerodynamic applications"\"12/03/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15445-1"\7982861\"12/183,820"\"Pseudo-Noise Code Modulation using Return to Zero pulses for Ranging, Altimetry and Communications"\"07/31/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15458-1"\8094731\"12/357,081"\"Space Link Extension Return Channel Frames (SLE-RCF) Service (User side) Software Library"\"01/21/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15483-1"\7817087\"12/116,518"\"Relative Spacecraft Navigation using Reflected GPS Signals"\"05/07/2028"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15520-1"\8547531\"12/873373"\"Non-scanning laser 3D imager"\"09/01/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15527-1"\8160728\"12/558,672"\"Sensor Complete Requirements Algorithm For Autonomous Mobility"\"09/14/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15538-1"\8198956\"12/535,954"\"Compact planar microwave blocking filter"\"08/05/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15550-1"\8275724\"12/569,422"\"A biologically-inspired method of improving system performance and survivability through self-sacrifice"\"09/29/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15552-1"\7924126\"12/555,634"\"Small, High Field Superconducting Magnets"\"09/08/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15557-1"\8095485\"12/353,637"\"Formulation for Emotion Embedding in Logic Systems (FEELS)"\"01/14/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15583-1"\7970025\"12/496,954"\"Tunable Frequency-stabilized Laser via Offset Sideband Locking"\"07/02/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15584-1"\8144331\"12/487,454"\"Hilbert-Transform-Based Phase Referencing Algorithm for Wide-Field Imaging Interferometry."\"06/18/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15655-1"\8138961\"12/561,644"\"Low Frequency Wideband Step Frequency Inverse Synthetic Aperture Radar For 3-D Imaging of Interior of Near Earth Objects/Planetary Bodies"\"09/17/2029"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15660-1"\0\"13/247416"\"Extreme Environment Low Temperature Transistor Models"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15662-1"\8092031\"12/569,090"\"Flight Mirror Mount and Flight Mounting Procedure for an Ultra-Lightweight High-Precision Glass Mirror"\"09/29/2029"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15672-1"\0\"13/211413"\"Multicolor detectors for ultrasensitive long-wave imaging cameras"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15678-1"\8484274\"12/549,159"\"Optimal Padding for the Two-Dimensional Fast Fourier Transform"\"08/27/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15684-1"\8285401\"12/549,898"\"Discrete Fourier Transform (DFT) Analysis in a Complex Vector Space"\"08/28/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15685-1"\8331733\"12/550,141"\"Sampling Theorem in Terms of the Bandwidth and Sampling Interval"\"08/28/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15692-1"\8330644\"12/835,958 "\"Expandable Reconfigurable Instrument Node - Web Sensor Strand Demonstration"\"07/19/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15693-1"\0\"12/570,224"\"Variable Sampling Mapping: A novel supplement to iterative-transform phase retrieval algorithms for undersampled images, broadband illumination, and noisy detection environments"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15699-1"\8480296\"12/560,535"\"A Low Cost, Low Temperature Radiometer for Thermal Measurements."\"09/16/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15724-1"\8275015\"12/551,212"\"Passively Q-switched side pumped Monolithic Ring Laser"\"08/31/2029"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15727-1"\0\"13/222575"\"An All-metal, Solderless Circularly Polarized Microwave Antenna Element with Very Low Off-Axis Cross-Polarization"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15729-1"\8674302\"12/789,937"\"Novel Superconducting Transition Edge Sensor Design"\"05/28/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15729-2"\8393786\"12/789,954 "\"Novel Superconducting Transition Edge Sensor Design"\"05/28/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15730-1"\8355579\"12/783054"\"Automatic Extraction of Planetary Image Features"\"05/19/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15732-1"\8093565\"12/695478"\"Crossed Small Deflection Energy Analyzer (SDEA) for Wind/Temperature Spectrometer (WTS)"\"01/28/2030"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15758-1"\8044332\"12/553,613"\"Hybrid Architecture Active Wavefront Sensing and Control"\"09/03/2029"
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15771-1"\8035081\"12/570,166"\"High Precision Electric Gate (HPEG) for Time of Flight Mass Spectrometers"\"09/30/2029"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15774-1"\0\"13/154599"\"Ensemble Detector"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15782-1"\0\"13/216479"\"Ultra-low Power (< 100mW), 64-Channel Pulse Data Collection System"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15792-1"\8406469\"12/838600"\"Progressive Band Selection for Hyperspectral Images"\"07/19/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15815-1"\0\"12/887988"\"LIDAR Luminance Quantizer"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15826-1"\8134130\"12/839207"\"The Corner Cathode: Making Collimated Electron Beams with a Small Number of Electrodes"\"07/19/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15829-1"\0\"13/601293"\"Resolution enhanced pseudo random code technique"\"08/31/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15839-1"\0\"12/840787"\"Low threshold, narrow linewidth optical parametric generator"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15856-1"\8196853\"12/779494"\"Aerodynamically Stabilized Instrument Platform for Kites and Tethered Blimps ( AeroPod )"\"05/13/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15886-1"\0\"12/838963"\"Automated Beam Balance Scale Logger"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15911-1"\0\"13/217965"\"Graphite Composite Panel Polishing Fixture"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15934-1"\0\"12/839125"\"Determining Phase Retrieval Sampling from the Modulation Transfer Function"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15935-1"\0\"13/043257"\"New Variables for Iterative Transform Phase Retrieval"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15936-1"\0\"12/854490"\"SpaceCube Version 1.5"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15947-1"\8274726\"12/839171"\"Sampling and Reconstruction of the Sinc(x) Function"\"07/19/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15948-1"\0\"13/204767"\"Lateral Kevlar Suspension Device (LKSD)"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15949-1"\0\"13/600992"\"Vectorized Rebinning Algorithm for Fast Data Down-Sampling"\"08/31/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15951-1"\0\"13/222839"\"An Improved Method of Fabricating Single Crystal Silicon Light Weight Mirrors"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15953-1"\8484509\"12/854546"\"SpaceCube Demonstration Platform"\"08/11/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15953-2"\0\"13/903357"\"SpaceCube Demonstration Platform"\"09/30/2029"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15957-1"\0\"13/211526"\"Imaging System Aperture Masks for Image Plane Exit Pupil Characterization"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15964-1"\8525110\"13/247,168 "\"An Instrument Suite for the Vertical Characterization of the Ionosphere-Thermosphere System from 100 km to 700km Altitude"\"09/28/2031"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15970-1"\0\"13/034125"\"Electrospray Ionization for Chemical Analysis of Organic Molecules for Mass Spectrometry"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15976-1"\0\"12/872366"\"Phase Retrieval System for Assessing Diamond-Turning and other Optical Surface Artifacts"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-15977-1"\8354952\"12/839060"\"Phase Retrieval for Radio Telescope and Antenna Control"\"07/19/2030"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15979-1"\0\"12/839187"\"Multi-Scale Image Reconstruction using Wavelets"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-15994-1"\\"13/104538"\"Photonic Choke-Joints for Dual-Polarization Waveguides"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16006-1"\\"13/216671"\"Programmable High-Rate Multi-Mission Receiver for Space Communication"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16008-1"\\"13/600826"\"Phase controlled magnetic mirror for wavefront correction"\"08/31/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16016-1"\\"13/193272"\"Carbon Nanotubes on titanium substrates for stray light suppression"\
+"NASA Goddard Space Flight Center"\"Issued"\"GSC-16024-1"\8526733\"13/150,316"\"Refinement of the HSEG Algorithm for Improved Computational Processing Efficiency"\"06/01/2031"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16789-1"\\"14/ 033725"\"LEARNS (Logic Expansion for Autonomously Reconfigurable Neural Systems)"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16029-1"\\"13/193249"\"Nanostructure secondary mirror apodization mask for transmitter signal suppression in a duplex telescope."\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16096-1"\\"13/211432"\"Prototype Genomics Based keyed-Hash Message Authentication Code Protocol"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16100-1"\\"12/881587"\"Lunar Reconnaissance Orbiter (LRO) Command and Data Handling Flight Electronics Subsystem"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16105-1"\\"13/197214"\"Molecular Adsorber Coating"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16109-1"\\"13/240180"\"HEXPANDO expanding head for fastener retention hexagonal wrench"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16122-1"\\"13/474053"\"Apparatuses and Methods to Enable Sub-MHz Precision in Fast Laser Frequency Tuning"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16135-1"\\"13/534427"\"A cryptographic approach to microRNA target binding analysis"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16146-1"\\"13/601194"\"Wafer Level Microchannel Fabrication Process for Lap-on-a-Chip Devices"\"08/31/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16182-1"\\"13/595604"\"A High Event Rate, Zero Dead Time, Multi-Stop Time-to-digital Converter Application Specific Integrated Circuit"\"08/27/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16193-1"\\"13/720175"\"Fine Control and Maintenance Algorithm for Visible Nulling Coronagraphy"\"12/19/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16223-1"\\"13/551649"\"SpaceCube Mini"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16247-1"\\"13/570100"\"Enhanced adhesion multiwalled carbon nanotubes on titanium substrates for stray light control"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16250-1"\\"13/150316"\"Further Refinement of the Computationally Efficient HSEG Algorithm"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16259-1"\\"13/050617"\"Spaceflight Refuiling Tools"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16299-1"\\"13/622465"\"V-Assembly Dual Head Efficiency Resonator (VADER) Laser Transmitter"\"09/19/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16301-1"\\"13/771815"\"Impedance matched to vacuum, invisible-edge diffraction suppressed mirror"\"02/20/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16327-1"\\"13/545173"\"Miniaturized laser heterodyne radiometer for carbon dioxide (CO2), methane (CH4), and carbon monoxide (CO) measurements in the atmospheric column."\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16328-1"\\"13/474367"\"Development of the Hilbert-Huang Transform Real-Time Data Processing System with 2-D Capabilities"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16438-1"\\"13/606174"\"Power provision based on self-sacrificing spacecraft"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16460-1"\\"13/592409"\"Autonomic Autopoiesis"\"08/23/2032"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16461-1"\\"13/592412"\"Autonomic and Apoptotic Cloud, Autonomic and Apoptotic Grid, Autonomic and Apoptotic Highly Distributed System"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16485-1"\\"14/038381"\"Broadband planar impedance transformer"\"09/26/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16516-1"\\"14/021812"\"Muti-function microposters inside of microfluidic channel for Lab-On-A-Chip device"\"09/09/2033"
+"NASA Kennedy Space Center"\"Application"\"KSC-12866"\0\"12/843,353"\"In-Situ Wire Damage Detection System"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16545-1"\\"13/534442"\"INTEGRATED GENOMIC AND PROTEOMIC INFORMATION SECURITY PROTOCOL"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16555-1"\\"14/023847"\"Green Precision Cleaning System"\"09/11/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16569-1"\\"14/041,720"\"Mirrorlet array for Integral Field Spectrometers (IFS)"\
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16674-1"\\"14/041224"\"MISSE-7 Control Center"\"09/30/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16795-1"\\"13/781,121 "\"Wallops Flight Facility 6U Advanced CubeSat Ejector (ACE)"\"01/04/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16805-1"\\"14/040924"\"SpaceCube v2.0 Micro"\"09/30/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16808-1"\\"14/040848"\"SpaceCube v. 2.0 Flight Power Card"\"09/30/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16859-1"\\"14/037484"\"Chemical sensors based on 2-dimensional materials"\"09/26/2033"
+"NASA Goddard Space Flight Center"\"Application"\"GSC-16887-1"\\"14/037458"\"Propellant Transfer Assembly Design and Development"\"09/26/2033"
+"NASA Headquarters"\"Issued"\"HQN-11248-1"\6223143\"09/143,969"\"Quantitative Risk Assessment Software (QRAS) System"\"08/31/2018"
+"NASA Kennedy Space Center"\"Issued"\"KSC-11641"\5730806\"08/437,859"\"Gas-Liquid Supersonic Cleaning And Cleaning Verification Spray System"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-11751"\5710377\"08/540,616"\"Improved Portable Ultrasonic Leak Detector (Combined With KSC-11751-2)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-11804"\5693871\"08/695,071"\"Low-Differential Pressure Generator For Evaluating Low Differential Pressure Transducers"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-11866-1"\5977773\"08/912,035"\"Non-Intrusive Impedance-Based Cable Tester - Standing Wave Reflectometer"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-11884"\6039783\"08/772,057"\"A New Process And Equipment For Conversion Of NOx Scrubber Liquor To Fertilizer (related To KSC-11994)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-11884-2"\6641638\"09/511,634"\"Process And Equipment For Nitrogen Oxide Waste Conversion To Fertilizer - Continuation-In-Part Filed 2/17/00"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-11937-2"\7209567\"10/390,259"\"Communication System With Adaptive Noise Suppression"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12035-1"\6552521\"09/906,014"\"Improved Single-Station Accurate Location Of Lightning Strikes (Combined With KSC-12276 & KSC-12173)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12049"\6627065\"09/977,531"\"Liquid Galvanic Coatings For Protection Of Imbedded Metals"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12056"\6676912\"09/698,607"\"New Air Pollution Control Technology For Removal Of Nitrogen Oxides From Stationary Combustion Sources"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12092-2"\6967051\"09/939,286"\"Thermal Insulation System And Method (Continuing Patent Application) (Combined With KSC-12092)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12107"\6742926\"09/906,018"\"Thermal Insulation Test Apparatus With Sleeve (Related To KSC-12108)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12108"\6487866\"09/906,011"\"Multipurpose Thermal Insulation Test Apparatus (Related To 12107)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12168"\6452510\"09/802,535"\"Personal Cabin Pressure Monitor And Altitude Warning System"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12190"\6764617\"09/994,996"\"A Novel Ferromagnetic Conducting Lignosulfonic Acid-Doped Polyaniline (Related To KSC-11940, KSC-11940-1, KSC-11940-2, KSC-12154, KSC-12191)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12191-2"\7179404\"11/215,205"\"Corrosion Prevention Of Cold Rolled Steel Using Water Dispersible Lignosulfonic Acid Doped Polyaniline"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12205"\6715914\"10/185,378"\"Apparatus And Method For Thermal Performance Testing Of Pipelines And Piping Systems"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12220"\6917203\"10/235,020"\"Current Signature Sensor (Combined With KSC-12152)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12221"\6757641\"10/185,830"\"Multisensor Transducer And Weight Factor (Combined With KSC-12359 and KSC-13139)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12235"\6793903\"10/014,140"\"High-Temperature Decomposition Of Hydrogen Peroxide"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12235-2"\6955799\"10/923,152"\"Temperature Decomposition Of Hydrogen Peroxide"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12235-3"\8029736\"10/923,163"\"High Temperature Decomposition Of Hydrogen Peroxide"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12236"\8511396\"10/476,175"\"Non-Toxic Environmentally Safe Halon Replacement (HABx)"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12236-2-PCT"\0\"/0"\"Flame Suppression Agent, System And Users"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12236-CIP"\\"13/428,736"\"Non-Toxic Environmentally Safe Halon Replacement (HABx)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12246"\6664298\"09/972,296"\"Zero-Valent Metal Emulsion For Reductive Dehalogenation Of DNAPLs"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12246-2"\7037946\"10/701,412"\"Zero-Valent Metal Emulsion For Reductive Dehalogenation Of DNAPLs"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12278"\7400766\"10/783,295"\"Image Edge Extraction Via Fuzzy Reasoning (FRED) (combined With KSC-12272)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12386"\7274907\"10/748,915"\"Modular Wireless Data Acquisition System (combined With KSC-12479, KSC-12486)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12390"\6824306\"10/318,665"\"Thermal Insulation Test Apparatus For Flat Specimens"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12394"\7239751\"10/750,629"\"Hypothesis Support Mechanism For Mid-Level Visual Pattern Recognition (PIPR)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12458"\7156957\"10/440,543"\"UV Induced Oxidation Of Nitric Oxide"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12490"\7298897\"10/779,551"\"Noniterative Optimal Binarization Of Gray-Scaled Digital Images Via Fuzzy Reasoning (FRAT) (combined With KSC-12272)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12518"\7790128\"10/641,581"\"Hydrogen Peroxide Catalytic Decomposition"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12539"\7285306\"10/684,064"\"Self-Healing Wire Insulation"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12539-2"\8119238\"11/856,218"\"Self-Healing Wire Insulation"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12539-3"\0\"13/348,861"\"Self-Healing Wire Insulation"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12540"\6958085\"10/666,821"\"High Performance Immobilized Liquid Membranes For Carbon Dioxide Separations"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12630"\7496237\"11/010,698"\"Image Processing For Binarization Enhancement Via Fuzzy Reasoning"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12631"\7582147\"11/208,122"\"Metallic Pigment Powder Particle For Use In A Liquid Coating System To Protect Reinforcing Steel In Concrete Structures"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12637"\7271199\"10/977,622"\"Micro-scale Particle Emulsion And Their Application For Removal Of PCBs And Metals Found In Ex Situ Structures"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12664"\7404938\"10/845,418"\"Emission Control System"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12664-3-CIP"\7582271\"11/40,294"\"Emission Control System"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12666"\7122166\"10/845,607"\"Hydrogen Peroxide Concentrator"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12669"\7302364\"11/83,420"\"Integrated Spaceport Automated Data Management Architecture (Combine With KSC-12581, KSC-12583, KSC-12671and KSC-12582)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12697"\7309738\"10/962,827"\"A New Approach For Achieving Fire Retardancy While Retaining Physical Properties In A Compatible Polymer Matrix"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12697-3"\7968648\"11/935,093"\"A New Approach For Achieving Flame Retardancy While Retaining Physical Properties In A Compatible Polymer Matrix"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12703"\8031449\"12/485,979"\"Integral Battery Power Limiting Circuit For Intrinsically Safe Applications"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12723"\7790225\"11/239,445"\"Coating For Corrosion Detection And Prevention"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12723-DIV"\\"12/792,238"\"Coating For Corrosion Detection And Prevention"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12848"\7781492\"11/759,672"\"New Organic/inorganic Polymeric Thermal Insulators"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12848-DIV"\7977411\"12/835,233"\"New Organic/inorganic Polymeric Thermal Insulators"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12871-CIP"\0\"13/915,407"\"Polyimide Wire Insulation Repair System"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12871-DIV1"\0\"14/093,701"\"Polyimide Wire Insulation Repair System"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12871-DIV2"\0\"14/093,680"\"Polyimide Wire Insulation Repair System"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12875"\7841771\"11/777,711"\"Self Validating Thermocouple (Combined With KSC-12865)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12878-2-CIP"\8163972\"12/465,457"\"Bimetallic Treatment System and it's application for Removal of PCBs Found in Ex Situ Structures without the Use of a Catalized Agent"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12890"\7790787\"11/740,357"\"New Organic/Inorganic Polymeric Materials"\
+"NASA Kennedy Space Center"\"Application"\"KSC-12890-2-DIV"\0\"12/834,416"\"New Organic/Inorganic Polymeric Materials"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12899"\8425866\"11/466,624"\"Gas Phase Oxidation Of NO To NO2"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12978"\7842639\"11/749,767"\"Preparation of a Bimetal Using Mechanical Alloying for the Dehalogenation of Compounds"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12978-DIV"\8288307\"12/909,219"\"Preparation of a Bimetal Using Mechanical Alloying for the Dehalogenation of Compounds"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-12983"\8409534\"11/692,557"\"Mercury Emission Control System"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13047"\0\"12/813,864"\"Insulation Test Cryostat with Lift Mechanism (Combined with KSC-13048)"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13047-DIV"\0\"14/090,193"\"Insulation Test Cryostat with Lift Mechanism (Combined with KSC-13048)"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-13088"\8293178\"11/935,545"\"Improved Thermal Reactivity Of Hydrogen Sensing Pigments In Manufactured Polymer Composites"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13088-CON"\0\"13/611,856"\"Improved Thermal Reactivity Of Hydrogen Sensing Pigments In Manufactured Polymer Composites"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13088-DIV"\0\"13/615,850"\"Improved Thermal Reactivity Of Hydrogen Sensing Pigments In Manufactured Polymer Composites"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13161"\0\"12/855,791"\"PH Sensitive Microcapsule With Corrosion Indicator"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13167"\0\"12/856,849"\"Watercore PH Sensitive Microcapsule"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13265-CIP2"\0\"14/150,502"\"An Inductive Non-Contact Position Sensor"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13278"\0\"13/354,576"\"A Method for Making Elongated Microcapsules Under Simple Shear Conditions"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-13285"\8593153\"12/843,382"\"An improved Online Diagnostic Device (ODD) for Wiring Evaluation"\
+"NASA Kennedy Space Center"\"Issued"\"KSC-13331"\8577639\"13/031,182"\"A Method for Accurately Calibrating a Spectrometer Using Broadband Light"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13336"\0\"12/843,487"\"Sputter Coated wire for in-situ wire damage detection"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13343"\0\"13/278,710"\"Conductive Carbon Nanotube for use with Desktop Inkjet Printing"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13366"\0\"13/523,806"\"High Performance Self Healing Film"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13579"\\"13/895,717"\"Green PCB Removal From Sediment Systems (GPRSS)"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13588"\\"13/495,862"\"Multi-Dimensional Damage Detection For Flat Surfaces"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13592"\\"13/542,155"\"pH sensitive microparticles"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13595"\\"14/192,784"\"Aerogel insulation and composites integrated into unique lay-ups (Incorporates Embodiments from KSC-13702)"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13636"\\"13/546,880"\"Incorporation of Chemochromic Indicator for the Presence of Hypergolic Fuels into a Variety of Manufactured Parts"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13638"\\"14/176,824"\"A Two Dimensional Inductive Position Sensor"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13664"\\"13/896,896"\"Regolith Advanced Surface Systems Operations Robot (RASSOR) Excavator"\
+"NASA Kennedy Space Center"\"Application"\"KSC-13689"\\"13/961,521"\"Coherence Multiplexing of Wireless Surface Acoustic Wave Sensors"\
+"NASA Langley Research Center"\"Issued"\"LAR-14673-1"\5736642\"08/778,066"\"Nonlinear Ultrasonic Scanning To Detect Material Defects"\"01/08/2017"
+"NASA Langley Research Center"\"Issued"\"LAR-14840-1"\5841032\"08/792,909"\"Variable And Fixed Frequency Pulsed Phase-Locked Loop"\"01/24/2017"
+"NASA Langley Research Center"\"Issued"\"LAR-15205-1"\5741883\"08/359,752"\"Tough, Soluble, Aromatic, Thermoplastic Copolyimides"\"04/21/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15282-1"\5755571\"08/712,984"\"Ultrasonic Periodontal Structures Mapping Device"\"09/09/2016"
+"NASA Langley Research Center"\"Issued"\"LAR-15318-1"\5798521\"08/806,732"\"Distributed Fiber-optic Strain Sensor"\"02/27/2017"
+"NASA Langley Research Center"\"Issued"\"LAR-15348-1"\5632841\"08/416,598"\"Thin Layer Composite Unimorph Ferroelectric Driver And Sensor, THUNDER"\"04/04/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15348-2"\6734603\"08/797,553"\"Thin Layer Composite Unimorph Ferroelectric Driver And Sensor"\"04/04/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15351-1-CU"\5585083\"08/414,661"\"Catalyst For Formaldehyde Oxidation"\"03/30/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15370-1-SB"\5640408\"08/593,438"\"Quasi Four-Level TM:LuAG Laser (Tm:LuAG Laser)"\"01/27/2016"
+"NASA Langley Research Center"\"Issued"\"LAR-15376-1"\5771204\"08/754,642"\"Relative Phase Measurement Instrument For Multiple-Echo Systems"\"11/21/2016"
+"NASA Langley Research Center"\"Issued"\"LAR-15406-1"\5617873\"08/449,473"\"Noninvasive Meth/Apparatus For Monitoring Intracranial Pressure & Pressure Vols Index In Humans"\"05/23/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15412-1"\5606014\"08/511,422"\"Imide Oligomers And Co-Oligomers Containing Pendent Phenylethynyl Groups And Polymers Therefrom"\"08/04/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15412-2"\5689004\"08/747,472"\"Imide Oligomers And Co-Oligomers Containing Pendent Phenylethynyl Groups And Polymers Therefrom"\"08/04/2015"
+"NASA Langley Research Center"\"Issued"\"LAR-15449-1"\6133401\"09/342,462"\"A Method To Prepare Processable Polyimides With Reactive Endgroups Using 1,3 Bis (3-Aminophenoxyl) Benzene"\"06/29/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15449-2"\6288209\"09/667,426"\"Method To Prepare Processable Polyimides With Reactive Endgroups Using 1,3-Bix(3-Aminophenoxyl)Benzene"\"06/29/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15507-1"\6475147\"09/493,044"\"Ultrasonic Technique To Measure Intracranial Pressure"\"01/27/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15508-1"\6545760\"09/535,659"\"Distributed Rayleigh Scatter Fiber Optic Strain Sensor"\"03/24/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15514-1-SB"\5991456\"08/654,840"\"Method Of Improving A Digital Image"\"05/29/2016"
+"NASA Langley Research Center"\"Issued"\"LAR-15524-1"\6000844\"08/810,058"\"A Method And Apparatus For The Portable Identification Of Material Thickness Of Layers Using A Scanning Linear Heat Source And Infrared Detectorcramer"\"03/04/2017"
+"NASA Langley Research Center"\"Issued"\"LAR-15525-1-CU"\5948965\"08/845,899"\"Solid State Carbon Monoxide Sensor"\"04/28/2017"
+"NASA Langley Research Center"\"Issued"\"LAR-15637-1"\6015272\"08/673,627"\"Magnetically Suspended Miniature Fluid Pump And Method Of Making Same"\"06/26/2016"
+"NASA Langley Research Center"\"Issued"\"LAR-15637-2"\6447265\"09/398,878"\"Magnetically Suspended Miniature Fluid Pump And Method Of Designing The Same"\"06/26/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15652-1-CU"\6132694\"08/991,075"\"Catalyst For Oxidation Of Hydro-Carbons And Volatile Organic Compounds"\"12/16/2017"
+"NASA Langley Research Center"\"Application"\"LAR-15665-1-CU"\0\"08/838,596"\"Catalyst For Carbon Monoxide Oxidation"\
+"NASA Langley Research Center"\"Issued"\"LAR-15745-1"\6222007\"09/093,826"\"Prepreg And Composites Made From Polyimide Salt-Like Solution"\"05/29/2018"
+"NASA Langley Research Center"\"Issued"\"LAR-15747-1-CU"\6200539\"09/357,403"\"One-Atmosphere Uniform Glow Discharge Plasma Gas Flow Acceleration"\"07/20/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15767-1"\6180746\"09/316,428"\"Polyimide Foam From Ether-Containing Monomeric Solutions"\"05/21/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15816-1"\6629341\"09/430,677"\"Macro-Fiber Composite Actuator With Interdigitated Electrodes"\"10/29/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15816-2"\7197798\"10/653,824"\"A Method For Fabricating A Piezoelectric Composite Apparatus"\"06/30/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15817-1"\6450820\"09/612,412"\"A Method Of Encouraging Physiological Self-Regulation Through Modulation Of An Operator's Control Input To A Video Game Or Training Simulator"\"07/12/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15818-3"\6922242\"10/465,386"\"Optical Path Switching Based Differential Absorption Radiometry For Substance Detection"\"06/21/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15831-1"\5994418\"09/316,865"\"Hollow Polyimide Microspheres"\"05/21/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15831-2"\6235803\"09/408,652"\"Hollow Polyimide Microspheres"\"05/21/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15831-3"\6084000\"09/394,534"\"Hollow Polyimide Microsphere"\"05/21/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15834-1"\6359107\"09/575,826"\"High Performance / High Temperature Resins For Infusion And Transfer Molding Processes"\"05/18/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15851-1-CU"\6753293\"09/607,211"\"Process For Coating Substrates With Catalyst Materials"\"05/11/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-15854-1"\6761695\"10/94,023"\"Technique For Non-Invasive Absolute Measurement Of Intra-Cranial Pressure In Humans"\"07/28/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-15927-1"\6584848\"10/263,292"\"Dielectric Electrostatic Ultrasonic Transducer (DEUT)"\"09/30/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-15934-1"\6566648\"09/535,661"\"Edge Triggered Apparatus And Method For Measuring Strain In Bragg Gratings"\"03/24/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15943-1"\6746410\"10/121,932"\"Transducer Assembly To Measure Changes In Circumferential Expansion Of The Human Skull Due To Changes In Intracranial Pressure"\"11/16/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-15954-1"\6376830\"09/606,120"\"Single Laser Sweep Full S-Parameter Characterization Of Fiber Bragg Gratings"\"06/15/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-15959-1"\7019621\"09/753,370"\"Structural Tailored High Displacement Ferro-Electric Sensors And Actuators"\"01/02/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-15977-1"\6133330\"09/337,475"\"Polyimide Foam From Monomeric Solutions"\"05/21/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-15990-1"\6551251\"09/784,413"\"Dual Transmission Interface For Passive Fetal Heart Monitoring"\"02/13/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16001-1"\7371358\"10/975,117"\"Catalyst For Treatment And Control Of Post-Combustion Emissions"\"10/25/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16005-1"\6426496\"09/648,529"\"High Precision Solid State Wavelength Monitor"\"11/26/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-16012-1-CU"\6834125\"09/888,701"\"Improvement To The Multiscale Retinex With Color Restoration"\"06/25/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16020-1"\6629446\"09/758,115"\"Single Vector Force Balance Calibration System"\"01/26/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-16079-1"\6939940\"09/757,398"\"Liquid Crystalline Thermosets From Oligo-Esters, Ester-Imides And Ester-Amides"\"01/05/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16083-1"\8062129\"11/536,811"\"A Method And System For Multi-Player Game Playing Where Physiological Characteristics Of The Players Modulate Their Relative Advantage Over Opponents Or Competitors"\"05/22/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-16116-1"\6888346\"10/21,683"\"Giant Magnetoresistive Based Self-Nulling Probe For Deep Flaw Detection"\"11/28/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16176-2"\7109287\"10/988,407"\"Space Environmentally Durable Polyimides And Copolyimides"\"03/03/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16220-1"\6867533\"09/696,527"\"Shaping, Tuning, And Positioning Membrane Structures Using Electroactive Polymer Actuators"\"10/23/2020"
+"NASA Langley Research Center"\"Issued"\"LAR-16231-1-CU"\7092539\"09/997,113"\"MEMS Based Acoustic Array"\"11/28/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16256-1"\8628333\"11/129,756"\"Method And System For Training Psychophysiological Skills Conducive To Optimal Performance Through Perturbation Of Training Tasks, Environments And Devices"\"08/27/2029"
+"NASA Langley Research Center"\"Application"\"LAR-16256-1-CON"\0\"14/153,434"\"Method And System For Training Psychophysiological Skills Conducive To Optimal Performance Through Perturbation Of Training Tasks, Environments And Devices"\"05/13/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16299-1"\7871682\"10/956,520"\"Composite Roll Press And Processes"\"12/07/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16307-1-SB"\7390768\"10/056,845"\"Methodology For The Effective Stabilization Of Tin-Oxide-Based Oxidation/Reduction Catalysts"\"01/22/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-16307-2"\7985709\"10/956,515"\"Methodology For The Effective Stabilization Of Tin-Oxide-Based Oxidation/Reduction Catalysts"\"04/16/2027"
+"NASA Langley Research Center"\"Application"\"LAR-16308-2"\0\"12/726,403"\"Catalyst For Decomposition Of Nitrogen Oxides (Divisional of LAR 16308-1-CU)"\
+"NASA Langley Research Center"\"Issued"\"LAR-16311-1"\6777525\"10/115,812"\"Heat, Moisture, Chemical Resistant Polyimide Compositions And Methods For Making And Using The Same"\"04/01/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-16323-1"\7253903\"11/27,930"\"Method To Linearize Non-Linear Physical Measurements"\"06/24/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16324-1"\6714132\"10/011,229"\"Proximity Sensor"\"11/27/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16324-2"\7106203\"10/783,486"\"Self-Activating System And Method For Alerting When An Object Or Person Is Left Unattended"\"11/27/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16326-1"\7060991\"10/410,605"\"Method For Measuring Thickness Of Small Radius Of Curvature Structures Using A Thermal Line Scanner"\"04/10/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16332-1-CU"\6842543\"09/888,816"\"Method Of Improving A Digital Image Having White Zones"\"06/25/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-16363-1"\6856073\"10/390,675"\"Radial Electric Field Piezo-Diaphragm Fluidic Control Systems"\"03/13/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16383-1-NP"\7588699\"10/288,797"\"Electrically Conductive, Optically Transparent Polymer/Carbon Nanotube Composites And Process For Preparation Thereof"\"07/02/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16383-2"\7972536\"12/546,724"\"Electrically Conductive, Optically Transparent Polymer/Carbon Nanotube Composites And Process For Preparation Thereof"\"10/12/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-16390-1-SB"\7318915\"10/342,660"\"Ruthenium Stabilization Mechanism For Next Generation Oxidation And Reduction Catalyst Systems"\"01/13/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16393-1"\6919669\"10/392,491"\"Sonic Transducers And Sensors Using Radial Field Diaphragms"\"05/31/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16406-1-CU"\7491169\"10/805,816"\"Ultrasonic Method And Means To Assess Compartment Syndrome (Hyper Pressure States In Arm, Leg Muscle/Tendon Compartments)"\"09/20/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16409-1"\8015819\"11/536,790"\"Wet Active Chevron Nozzle For Controllable Jet Noise Reduction"\"09/17/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-16432-1"\7692116\"10/188,525"\"Synthesis Of Carbon Nanotubes Using High Average Power Ultrafast Laser Ablation"\"07/03/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-16437-1-NP"\7169374\"11/129,751"\"Templated Growth Of Carbon Nanotubes"\"05/11/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16440-1"\6740048\"10/263,285"\"Method Of Determining Intracranial Pressure From Skull Expansion Measurements"\"09/25/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-16475-1"\7194912\"10/890,843"\"Carbon Nanotube-Based Structural Health Monitoring Sensor"\"08/07/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16496-1"\7104498\"10/867,114"\"Blown Channel-Wing System For Thrust Deflection And Force/Moment Generation"\"10/03/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16499-1"\7491428\"10/730,188"\"Method for the controlled deposition and alignment of single walled carbon nanotubes"\"11/15/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16510-1"\6773407\"10/263,286"\"Non-Invasive Method Of Determining Absolute Intracranial Pressure"\"12/25/2022"
+"NASA Langley Research Center"\"Issued"\"LAR-16516-1"\6879893\"10/675,502"\"Autonomous Health Monitoring Architecture Hardware"\"09/30/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16517-1"\7048228\"10/678,474"\"Partial-Span Slotted Wing For Transonic Aircraft"\"10/03/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16532-1"\7334998\"11/5,624"\"Low-Noise Fan Exit Guide Vanes"\"12/06/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16538-1"\7675619\"12/129,967"\"Micro-LiDAR For In-Flight Flow Velocimetry And Boundary Layer Control"\"11/11/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-16549-1"\7262543\"10/943,655"\"Inductor (L)-Capacitor ( C ) (aka, LC) Sensor Circuit For Piezo Material Monitoring"\"04/17/2025"
+"NASA Langley Research Center"\"Application"\"LAR-16565-1"\0\"13/020,025"\"e-Sensor: Quantitative Imaging of Electric Fields and Electric Potentials"\
+"NASA Langley Research Center"\"Issued"\"LAR-16566-1"\7285932\"10/975,119"\"Method And Apparatus For Loss Of Control Inhibitor Systems"\"10/27/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16571-1"\7075295\"10/839,448"\"LC Sensing Element For Closed Cavities Having Low Radio Frequency Transmissivity"\"04/30/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16571-2"\7589525\"11/421,886"\"Magnetic Field Response Sensor For Conductive Media"\"09/26/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16571-3"\7759932\"12/533,520"\"Magnetic Field Response Sensor For Conductive Media"\"07/31/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-16573-1"\7129467\"10/943,831"\"Carbon Nanotube Based Light Sensor"\"09/29/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16575-1"\7181942\"10/943,649"\"Instrumented Crimping Tool For Critical Wiring Applications"\"11/24/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16605-1"\7623993\"10/731,742"\"Energy-extraction-based active noise control system"\"11/27/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-16615-1"\6956066\"10/779,552"\"Polyimide Foams"\"02/11/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16615-2"\7541388\"11/124,640"\"Polyimide Foams"\"05/05/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16616-1"\7758927\"10/956,704"\"Laser-Induced Fabrication Of Metallic Interlayers And Patterns In Polyimide Films"\"09/30/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16640-1"\8089677\"12/135,180"\"Programmable Smart Grating Device With Quantum Aperture Array"\"08/05/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-16696-1"\7048235\"10/678,397"\"Slotted Aircraft Wing (a.k.a. Full Span Slotted Wing)"\"10/03/2023"
+"NASA Langley Research Center"\"Issued"\"LAR-16698-1"\7394181\"11/76,824"\"High Performance High Efficiency Hybrid Actuator Systems (HYBAS)"\"03/04/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16736-1"\7962252\"11/422,984"\"Semi Autonomous Flight System With Avionics Sensor Board, Processing Board, And Flight Control Board"\"04/07/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-16845-1"\8083986\"12/315,520"\"Advanced Thermo-Electric Materials with Nano-Voids"\"12/04/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-16854-1"\7381186\"10/911,755"\"Ultrasonic Method And Means To Assess Compartment Syndrome Part B"\"08/02/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16858-1"\7667847\"11/533,921"\"Thin, High-Contrast Targets for Ultralightweight Structures"\"12/15/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-16867-1"\7402264\"11/076,460"\"Electroactive polymer-carbon nanotube-ceramic nanocomposites"\"02/27/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17548-1"\8236413\"12/166,852"\"Fail Safe High-Temperature Composite Structure"\"07/07/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-16867-2"\7527751\"12/109,490"\"Sensing/Actuating Materials Made From Carbon Nanotube Polymer Composites And Methods For Making Same"\"04/25/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-16868-1"\7341883\"11/242,415"\"Lattice Matched SiGe Layer On Single Crystalline Sapphire Substrate"\"09/27/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16871-1"\6413227\"09/459,384"\"Optimization Of Ultrasonic Method For Assessment Of Changes In Intracranial Pressure Through Measurement Of Skull Expansion"\"12/02/2019"
+"NASA Langley Research Center"\"Issued"\"LAR-16872-1"\7514726\"11/387,086"\"Graded Indexed SiGe Layers on Lattice Matched SiGe Layers on Sapphire"\"06/10/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-16874-1"\7723464\"11/674,321"\"Novel Aromatic/Aliphatic Diamine Derivatives For Advanced Compositions And Polymers"\"02/13/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-16877-1"\7186367\"11/110,996"\"Double-Vacuum Bag (DVB) Process For Volatile Management In Resin Matrix Composite Manufacturing"\"07/08/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16885-1"\7890311\"11/177,664"\"Method Of Simulating Flow-Through Area Of A Pressure Regulator"\"12/15/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-16886-1"\7375808\"11/536,120"\"Dual Sensing Capable Germ Or Toxic Chemical (GTC) Sensor Using Quantum Aperture Array With Surface Plasmon Polariton (SPP)"\"09/28/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-16900-1"\7278324\"11/155,923"\"CNT based crack growth detector and strain field monitor"\"08/07/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16906-1"\8529825\"12/928,128"\"Fabrication of Nanovoid-imbedded Bismuth Telluride with Low Dimensional System"\"02/01/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-16907-1"\7783060\"11/126,518"\"A Deconvolution Approach For The Mapping Of Acoustic Sources (DAMAS) Determined From Phased Microphone Arrays"\"03/27/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-16908-1"\7086593\"10/839,445"\"Magnetic Field Response Measurement Acquisition System (Includes LAR-16138-1, LAR-16554-1, LAR-16591-1, LAR-16614-1, LAR-16617-1, & LAR-16908-1)"\"05/04/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-16946-1"\7484930\"11/169,256"\"Blowing Flap Side Edge"\"07/01/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16950-1"\7379231\"11/470,771"\"Ferroelectric Light Control Device"\"09/07/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-16958-1"\7510802\"11/371,575"\"Fabrication of Multilayer Ferritin Array for Bionanobattery"\"08/24/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-16970-1"\7231832\"11/229,439"\"Method For Determining Cracks On And Within Composite Panels"\"12/02/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-16974-1"\7047807\"11/203,583"\"Methods Of Mounting Erectable, Flexible And Fixed Magnetic Field Response Sensors"\"08/08/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-17003-1"\7467921\"11/239,436"\"Rotor Blade Vortex Management Via Boundary Layer Separation Control"\"09/22/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-17013-1"\7647771\"11/374,480"\"Thermally Driven Miniature Piston Actuator"\"11/12/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17017-1"\7537182\"11/250,700"\"Enhanced Separation Control Via Simultaneous Multiple-Location Forcing"\"06/18/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17032-1"\7321185\"11/370,377"\"A New Concept For Active Bistable Twisting Structures"\"03/06/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17044-1"\7558371\"12/254,150"\"Applications Of Twin-Detection XRD Methods On SiGe (111) Layers On Sapphire (0001) Substrate"\"10/20/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17073-1"\7580323\"11/419,818"\"Interdigitated Electrode Actuators For Straining Optical Fibers (IDEAS)"\"05/27/2026"
+"NASA Langley Research Center"\"Application"\"LAR-17088-1"\0\"13/032,045"\"Nanotubular Toughening Inclusions For Improved Mechanical Reinforcement"\
+"NASA Langley Research Center"\"Issued"\"LAR-17112-1"\7507472\"11/81,888"\"Multi-Layer Electroactive Devices"\"09/08/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-17116-1"\7506541\"11/328,468"\"Wireless Fuel Volume Measurement Techniques"\"10/18/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17126-1"\7666939\"11/432,201"\"A Method For Producing Stable Dispersions Of Single Walled Carbon Nanotubes In Polymer Matrices Using Noncovalent Interactions"\"05/11/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17128-1"\7285933\"11/188,227"\"Method And Apparatus For Loss Of Control Inhibitor Systems"\"07/20/2025"
+"NASA Langley Research Center"\"Issued"\"LAR-17135-1"\8217143\"11/827,567"\"Fabrication of Metal Nanoshells Derived by a Biotemplate"\"11/17/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17149-2"\8608993\"13/053,633"\"A Method For Producing Multifunctional Structural Thermally Stable Nanocomposites With Aligned Carbon Nanotubes"\"05/20/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17154-1"\7655595\"11/421,924"\"Sprayable Low Temperature Oxidation Catalyst Coating Based on Sol-Gel Technology"\"08/11/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17154-2"\7781366\"12/369,932"\"Sol-Gel Based Oxidation Catalyst And Coating System Using Same (Divisional of -1)"\"02/12/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17155-1"\7255004\"11/229,438"\"Wireless Fluid-Lead Measuring Dipstick Assembly (Broken Out Of LAR-16974-1)"\"03/22/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17157-1"\7507784\"11/124,508"\"Liquid Crystalline Thermosets From Ester, Ester-Imide, And Ester-Amide Oligomers"\"01/05/2021"
+"NASA Langley Research Center"\"Issued"\"LAR-17163-1"\7467536\"11/428,017"\"Multi-axis Accelerometer Calibration System Using a Cuboidal Attitude Positioning Device"\"08/18/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17165-1"\7595112\"11/461,150"\"Method To Prepare Hybrid Metal/Composite Laminates By Resin Infusion"\"02/01/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17168-1"\7732998\"11/462,114"\"Cylindrical Shaped Micro Fiber Composite (CMFC) Actuators"\"09/24/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17169-1"\7446459\"11/486,200"\"Hybrid Force/Stress Amplified Piezoelectric Energy Harvesting Transducer System"\"07/13/2026"
+"NASA Langley Research Center"\"Application"\"LAR-17211-1"\0\"13/557,250"\"Floating Ultrasonic Transducer Inspection System For Nondestructive Evaluation"\
+"NASA Langley Research Center"\"Issued"\"LAR-17213-1"\8020805\"11/831,233"\"New Configuration and Power Technology for Application-Specific Scenarios of High Altitude Airships"\"03/25/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17224-1"\7998368\"12/272,826"\"Effective Dispersion of Carbon Nanotubes in an Aqueous Solution and Their Application on Bionanotechnology"\"06/04/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17229-1"\7760778\"11/670,044"\"Thin-film evaporative cooling concept for a solid-state laser diode crystal"\"02/01/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17235-1"\7414708\"11/461,569"\"Multi-Point, Multi-Component Interferometric Rayleigh/Mie Doppler Velocimeter"\"08/01/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17237-1"\8294989\"12/512,344"\"Photonic DART (Densely Accumulated Ray-point by micro-zone-plaTe)"\"04/25/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17240-1"\8111943\"12/423,907"\"Computational Visual Servo:Automatic Measurement and Control for Smart Image Enhancement"\"09/14/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17241-1"\8018815\"12/490,747"\"Optical Data Storage System with Micro Zone Plate"\"12/05/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17242-1"\8174695\"12/508,018"\"MICRO-RING THIN-FILM SPECTROMETER ARRAY"\"09/03/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17243-1"\8411214\"12/144,937"\"Variable Visibility Glasses for Flight Training"\"02/01/2032"
+"NASA Langley Research Center"\"Issued"\"LAR-17245-1"\8344281\"12/751,075"\"Use of Beam Deflection to Control Electron Beam Wire Deposition Processes"\"04/26/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17257-1"\7590904\"11/531,703"\"Detecting the loss of configuration access of reprogrammable Field Programmable Gate Array (FPGA) without external circuitry"\"10/07/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17267-1"\7704553\"11/710,386"\"Method of Depositing Metals onto Carbon Allotropes and Compositions Therefrom"\"06/26/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17268-1"\7647543\"11/535,574"\"Integrated mitigation for single event upset (SEU) of reprogrammable field programmable gate arrays (FPGA) operating in radiation environments"\"09/27/2026"
+"NASA Langley Research Center"\"Issued"\"LAR-17280-1"\7159774\"11/305,854"\"Magnetic Field Response Measurement Acquisition System"\"04/30/2024"
+"NASA Langley Research Center"\"Issued"\"LAR-17286-1"\8081734\"12/628,446"\"Miniature, Low-Power X-Ray Tube Using A Microchannel Electron Generator Electron Source"\"02/26/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17290-1"\7737867\"11/696,333"\"Advance Display Media for Improved Airport Surface Operations"\"06/11/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17293-1"\7991491\"11/559,420"\"Control Device And Method For Generating Control Signals For Technical Devices"\"03/04/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17294-1"\8430327\"11/671,089"\"Low Profile Sensors Using Self-Resonating Inductors"\"08/22/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17295-1"\7683797\"11/671,131"\"System For Providing Damage Detection And Thermal Protection"\"02/15/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17300-1"\7538860\"11/840,363"\"A Method and Apparatus for Determination of the Reflection Wavelength of Multiple Low-Reflectivity Bragg Gratings in a Single Fiber"\"12/31/2027"
+"NASA Langley Research Center"\"Application"\"LAR-17307-1"\0\"11/466,569"\"Low Mass Free Piston Space Radiator"\
+"NASA Langley Research Center"\"Issued"\"LAR-17317-1"\8401217\"11/780,500"\"Extreme Low Frequency Acoustic Measurement Portable System"\"11/29/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17317-2"\\"13/771,735"\"Extreme Low Frequency Acoustic Measurement System"\"07/20/2027"
+"NASA Langley Research Center"\"Application"\"LAR-17318-1"\0\"13/082,734"\"Preparation of Metal Nanowire Decorated Carbon Allotropes"\"08/29/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17321-1"\8545986\"12/043,276"\"Ultra High-Temperature, Lightweight Insulation Material Compositions And Methods For Making And Using Them"\"06/27/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17323-1"\0\"11/757,780"\"Concept And Design Of Oxygen Band Radar For Surface Air Pressure Remote Sensing"\
+"NASA Langley Research Center"\"Issued"\"LAR-17325-1"\8060350\"12/56,686"\"Unsteady aerodynamic reduced-order models (ROMs) for efficient aeroelastic analysis"\"03/04/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17327-1"\8117013\"12/002,857"\"Standardized Radiation Shield Design Method: 2005 HZETRN"\"07/05/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17330-1"\0\"11/946,207"\"Multi Functional Composite And Honeycomb Panels"\
+"NASA Langley Research Center"\"Issued"\"LAR-17332-1"\7958733\"11/762,827"\"Active Flow Effectors by Embedded Shape Memory Alloy Actuation"\"11/04/2029"
+"NASA Langley Research Center"\"Application"\"LAR-17332-2"\\"13/096,305"\"Jet Engine Exhaust Nozzle Flow Effector"\"07/05/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17335-1"\8170234\"12/108,562"\"Extension Of DAMAS Phased Array Processing For Spatial Coherence Determination (DAMAS-C)"\"03/02/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17346-1"\7649439\"11/465,503"\"Thermoelectric Devices From Thin Metal System To Include Flexible Substrate And Method Of Making Same"\"04/28/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17355-1"\8164485\"11/863,964"\"A Method of Providing a Synthetic Vision System Flight Management Visualization Display for Aiding Pilot Preview, Rehearsal and/or Review and Real-Time Visual Acquisition of Flight Mission Progress"\"06/24/2029"
+"NASA Langley Research Center"\"Application"\"LAR-17361-1"\0\"12/138,709"\"Airfoil/ Wing Flow Control Using Flexible Extended Trailing Edge"\
+"NASA Langley Research Center"\"Issued"\"LAR-17365-1"\7784732\"11/958,673"\"Boundary-Layer-Ingesting S-Duct Diffusing Inlet Flow Control Using Hybrid Vane/Jet Approach at Transonic Flow Conditions"\"04/26/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17381-1"\8044294\"12/254,016"\"Thermoelectric material made with highly oriented twinned alloy of Si, Ge, C, and Sn on the basal plane of trigonal substrate and thermoelectric device made with the same material"\"10/11/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17382-1"\8052069\"12/393,238"\"Advanced High Performance Vertical Hybrid Electroactive Synthetic Jet Actuator (ASJA-V)"\"10/18/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17384-1"\8662412\"12/354,808"\"Advanced Modified High Performance Synthetic Jet Actuator With Optimized Curvature Shape Chamber (ASJA-M)"\"10/27/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17385-1"\7671306\"11/589,011"\"Apparatus For Free Electron Laser Ablative Synthesis Of Carbon Nanotubes"\"03/10/2028"
+"NASA Langley Research Center"\"Application"\"LAR-17386-1"\0\"12/851,584"\"Fine-Grained Targets For Free Electron Laser Synthesis Of Carbon Nanotubes"\
+"NASA Langley Research Center"\"Issued"\"LAR-17387-1"\7663077\"11/589,010"\"Process For Optimizing The Yield And Production Rate Of Single-Walled Carbon Nanotubes Using Free Electron Laser Synthesis"\"01/23/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17390-1"\8235309\"12/355,782"\"Advanced High Performance Horizontal Piezoelectric Hybrid Synthetic Jet Actuator (ASJA-H)"\"04/02/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17391-1"\7792015\"12/187,458"\"A Byzantine-Fault Tolerant Self-Stabilizing Protocol for Distributed Clock Synchronization Systems"\"08/14/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17402-1"\7964698\"11/935,036"\"Wholly Aromatic Liquid Crystalline Polyetherimide (LC-PEI) Resin for manufacturing high modulus fibers, films, injection molded articles and foams"\"09/27/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17405-1"\8226767\"12/254,134"\"Hybrid Bandgap Engineering for Rhombohedral Super-Hetero-Epitaxy"\"05/11/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17413-2"\0\"12/641,603"\"Nanoparticle-Containing Thermoplastic Composites and Methods of Preparing Same"\
+"NASA Langley Research Center"\"Issued"\"LAR-17425-1"\8059273\"12/496,788"\"Micro Spectrometer for Parallel Light"\"08/19/2029"
+"NASA Langley Research Center"\"Application"\"LAR-17427-1"\0\"12/174,360"\"Tailorable Dielectric Materials with Complex Permittivity Characteristics providing High Dielectric Constants and Low Loss Factors"\
+"NASA Langley Research Center"\"Issued"\"LAR-17432-1"\8112243\"12/118,172"\"Forward Voltage Short Pulse (FVSP) Technique for Measuring High Power Laser Diode Array (LDA) Junction Temperature"\"11/27/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17433-1"\7902815\"11/856,807"\"A Multi-Measurement Wheel Sensor"\"06/19/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17440-1"\7845215\"11/844,571"\"Resonant Difference-Frequency Atomic Force Ultrasonic Microscope"\"02/03/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17444-1"\8042739\"11/864,012"\"Wireless Tamper Detection Sensor Requiring No Electrical Connection"\"11/08/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17447-1"\8002219\"11/941,119"\"Multifunctional Boost Protective Cover (MBPC) For A Launch Abort System (LAS)"\"01/16/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17455-3"\\"13/938,622"\"A Nanotube Film Electrode and an Electroactive Device Fabricated with the Nanotube Film Electrode and Methods for Making Same"\"10/28/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17469-1"\8094306\"12/487,735"\"Micro Ring Grating Spectrometer with Moveable Aperture Slit"\"08/27/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17477-1"\7993567\"12/131,420"\"Auxiliary Electrode For Electrospinning Process"\"10/02/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17478-1"\7883052\"11/954,452"\"Integration Of A Turbo-Fan Engine Above An Aircraft's Wing Which Reduces Drag And Community Noise"\"09/24/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17480-1"\7711509\"11/930,222"\"A Method To Calibrate Magnetic Response Fluid-Level Sensors Using Complete Sensor Immersion In Fluid"\"03/18/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17485-1"\7851062\"12/124,273"\"Composition of and Method to Prepare Hybrid Laminates from Metal Plasma Coated Fibers and Polymer Matrix Resins"\"09/09/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17485-2"\8017190\"12/906,633"\"Metal/Fiber Laminate and Fabrication Using A Porous Metal/Fiber Preform"\"05/21/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17487-1"\8157207\"11/836,517"\"Jet Engine Nozzle Exit Configurations And Associated Systems And Methods"\"04/15/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17488-1"\7814786\"12/015,626"\"Thin-Film Sensor For Measuring Liquid-Level And Temperature Having No Electrical Connections"\"08/26/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17493-1"\8424200\"12/098,000"\"Conducting Nanotubes Or Nanostructures Based Composites, Method Of Making Them And Applications"\"05/16/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17502-1"\8529249\"11/860,703"\"Quick Change Ceramic Flame Holder for High Output Torch"\"03/14/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17502-1-CON"\\"14/021,325"\"Flame Holder System"\"09/25/2027"
+"NASA Langley Research Center"\"Issued"\"LAR-17514-1"\8196858\"12/721,833"\"Mars Airplane"\"02/15/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17526-1"\7991595\"12/138,768"\"Adaptive Refinement Tools (ARTs) for Tetrahedral Unstructured Grids"\"06/07/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17528-1"\7878348\"12/248,339"\"Lightweight Lunar Surface Remote Manipulator System (LSRMS)"\"10/09/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17535-1"\8206674\"12/152,414"\"High Pressure Boron Vaporization Synthesis Of Few-Walled Boron Nitride Nanotube Fibers"\"04/13/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17539-1"\8164328\"12/493,573"\"Development Of Eddy Current Techniques For The Detection Of Stress Corrosion Cracking In Space Shuttle Primary Reaction Control Thrusters"\"01/08/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17547-1"\7848381\"12/366,722"\"Line Tunable Visible and Ultraviolet Laser"\"07/05/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17553-1"\8257491\"12/288,379"\"NEW RHOMBOHEDRAL ALIGNMENT OF CUBIC SEMICONDUCTOR ON TRIGONAL SUBSTRATE AT A HIGH TEMPERATURE"\"07/06/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17554-1"\7769135\"12/288,380"\"X-ray Diffraction Wafer Mapping Method for Rhombohedral Super-Hetero-Epitaxy"\"10/20/2028"
+"NASA Langley Research Center"\"Application"\"LAR-17555-1"\0\"13/020,194"\"Front-Flight-Path Turbulence & Vortex Detection System"\
+"NASA Langley Research Center"\"Issued"\"LAR-17573-1"\7855368\"12/178,173"\"Air Coupled Acoustic Thermography Nondestructive Evaluation System And Method"\"10/09/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17576-1"\7742663\"12/261,376"\"Innovative Structural Design And Materials For Transmission To And Protection Of Ultraviolet And Infrared Radiation Sensors"\"10/30/2028"
+"NASA Langley Research Center"\"Issued"\"LAR-17579-1"\8673649\"12/463,475"\"Wireless Chemical Sensing Using Changes To An Electrically Conductive Reactant Within Sensor's Magnetic Field"\"01/04/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17593-1"\8167204\"12/253,422"\"Open Circuit Damage Location Sensor Having No Electrical Connections"\"10/30/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17608-1"\7901611\"12/274,652"\"Methodology for calculating fiber distribution during electrospinning"\"01/12/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17609-1"\8255732\"12/429,603"\"A Self-Stabilizing Byzantine-Fault-Tolerant Clock Synchronization Protocol"\"12/30/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17629-1"\7813599\"12/390,606"\"A Method for Shape Determination of Multi-Core Optical Fiber"\"02/23/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17634-1"\7893602\"12/328,162"\"Distributed transducer capable of generating or sensing a transverse point load"\"03/14/2029"
+"NASA Langley Research Center"\"Application"\"LAR-17636-1"\0\"13/752,495"\"PICA on Edge: Edgewise strips of PICA ablator to eliminate gaps in capsule heat shield"\"01/29/2033"
+"NASA Langley Research Center"\"Issued"\"LAR-17638-1"\8508413\"13/082,839"\"Fractal Dielectric Microstrip Antenna using Patterned Substrate Material Geometries"\"03/02/2032"
+"NASA Langley Research Center"\"Issued"\"LAR-17651-1"\8259104\"12/493,666"\"Domain Decomposition By the Advancing-Partition Method for Parallel Unstructured Grid Generation"\"03/09/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17655-1"\8111832\"12/424,793"\"Local Intelligence Based Impedance Optimization Scheme for Adaptive Noise Reduction"\"06/25/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17656-1"\8108178\"12/467,475"\"DIRECTED DESIGN OF EXPERIMENTS FOR VALIDATING PROBABILITY OF DETECTION CAPABILITY OF NDE SYSTEMS (DOEPOD)"\"05/05/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17668-1"\0\"12/322,591"\"Device for the Large-Scale synthesis of High-Quality Boron Nitride Nanotubes"\"02/04/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17681-1"\8347479\"12/849,906"\"Thermally-Activated Crack Healing Mechanism for Metallic Materials"\"04/30/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17681-2"\\"13/719,740"\"System for Repairing Cracks in Structures"\"08/04/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17681-3"\8679642\"14/037,850"\"System for Repairing Cracks in Structures"\"08/04/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17689-1"\0\"12/393,289"\"Negative Dielectric Constant Material Based on Ion Conducting Materials"\"08/20/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17694-1"\0\"12/974,359"\"A Synthetic Quadrature Phase Detector/Demodulator for Fourier Transform Spectrometers"\"03/09/2032"
+"NASA Langley Research Center"\"Issued"\"LAR-17695-1"\8658004\"12/470,689"\"Vapor-Barrier Vacuum Isolation System"\"08/01/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17696-1"\0\"12/543,686"\"Asymmetric Dielectric Elastomer Composite Material"\"03/16/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17705-1"\8672107\"13/042,655"\"Tunable damper capable of tailoring the structural damping for individual modes of vibration using minimal space and minimal impact on the system frequencies and mode shapes."\"11/28/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17709-1"\7912101\"12/628,423"\"Increased Efficiency Nonlinear Optical Interactions"\"12/01/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17711-1"\8179203\"12/569,984"\"Wireless Electrical Applications/Devices Using floating Electrodes Electromagnetically Coupled to Open-Circuit Devices"\"07/09/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17723-1"\0\"12/699,334"\"Novel material for wound healing applications."\
+"NASA Langley Research Center"\"Issued"\"LAR-17724-1"\8378659\"12/703,221"\"Electroactive polymer fibers for structural health monitoring."\"01/22/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17735-1"\8490463\"12/881,431"\"Assessment and Calibration of Crimp Tool Equipped with Ultrasonic Analysis, including Phantom Construction"\"10/22/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17736-1"\8147920\"12/370,755"\"Controlled Deposition And Alignment Of Carbon Nanotubes (Continuation of LAR 16499-1)"\"02/13/2029"
+"NASA Langley Research Center"\"Application"\"LAR-17738-1"\0\"12/685,280"\"Sensory Metallic Materials"\
+"NASA Langley Research Center"\"Issued"\"LAR-17743-1"\8473663\"13/011,198"\"Reconfigurable Peripheral Component Interconnect local bus controller and target design."\"10/07/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17745-1"\7906043\"12/550,431"\"Electrically Conductive, Optically Transparent Polymer/Carbon Nanotube Composites And Process For Preparation Thereof"\"11/01/2022"
+"NASA Langley Research Center"\"Application"\"LAR-17877-1"\\"13/277,859"\"Autonomous Leading-Edge Slat Device for Reduction of Aeroacoustic Noise Associated with Aircraft Wings"\
+"NASA Langley Research Center"\"Application"\"LAR-17747-1"\0\"13/029,471"\"Temperature Sensing Using Temperature Sensitive Dielectric Material in Proximity to Open-Circuit Sensors Having No Electrical Connections"\
+"NASA Langley Research Center"\"Application"\"LAR-18090-1"\\"13/786,608"\"No Moving Part - Variable Frequency Fluidic Oscillator"\"03/06/2033"
+"NASA Langley Research Center"\"Application"\"LAR-17747-1-CON"\\"14/193,861"\"Wireless Temperature Sensor Having No Electrical Connections and Sensing Method for Use Therewith"\"02/17/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17748-1"\8303922\"12/546,185"\"Exfoliation of Hexagonal Boron Nitride"\"11/19/2030"
+"NASA Langley Research Center"\"Issued"\"LAR-17759-1"\7935414\"12/406,315"\"Multilayer Electroactive Polymer Composite Material (Continuation of LAR 17112-1)"\"03/18/2029"
+"NASA Langley Research Center"\"Issued"\"LAR-17766-1"\8452073\"12/750,991"\"Method for Closed Loop Process Control for Electron Beam Freeform Fabrication and Deposition Processes"\"10/02/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17769-1"\0\"12/894,279"\"Modifying Surface Energy via Laser Ablative Surface Patterning"\
+"NASA Langley Research Center"\"Application"\"LAR-17777-1"\\"13/443,940"\"Process to Fabricate Specific Sized Monodisperse Polystryene Microparticles"\
+"NASA Langley Research Center"\"Application"\"LAR-17780-1"\0\"12/387,703"\"Boron Nitride Nanotube Fibrils and Yarns (Filed by JLabs, their ref: ID 1248/Docket 2025(JSA)"\
+"NASA Langley Research Center"\"Application"\"LAR-17786-1"\0\"12/964,381"\"Smart Optics Material Characterization System"\
+"NASA Langley Research Center"\"Application"\"LAR-17789-1"\0\"12/969,076"\"Electroactive scaffold"\
+"NASA Langley Research Center"\"Application"\"LAR-17791-1"\0\"13/070,552"\"Apparatus and Method for Selective Enhancement of Surface Plasmon Polaritons to Initiate and Sustain Low Energy Nuclear Reactions in Metal Hydride Systems"\
+"NASA Langley Research Center"\"Issued"\"LAR-17799-1"\8655513\"13/046,030"\"Realtime 3-D Image Processing and Enhancement"\"05/25/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17800-1"\0\"13/527,638"\"Method for generating laser linear frequency modulation waveform"\
+"NASA Langley Research Center"\"Application"\"LAR-17801-1"\0\"13/566,077"\"Coherent Doppler lidar for measuring altitude, ground velocity, and air velocity of aircraft and spaceborne vehicles"\"08/03/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17813-1"\0\"13/198,817"\"Durable Joining Technology for Uniformly-Curved Composite Sandwich Structures"\"08/17/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17813-1-CON"\\"14/200,708"\"Systems, Apparatuses, and Methods for Using Durable Adhesively Bonded Joints for Sandwich Structures"\"08/05/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17830-1"\0\"12/925,047"\"Actuators and Sensors Fabricated with Boron Nitride Nanotubes (BNNTs) and BNNT Polymer Composites"\
+"NASA Langley Research Center"\"Issued"\"LAR-17831-1"\8651429\"13/214,453"\"Blended Cutout Flap Design for the Reduction of Jet-Flap Interaction Noise"\"08/22/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17832-1"\0\"13/214,469"\"Aircraft Engine Nozzle Systems for Jet Noise Reduction by Acoustic Shielding"\
+"NASA Langley Research Center"\"Application"\"LAR-17833-1"\0\"13/214,481"\"Active Aircraft Pylon Noise Control System"\
+"NASA Langley Research Center"\"Issued"\"LAR-17836-1"\8671763\"12/850,708"\"Sub-Surface Windscreen for Outdoor Measurement of Infrasound"\"02/18/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17841-1"\0\" 14/202,699"\"High Mobility Transport Layer Structures for Rhombohedral Si/Ge/SiGe Devices"\"03/10/2034"
+"NASA Langley Research Center"\"Application"\"LAR-17848-1"\0\"13/796,626"\"Spectroscopy using Electric Permittivity, Magnetic Permeability and Electrical Conductivity Spatial Profiles"\"03/12/2033"
+"NASA Langley Research Center"\"Issued"\"LAR-17856-1"\8198976\"12/688,309"\"Flexible Thin Metal Film Thermal Sensing System (CIP of LAR 17346-1)"\"09/20/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17857-1"\0\"12/967,690"\"A GPS-Based Pitot-Static Calibration Method Using Global Output-Error Optimization"\
+"NASA Langley Research Center"\"Application"\"LAR-17869-1"\\"13/166,226"\"Team Electronic Gameplay Combining Different Means of Control"\
+"NASA Langley Research Center"\"Application"\"LAR-17886-1"\\"13/324,527"\"Method and Apparatus to Detect Wire Pathologies Near Crimped Connector"\
+"NASA Langley Research Center"\"Application"\"LAR-17887-1"\\"13/743,750"\"Interrogations Leading to Recertification of Wire Crimps and Other Joining Technologies."\"01/17/2033"
+"NASA Langley Research Center"\"Issued"\"LAR-17888-1"\8605262\"13/167,093"\"Time Shifted PN Codes for CW LIDAR, RADAR, and SONAR"\"12/28/2031"
+"NASA Langley Research Center"\"Issued"\"LAR-17894-1"\8494687\"13/166,121"\"3-D Super Resolution Algorithm for Flash LIDAR Image Enhancement"\"12/11/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17895-1"\\"13/166,166"\"Method and System for Physiologically Modulating Videogames or Simulations Which Use Motion-Sensing Input Devices"\
+"NASA Langley Research Center"\"Application"\"LAR-17902-1"\\"13/068,329"\"Neutron and Ultraviolet Radiation Shielding Films Fabricated Using Boron Nitride Nanotubes and Boron Nitride Nanotube Composites"\
+"NASA Langley Research Center"\"Application"\"LAR-17906-1"\\"13/272,027"\"Abnormal Grain Growth Suppression in Aluminum Alloys"\
+"NASA Langley Research Center"\"Issued"\"LAR-17908-1"\8655094\"13/105,004"\"New Photogrammetry System to Measure Relative 6-Degree-of-Freedom Motion Between Two Bodies Using Heterogeneous Cameras Having Arbitrary Wide-Angle Lenses with Non-Overlapping Fields of View"\"04/23/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17918-1"\\"13/136,216"\"High Kinetic Energy Penetrator Shielding and High Wear Resistance Materials Fabricated with Boron Nitride Nanotubes (BNNTs) and BNNT Polymer Composites"\
+"NASA Langley Research Center"\"Issued"\"LAR-17919-1"\8661653\"13/191,882"\"Z-Shields from Fiber Metal Laminate"\"07/27/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17919-2"\\"13/963,484"\"Z-Shields from Fiber Metal Laminate"\"07/27/2031"
+"NASA Langley Research Center"\"Application"\"LAR-18097-1"\\"13/591,320"\"Arbitrary Shape Initialization of Fiber Optic Shape Sensing Systems"\"08/22/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17923-1"\\"13/411,793"\"A Method of Creating Micro-scale Silver Telluride Grains Covered with Bismuth Nanospheres as Nano-bridges for Thermoelectric Application"\"11/14/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17947-1"\\"13/775,809"\"Linear Fresnel Spectrometer Chip with Gradient Line Grating"\"02/25/2033"
+"NASA Langley Research Center"\"Application"\"LAR-17952-1"\\"13/411,891"\"Multi-Point Interferometric Phase Change Detection Algorithm"\
+"NASA Langley Research Center"\"Application"\"LAR-17958-1"\\"13/195,251"\"Wireless Open-Circuit In-Plane Strain and Displacement Sensors Having No Electrical Connections"\"07/16/2032"
+"NASA Langley Research Center"\"Issued"\"LAR-17959-1"\8087494\"12/894,326"\"Method of Making a Composite Panel Having Subsonic Transverse Wave Speed Characteristics (Continuation of LAR 16535-1)"\"09/30/2030"
+"NASA Langley Research Center"\"Application"\"LAR-17966-1"\\"13/457,687"\"Wide Bandwidth Magneto-Resistive Sensor Based Eddy Current Probe"\
+"NASA Langley Research Center"\"Application"\"LAR-17967-1"\\"13/293,846"\"Relaxor Piezoelectric Single Crystal Multilayer Stacks for Energy Harvesting Transducers (RPSEHT)"\
+"NASA Langley Research Center"\"Application"\"LAR-17972-1"\\"13/200,314"\"BxCyNz Nanotube Formation via the Pressurized Vapor/Condenser"\
+"NASA Langley Research Center"\"Application"\"LAR-17973-1"\\"13/200,316"\"Efficient Boron Nitride Nanotube (BNNT) and BxCyNz Nanotube Formation via Combined Laser-Gas Flow Levitation (JLab's ref: 2010-09-13-RRW)"\
+"NASA Langley Research Center"\"Application"\"LAR-17977-1"\\"13/447,513"\"Variable Stiffness Shape Adaptive Multi-Layered Polymer Composite"\
+"NASA Langley Research Center"\"Application"\"LAR-17980-1"\\"13/457,540"\"Space Utilization Optimization Tools"\
+"NASA Langley Research Center"\"Application"\"LAR-17984-1"\\"13/326,779"\"FLEXible Side Edge Link (FLEXSEL) for Trailing-Edge Flap Aeroacoustic Noise Reduction"\"12/15/2031"
+"NASA Langley Research Center"\"Application"\"LAR-17985-1"\\"13/231,386"\"An Acoustic Beamforming Array Using Feedback-Controlled Microphones for Tuning and Self-Matching of Frequency Response (Michigan State University's ref: TEC2011-0045)"\
+"NASA Langley Research Center"\"Application"\"LAR-17987-1"\\"13/364,814"\"A Self-Stabilizing Distributed Clock Synchronization Protocol For Arbitrary Digraphs"\
+"NASA Langley Research Center"\"Application"\"LAR-17991-1"\\"13/200,315"\"Production Rig for the Synthesis of BNNTs via the PVC Method"\
+"NASA Langley Research Center"\"Issued"\"LAR-17993-1"\8662213\"13/342,264"\"Locomotion of Amorphous Surface Robots"\"05/06/2032"
+"NASA Langley Research Center"\"Application"\"LAR-17993-2"\\"14/189,019"\"Locomotion of Amorphous Surface Robots"\"01/03/2033"
+"NASA Langley Research Center"\"Application"\"LAR-17994-1"\\"13/273,516"\"Manufacturing of Low Mass, Large-Scale Hierarchical Thin Film Structural Systems"\
+"NASA Langley Research Center"\"Application"\"LAR-17996-1"\\"14/202,289"\"Nanostructure Neutron Converter Layer Development"\"03/10/2034"
+"NASA Langley Research Center"\"Issued"\"LAR-18006-1"\8671551\"13/363,413"\"Crimp Quality Assessment from Jaw Position-Ultrasonic Transmission Analysis"\"02/01/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18006-2"\\"14/193,086"\"Crimp Quality Assessment from Jaw Position-Ultrasonic Transmission Analysis"\"02/01/2032"
+"NASA Langley Research Center"\"Issued"\"LAR-18016-1"\8636407\"13/029,426"\"Wireless Temperature Sensor Having No Electrical Connections and Sensing Method For Use Therewith"\"11/23/2031"
+"NASA Langley Research Center"\"Application"\"LAR-18021-1"\\"13/417,347"\"Flap Side Edge Liners for Airframe Noise Reduction"\"07/31/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18023-1"\\"13/417,349"\"Landing Gear Door Liners for Airframe Noise Reduction"\"03/12/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18024-1"\\"13/417,351"\"External Acoustic Liners for Multi-Functional Aircraft Noise Reduction"\
+"NASA Langley Research Center"\"Application"\"LAR-18026-1"\\"13/286,715"\"Synthesis of Novel Copoly(imide oxetane)s with Unique Surface Properties"\
+"NASA Langley Research Center"\"Application"\"LAR-18257-1"\\"14/105,757"\"A Structural Joint With Multi-Axis Load Carrying Capacity"\"12/13/2033"
+"NASA Langley Research Center"\"Issued"\"LAR-18032-1"\8229716\"12/981,432"\"Fast Tracking Methods and Systems for Air Traffic Modeling Using a Monotonic Lagrangian Grid (US Naval Research Laboratory ref: 100148-US2)"\"12/29/2030"
+"NASA Langley Research Center"\"Application"\"LAR-18034-1"\\"13/291,372"\"Compact Active Vibration Control System"\
+"NASA Langley Research Center"\"Application"\"LAR-18037-1"\\"13/453,717"\"A Multifunctional Lightning Protection and Detection System for Aerospace Vehicles"\
+"NASA Langley Research Center"\"Application"\"LAR-18040-1"\\"13/986,089"\"Multi-Functional BN-BN Composite"\"03/29/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18065-1"\\"13/860,697"\"Variable Acceleration Force Calibration System"\"04/11/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18070-1"\\"13/923,307"\"Transparent and Ubiquitous Sensing Technology"\"06/20/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18071-1"\\"13/923,312"\"Using Ubiquitous Conductor to Power and Interrogate Wireless Passive Sensors and Construct Sensor Network"\
+"NASA Langley Research Center"\"Application"\"LAR-18073-1"\\"13/941,441"\"Doped Chiral Polymer Negative Index Materials (DCPNIM)"\"07/12/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18077-1"\\"13/630,459"\"Flight Deck Technology and Procedure for Pilots to Generate Flight-Optimizing Trajectory Requests that Avoid Nearby Traffic"\"09/28/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18089-1"\\"13/786,713"\"Synchronized Sweeping Jet Actuators"\"03/06/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18127-1"\\"13/913,782"\"Synergistic Chemical and Topographical Surface Modifications and Articles of Manufacture for Dynamic Insect Adhesion Mitigation"\"06/10/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18131-1"\\"13/774,422"\"Puncture- healing Thermoplastic Resin Carbon Fiber Reinforced Composites towards More Damage/Impact Tolerant Systems"\
+"NASA Langley Research Center"\"Application"\"LAR-18132-1"\\"13/673,360"\"Modeling of Laser Ablation and Plume Chemistry in a Boron Nitride Nanotube Production Rig"\"11/09/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18143-1"\\"13/694,286"\"In-situ Mechanical Property Measurements of Amorphous Carbon-boron Nitride Nanotube"\"11/15/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18144-1"\\"13/836,609"\"Method and System for Physiologically Modulating Videogames and Simulations Which Use Gesture and Body Image Sensing Control Input Devices"\"03/15/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18160-1"\\"13/864,396"\"Tension Stiffened and Tendon Actuated Space Manipulators"\"04/17/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18166-1"\\"13/764,062"\"Reactive Orthotropic Lattice Diffuser (ROLD) for Reducing Aerodynamic Noise from Aircraft Flap Tips"\"03/12/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18179-1"\\"13/792,489"\"Extreme Reduced Instruction Set Computing (xRISC) for High Speed Execution of Computing Algorithms"\"03/11/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18183-1"\\"13/834,294"\"Height Control and Deposition Measurement for the Electron Beam Free Form Fabrication (EBF3) Process"\"03/15/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18184-1"\\"13/987,706"\"Conductive Polymer/Carbon Nanotube Structural Materials and Methods for Making Same"\"08/23/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18186-1"\\"12/482,503"\"Flexible Volumetric Structure"\
+"NASA Langley Research Center"\"Application"\"LAR-18202-1"\\"13/713,033"\"Ground-to-Space Laser Calibration System"\"12/13/2032"
+"NASA Langley Research Center"\"Application"\"LAR-18204-1"\\"13/800,379"\"Quasi-Static Electric Field Generator"\"03/13/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18211-1"\\"13/781,918"\"A Statistically Based Approach to Broadband Liner Design and Assessment"\"03/01/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18217-1"\\"13/771,116"\"A Graphical Acoustic Liner Design and Analysis Tool"\"02/20/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18246-1"\\"13/765,714"\"Tethered Vehicle Control and Tracking System"\"02/13/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18266-1"\\"14/079,914"\"Airborne Wind Profiling Algorithm for Doppler Wind Lidar (APOLO)"\"11/14/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18267-1"\\"13/838,260"\"Method and System for Physiologically Modulating Action Role-playing Open World Video Games and Simulations Which Use Gesture and Body Image Sensing Control Input Devices"\
+"NASA Langley Research Center"\"Application"\"LAR-18270-1"\\"14/079,965"\"Airborne Doppler Wind Lidar Post Data Processing Software DAPS-LV"\"11/14/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18301-1"\\"13/838,163"\"Flap Edge Noise Reduction Fins (FENoRFins)"\"03/15/2033"
+"NASA Langley Research Center"\"Application"\"LAR-18318-1"\\"14/191,898"\"In-Situ Load System (ILS) for Calibrating and Validating Aerodynamic Properties of Scaled Aircraft in Ground-based Aerospace Testing Applications"\"02/27/2034"
+"NASA Langley Research Center"\"Application"\"LAR-18374-1"\\"14/072,019"\"Modulated Sine Waves for Differential Absorption Measurements Using a CW Laser System"\"06/23/2031"
+"NASA Glenn Research Center"\"Issued"\"LEW-16183-1"\5866518\"08/786,360"\"PS300 - Self Lubricating Readily Polished High Temperature Composite"\"01/16/2017"
+"NASA Glenn Research Center"\"Issued"\"LEW-16519-2"\6291838\"09/448,406"\"Gas Sensing Diode"\"11/15/2019"
+"NASA Glenn Research Center"\"Issued"\"LEW-16901-1"\7190741\"10/274,756"\"A Real-Time Signal-To-Noise Ratio Estimation Technique For BPSK And QPSK Modulation Using The Active Communications Channel"\"10/21/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17153-1"\6550696\"09/794,794"\"Lean Direct Injection Combustor/Multi Point Integrate Module Fuel-Air Mixer"\"02/27/2021"
+"NASA Glenn Research Center"\"Issued"\"LEW-17157-1"\6869480\"10/198,668"\"Method For Production Of Atomic Scale Step Height Reference Specimens With Atomically Flat Surfaces"\"07/17/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17166-1"\7497443\"11/121,850"\"Resilient, Flexible, Pressure-Activated Seal"\"05/03/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17167-1"\6667725\"10/196,391"\"Radio Frequency (RF) Telemetry System For Sensors And Actuators"\"07/11/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17170-1"\6706549\"10/124,689"\"Common-Layered Architecture For Semiconductor Silicon Carbide (CLASSiC) Bulk Fabrication"\"04/12/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17182-1"\7086648\"10/652,088"\"Acoustic Seal"\"08/22/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17240-1"\7427428\"10/601,657"\"Mechanically Improved Interphase Coating For Silicon-Carbide Fiber-Reinforced Silicon-Carbide Matrix Composites"\"06/24/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17256-1"\6845664\"10/263,980"\"MEMS Direct Chip Attach (MEMS-DCA) Packaging Methodologies For Harsh Environments"\"10/03/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17256-2"\7518234\"10/926,206"\"MEMS Direct Chip Attach Packaging Methodologies And Apparatus For Harsh Environments"\"08/25/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17269-2"\8212138\"11/696,441"\"Reverse-Bias Protected Solar Array With Integrated ByPass Battery"\"04/04/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-17269-3"\0\"13/482,493"\"Reverse-Bias Protected Solar Array With Integrated ByPass Battery"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17291-1"\6784276\"10/202,643"\"Improved Processing For Polyimdes Via Concentrated Solid Monomer Reactants Approach"\"07/25/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17293-1"\7023118\"10/390,256"\"A Comprehensive C++ Controller For A Magnetically Supported Vertical Rotor: Version 1.0"\"03/12/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17293-2"\6809450\"10/729,580"\"Software For System For Controlling A Magnetically Levitated Rotor"\"12/04/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17299-1"\6881820\"10/147,477"\"Polyimide Rod-Coil Block Copolymers As Membrane Materials For Ion Conduction"\"05/13/2022"
+"NASA Glenn Research Center"\"Issued"\"LEW-17317-1"\7687016\"10/777,630"\"Process For Improving Properties Of Silicon Carbide (SiC) Fibers And SiC Fiber-Reinforced Ceramic Matrix Composites"\"02/13/2024"
+"NASA Glenn Research Center"\"Application"\"LEW-17317-2"\0\"12/709,086"\"Process For Improving Properties Of Silicon Carbide (SiC) Fibers And SiC Fiber-Reinforced Ceramic Matrix Composites"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17345-2"\7813406\"11/402,997"\"Temporal Laser Pulse Manipulation Using Multiple Optical Ring Cavities"\"04/13/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17383-1"\6967462\"10/455,139"\"Wireless Consumer Power"\"06/05/2023"
+"NASA Glenn Research Center"\"Application"\"LEW-17458-2"\0\"13/113,458"\"Compact Solid-state Entangled Photon Source"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17483-1"\7191013\"10/983,230"\"Hand Held Device For Wireless Powering And Interrogation Of BioMEMS Sensors And Actuators"\"11/08/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17484-5"\7268939\"11/363,300"\"Tracking Of Cells With A Compact Microscope Imaging System Using Intelligent Controls"\"02/24/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17494-1"\7458221\"10/693,850"\"Self-Sealing, Smart, Variable Area Nozzle (S3VAN) For Dynamic Flow Control In Gas Turbine Engines"\"10/23/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17498-1"\7187835\"11/44,063"\"Selective Wavelength Filtering"\"01/28/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17510-1"\7416062\"10/693,853"\"Torsional Magnetorheological Fluid Resistant Device (TMRFRD)"\"10/23/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17517-1"\7326027\"10/856,361"\"Flow-Field Control-Rods To Stabilize Flow In A Centrifugal Compressor"\"05/25/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17520-1"\7259692\"10/931,205"\"Hybrid Power Management (HPM) Upgrade"\"09/01/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17551-1"\7410714\"10/891,599"\"Unitized Regenerative Fuel Cell System"\"07/15/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17561-1"\7400096\"10/894,225"\"Large Area Permanent Magnet ECR Plasma Source"\"07/19/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17589-1"\7305935\"10/925,499"\"Slotted Antenna Rectangular Waveguide Plasma Source For Ion Beam And Electron Beam Production"\"08/25/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17592-1"\7704622\"10/926,457"\"New Ion Conducting Organic/Inorganic Hybrid Polymers"\"08/26/2024"
+"NASA Glenn Research Center"\"Application"\"LEW-17595-1"\0\"13/018,611"\"A Method Of Improving The Thermo-Mechanical Properties Of Fiber-Reinforced Silicon Carbide Matrix Composites"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17605-1"\8394492\"10/974,991"\"Skin Modified Aerogel Monoliths For Improved Ruggedness And Lower Hydrophylicity"\"10/28/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17618-1"\7015304\"10/897,279"\"High Tg Polyimides For Resin Transfer Molding (RTM)"\"07/23/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17618-1-REIS"\"RE43,880"\"11/429,639"\"Solvent-Free Low Melt Viscosity Imide Oligomers and Thermosetting Polyimide Composites"\"05/08/2026"
+"NASA Glenn Research Center"\"Application"\"LEW-17618-3"\\"13/952,872"\"High Tg Polyimides For Resin Transfer Molding (RTM)"\"07/29/2033"
+"NASA Glenn Research Center"\"Issued"\"LEW-17630-1"\7534519\"11/228,185"\"Bi-Electrode Supported Cell For High Power Density Solid Oxide Fuel Cells"\"09/16/2025"
+"NASA Glenn Research Center"\"Application"\"LEW-17634-1"\0\"11/228,184"\"Solid Oxide Fuel Cell Stack Design With Bi-Electrode Supported Cells"\
+"NASA Glenn Research Center"\"Application"\"LEW-17634-2"\0\"12/860,210"\"Solid Oxide Fuel Cell Stack Design With Bi-Electrode Supported Cells"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17642-2"\7308164\"11/398,734"\"Energetic Atomic And Ionic Oxygen Textured Optical Surfaces For Blood Glucose Monitoring"\"03/23/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17642-4"\7305154\"11/483,887"\"Energetic Atomic And Ionic Oxygen Textured Optical Surfaces For Blood Glucose Monitoring"\"07/11/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17661-1 with LEW-17765-1"\7438030\"11/213,604"\"Method of Fabricating Silicon Carbide Corrugated Diaphragms and Modular Actuator"\"08/26/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17664-1"\7500350\"11/44,471"\"Elimination Of Lifetime Limiting Mechanism Of Hall Thrusters"\"01/28/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17671-1"\7493869\"11/311,183"\"Very Large Area/Volume Microwave ECR Plasma And Ion Source"\"12/16/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17672-1"\7261783\"10/946,286"\"Low Density High Creep Resistant Single Crystal Superalloy For Turbine Airfoils"\"09/22/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17678-1"\7624566\"11/40,304"\"Magnetic Circuit For Hall Effect Plasma Accelerator"\"01/18/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17694-1"\7397978\"11/180,990"\"Carrier Structure For Packaging Microphotonic Millimeter-Wave Receiver Based On Lithium Niobate Electro-Optic Resonator Disk Technology"\"07/13/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17704-1"\7250723\"11/16,735"\"Cathode Luminescence Light Source For Broad Band Application In The Visible"\"12/21/2024"
+"NASA Glenn Research Center"\"Issued"\"LEW-17765-1 with LEW-17661-1"\7438030\"11/213,604"\"Side Sliding Microactuator"\"10/21/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-17786-1"\8197249\"11/412,935"\"Fully-Premixed Low-Emissions High-Pressure Multi-fuel Burner"\"04/28/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17814-1"\7574137\"11/418,304"\"Multi-wavelength Time-coincident Optical Communications System"\"05/05/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17820-1"\7755292\"11/625,545"\"Method For Ultraminiature Fiber Light Source"\"01/22/2027"
+"NASA Glenn Research Center"\"Issued"\"LEW-17820-2"\8264134\"12/795,356"\"Method For Ultraminiature Fiber Light Source"\"09/11/2032"
+"NASA Glenn Research Center"\"Issued"\"LEW-17825-1"\8163243\"11/517,555"\"Zero G Condensing Heat Exchanger With Integral Disinfection"\"09/07/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17826-1"\7385692\"11/412,924"\"Method And System For Fiber Optic Determination Of Nitrogen And Oxygen Concentrations In Ullage Of Liquid Fuel Tanks"\"04/28/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17859-1"\7389675\"11/434,578"\"Miniaturized Metal (Metal Alloy)/PdOx/SiC Schottky Diode Gas Sensors For Hydrogen And Hydrocarbons Detection At High Temperatures"\"05/12/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17859-2"\8001828\"12/143,139"\"Miniaturized Metal (Metal Alloy) PdOx/Sic Hydrogen And Hydrocarbon Gas Sensors"\"06/20/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-17877-1"\7876276\"11/499,982"\"Antenna Near-Field Probe Station Scanner"\"08/02/2026"
+"NASA Glenn Research Center"\"Application"\"LEW-17877-2"\\"12/857,004"\"Antenna Near-Field Probe Station Scanner"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17904-1"\7425650\"11/378,553"\"Syntheis Of Asymmetric Dianhydrides"\"03/15/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17904-2"\7381849\"11/890,104"\"Synthesis Of Asymmetrical Benzophenone Dianhydride And Asymmetrical 6F-Dianhydride And Polyimides Therefrom (ALSO See LEW 18236-1)"\"07/19/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-17915-1"\0\"12/536,969"\"Secure Optical Communications Using Quantum Two-Photon Transparency Modulation Spectroscopy"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17916-1"\8052854\"11/754,255"\"Miniature Amperometric Solid Electrolyte Carbon Dioxide Sensor"\"05/25/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-17916-2"\\"13/267,978"\"Miniature Amperometric Solid Electrolyte Carbon Dioxide Sensor"\
+"NASA Glenn Research Center"\"Application"\"LEW-17945-1"\0\"11/677,654"\"Portable Unit For Metabolic Analysis PUMA"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17951-1"\8545786\"10/621,752"\"Manufacture Of Porous Net-Shaped Materials Comprising Alpha Or Beta Tricalcium Phosphate Or Mixtures Thereof"\"07/16/2023"
+"NASA Glenn Research Center"\"Issued"\"LEW-17954-1"\8016543\"11/695,435"\"Composite Case Armor"\"04/02/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-17963-1"\0\"11/860,661"\"Passive Gas/Liquid Separation Within a Fuel Cell or Electrolysis Cell Using A Conductive Porous Separator"\
+"NASA Glenn Research Center"\"Issued"\"LEW-17975-1"\7382944\"11/489,813"\"Aluminization And Hyperthermal Atomic Oxygen Texturing Of Polymethylmethacralate Optical Fibers For Blood Glucose Monitoring"\"07/14/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-17991-1"\7390161\"/0"\"Toughened Composite Structures"\"06/24/2025"
+"NASA Glenn Research Center"\"Issued"\"LEW-18003-1"\7583169\"11/689,770"\"RF MEMS Switches Utilizing Non-Metallic Thin Film Cantilevers/Bridges With Controlled Stress And Conductivity"\"03/22/2027"
+"NASA Glenn Research Center"\"Issued"\"LEW-18042-1"\8067478\"11/582,693"\"A Method of Crosslinking Aerogels Using a One-pot Reaction Scheme"\"10/16/2026"
+"NASA Glenn Research Center"\"Application"\"LEW-18042-2"\0\"13/242,425"\"A Method of Crosslinking Aerogels Using a One-pot Reaction Scheme"\
+"NASA Glenn Research Center"\"Application"\"LEW-18043-1"\7341040\"11/486,460"\"Supercharged Two-Cycle Engines Employing Novel Single Element Reciprocating Shuttle Inlet Valve Mechanisms And With A Variable Compression Ratio"\"07/14/2026"
+"NASA Glenn Research Center"\"Application"\"LEW-18048-1"\0\"12/285,157"\"Two And Three Dimensional Near Infrared Subcutaneous Structure Imager Using Adaptive Nonlinear Video Processing"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18049-1"\7909897\"11/946,079"\"Direct Fuel Impingement Planar-Array-Microreactor"\"11/28/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18054-1"\7501032\"11/364,283"\"High Work Output Ni-Ti-Pt High Temperature Shape Memory Alloys And Associated Processing Methods"\"02/28/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-18059-1"\8242162\"11/956,848"\"Fluorescent On-Off Chemical Sensors"\"11/30/2019"
+"NASA Glenn Research Center"\"Issued"\"LEW-18076-1"\7999173\"11/689,431"\"Dust removal from solar cells"\"03/21/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-18076-2"\\"13/198,896"\"Dust Removal from Solar Cells"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18089-1"\8077103\"11/774,574"\"Cup Cylindrical Waveguide Antenna"\"07/06/2027"
+"NASA Glenn Research Center"\"Issued"\"LEW-18138-1"\7904282\"11/689,874"\"In-Flight Fault Accommodation Through Automated Control Parameter Changes"\"03/22/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-18205-1"\0\"12/317,232"\"Branched Rod-Coil Polyimide-poly(ethylene Oxide) (PEO) Copolymers That Are Cured In The Solid State At Ambient Temperatures"\
+"NASA Glenn Research Center"\"Application"\"LEW-18207-1"\0\"11/759,570"\"Circuit For Communication Over DC Power Line Using High Temperature Electronics"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18221-1"\7763325\"11/864,607"\"A Method For Thermal Spraying Of Coatings Using Resonant Pulsed Combustion"\"09/28/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-18221-2"\\"12/835,345"\"A Method For Thermal Spraying Of Coatings Using Resonant Pulsed Combustion"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18236-1"\8093348\"11/894,290"\"Synthesis Of Asymmetrical Benzophenone Dianhydride And Asymmetrical 6F-Dianhydride And Polyimides Therefrom"\"08/22/2027"
+"NASA Glenn Research Center"\"Application"\"LEW-18236-2"\0\"13/325,626"\"Synthesis Of Asymmetrical Benzophenone Dianhydride And Asymmetrical 6F-Dianhydride And Polyimides Therefrom"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18248-1"\7791552\"11/871,237"\"Cellular Reflectarray Antenna"\"10/12/2027"
+"NASA Glenn Research Center"\"Issued"\"LEW-18248-2"\7990327\"12/874,370"\"Cellular Reflectarray Antenna"\"09/02/2030"
+"NASA Glenn Research Center"\"Issued"\"LEW-18253-1"\8191426\"12/133,743"\"Low TCR Nanocomposite Strain Gages"\"06/05/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18254-1"\7876423\"12/163,382"\"Simultaneous Non-Contact Precision Measurement Of Microstructual And Thickness Variation In Dielectric Materials Using Terahertz Energy"\"06/27/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18255-1"\7630736\"11/541,102"\"Autonomous Wireless Sensor Transceiver"\"05/09/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18256-1"\7688117\"12/081,762"\"An N Channel JFET Based Digital Logic Gate Structure Using Resistive Level Shifters And Having Direct Application To High Temperature Silicon Carbide Electronics"\"04/21/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18261-1"\7933027\"12/326,436"\"A Software Platform For Post-Processing Waveform-Based NDE"\"12/02/2028"
+"NASA Glenn Research Center"\"Application"\"LEW-18291-1"\0\"12/214,114"\"Adaptive Morphological Feature-Based Object Classifier For A Color Imaging System"\
+"NASA Glenn Research Center"\"Application"\"LEW-18296-1"\0\"13/193,160"\"Modular Battery Charge Controller"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18313-1"\7923715\"12/336,503"\"A Novel Nanoionics-based Switch For Radiofrequency (RF) Applications"\"12/06/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18313-2"\8410469\"13/050,229"\"A Novel Nanoionics-based Switch For Radiofrequency (RF) Applications"\"03/17/2031"
+"NASA Glenn Research Center"\"Application"\"LEW-18324-1"\0\"12/195,358"\"Semiconductor Metal Oxide Modified Solid Electrolyte Carbon Dioxide Microsensors With Reduced Operation Temperature"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18325-1"\8415839\"12/319,617"\"External Magnetic Field Reduction Techniquie For Advanced Stirling Radioisotope Generator"\"01/09/2029"
+"NASA Glenn Research Center"\"Application"\"LEW-18325-2"\\"13/859,179"\"External Magnetic Field Reduction Techniquie For Advanced Stirling Radioisotope Generator"\"01/09/2029"
+"NASA Glenn Research Center"\"Issued"\"LEW-18338-1"\8506787\"12/533/258"\"Advancd Lightweight, High-Strength Electrochemical Cell Design and Structures"\"07/31/2029"
+"NASA Glenn Research Center"\"Issued"\"LEW-18340-1"\8091445\"12/431,456"\"Offset Compound Gear Inline Two-Speed Drive"\"04/28/2029"
+"NASA Glenn Research Center"\"Issued"\"LEW-18340-2"\8668613\"13/346,959"\"Offset Compound Gear Inline Two-Speed Drive"\"01/10/2032"
+"NASA Glenn Research Center"\"Issued"\"LEW-18356-1"\8220989\"12/571,215"\"Device for Measuring the Thermal Conductivity of Small, Highly Insulating Materials"\"09/30/2029"
+"NASA Glenn Research Center"\"Issued"\"LEW-18356-2"\8573835\"13/492,181"\"Device for Measuring the Thermal Conductivity of Small, Highly Insulating Materials"\"06/08/2032"
+"NASA Glenn Research Center"\"Issued"\"LEW-18362-1"\7872750\"12/285,173"\"Space Radiation Detector with Spherical Geometry"\"09/30/2028"
+"NASA Glenn Research Center"\"Issued"\"LEW-18362-2"\8159669\"12/972,624"\"Space Radiation Detector with Spherical Geometry"\"12/20/2030"
+"NASA Glenn Research Center"\"Issued"\"LEW-18373-1"\8353209\"12/570,841"\"A Radio Frequency Tank Eigenmode Sensor For Propellant Quantity Gauging"\"02/04/2031"
+"NASA Glenn Research Center"\"Issued"\"LEW-18426-1"\8484980\"12/894,346"\"A Free-Jet Dual-Mode Combustor Concept for Wide Operating Range Ramjet Propulsion"\"09/30/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18426-2"\0\"13/941,987"\"A Free-Jet Dual-Mode Combustor Concept for Wide Operating Range Ramjet Propulsion"\"07/15/2033"
+"NASA Glenn Research Center"\"Issued"\"LEW-18432-1"\7935601\"12/584,497"\"Addendum of Self-Aligned Ion Implant to Design and Processing of SiC High Temperature Transistors for Durable Operation Above 400 C"\"09/04/2029"
+"NASA Glenn Research Center"\"Application"\"LEW-18432-2"\0\"13/078,510"\"Addendum of Self-Aligned Ion Implant to Design and Processing of SiC High Temperature Transistors for Durable Operation Above 400 C"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18458-1"\8386121\"12/791,907"\"Optimal Tuner Selection For Kalman Filter-Based Aircraft Engine Performance Estimation"\"06/02/2030"
+"NASA Glenn Research Center"\"Issued"\"LEW-18461-1"\8159238\"12/570,742"\"Method and Circuit for In-Situ Health Monitoring of Solar Cells in Space"\"09/30/2029"
+"NASA Glenn Research Center"\"Application"\"LEW-18461-2"\\"13/448,801"\"Method and Circuit for In-Situ Health Monitoring of Solar Cells in Space"\
+"NASA Glenn Research Center"\"Application"\"LEW-18466-1"\0\"12/616,952"\"Spring Tire"\
+"NASA Glenn Research Center"\"Application"\"LEW-18473-1"\0\"12/879,713"\"Ka-Band Waveguide 2-Way Hybrid Combiner for MMIC Amplifiers With Unequal and Arbitrary Power Output Ratio"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18474-1"\8609750\"12/792,380"\"Selective Clay Placement Within A Silicate Clay-Epoxy Blend Nanocomposite"\"06/02/2030"
+"NASA Glenn Research Center"\"Issued"\"LEW-18476-1"\8182741\"12/544,742"\"Ball Bearings Comprising Nickel-Titanium And Methods Of Manufacture Thereof"\"08/20/2029"
+"NASA Glenn Research Center"\"Application"\"LEW-18476-2"\0\"12/544,674"\"Ball Bearings Comprising Nickel-Titanium And Methods Of Manufacture Thereof"\
+"NASA Glenn Research Center"\"Application"\"LEW-18477-1"\0\"13/242,300"\"Graphene Based Reversible Nano-Switch/Sensor Schottky Diode (nanoSSSD) Device"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18483-1"\8310671\"12/893,627"\"Frame-Transfer Gating (FTG) Raman Spectroscopy for Time-Resolved Multiscalar Combustion Diagnostics"\"09/29/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18486-2"\0\"14/168,830"\"Polyimide Aerogels With Three Dimensional Cross-Linked Structure"\"01/30/2034"
+"NASA Glenn Research Center"\"Issued"\"LEW-18491-1"\8209976\"12/323,091"\"Shape Memory Based Actuators and Release Mechanisms"\"11/25/2028"
+"NASA Glenn Research Center"\"Application"\"LEW-18492-1"\0\"13/036,887"\"Synthesis Methods, Microscopy Characterization and Device Integration of Nanoscale Metal Oxide Semiconductors for Gas Sensing in Aerospace Applications"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18496-1"\8283172\"12/711,465"\"Process to Produce Iron Nanoparticles - Lunar Dust Simulant Composite"\"02/24/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18500-1"\0\"12/848,903"\"Precision Time Protocol Base Trilateration for Planetary Navigation"\
+"NASA Glenn Research Center"\"Application"\"LEW-18516-1"\0\"13/542,163"\"Hybrid Gear"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18538-1"\8373175\"12/791,276"\"Ohmic Contact to N- and P-type Silicon Carbide"\"06/01/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18542-1"\0\"12/870,475"\"Functionalization of Single Wall Carbon Nanotubes (SWCNTs) by Photooxidation"\
+"NASA Glenn Research Center"\"Application"\"LEW-18554-1"\0\"12/845,998"\"Internal Limit Sensor (ILS)"\
+"NASA Glenn Research Center"\"Application"\"LEW-18561-1"\0\"12/726,926"\"NASA PS400: A New High Temperature Solid Lubricant Coating for High Temperature Wear Applications"\
+"NASA Glenn Research Center"\"Application"\"LEW-18565-1"\0\"13/646,100"\"Catalytic Microtube Rocket Igniter"\"10/05/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18566-1"\0\"12/829,663"\"Low Density, High Creep Resistant Single Crystal Superalloy with Lower Manufacturing Cost"\
+"NASA Glenn Research Center"\"Application"\"LEW-18586-1"\\"13/030,342"\"Shock Sensing Apparatus"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18593-1"\8653693\"13/014,849"\"Integrated Exciter/Igniter"\"01/27/2031"
+"NASA Glenn Research Center"\"Issued"\"LEW-18594-1"\8409372\"12/874,523"\"Thermomechanical Methodology for Stabilizing Shape Memory Alloy (SMA) Response"\"09/02/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18594-2"\\"13/845,526"\"Thermomechanical Methodology for Stabilizing Shape Memory Alloy (SMA) Response"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18601-1"\8577504\"12/954,009"\"Inductive Power Device (IDP)"\"11/24/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18604-1"\\"12/894,444"\"Shock Resistant, Debris Tolerant, Lightweight, Corrosion Proof Bearings, Mechanical Components and Mechanisms Made From Hard, Highly Elastic Materials"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18605-1"\8468794\"12/894,565"\"Dual-Mode Hybrid-Engine (DMH-Engine): A Next-Generation Electric Propulsion Thruster"\"09/30/2030"
+"NASA Glenn Research Center"\"Application"\"LEW-18605-2"\\"13/713,907"\"Dual-Mode Hybrid-Engine (DMH-Engine): A Next-Generation Electric Propulsion Thruster"\
+"NASA Glenn Research Center"\"Application"\"LEW-18605-3"\\"14/152,125"\"Dual-Mode Hybrid-Engine (DMH-Engine): A Next-Generation Electric Propulsion Thruster"\
+"NASA Glenn Research Center"\"Application"\"LEW-18608-1"\\"12/892,339"\"Liquid Tin Electrodes for Directo Conversion of JP-8 Fuel using the NASA BSC Solid Oxide Fuel Cell"\
+"NASA Glenn Research Center"\"Application"\"LEW-18614-1"\\"13/303,292"\"High-Temperature Thermometer Using Cr-Doped GdAlO3 Broadband Luminescence"\
+"NASA Glenn Research Center"\"Application"\"LEW-18615-1"\\"12/892,278"\"Purify Nanomaterials By Dissolving Excess Reactants And Catalysts In Ferric Chloride"\
+"NASA Glenn Research Center"\"Application"\"LEW-18629-1"\\"13/731,314"\"Electrospray Collection of Lunar Dust"\
+"NASA Glenn Research Center"\"Application"\"LEW-18631-1"\\"13/218,847"\"Circuit for Communication Over Power Lines"\
+"NASA Glenn Research Center"\"Application"\"LEW-18632-1"\\"13/311,987"\"Method For Fabricating Diamond-Dispersed Fiber-Reinforced Composite Coating On Low Temperature Sliding Thrust Bearing Interfaces"\
+"NASA Glenn Research Center"\"Application"\"LEW-18634-1"\\"13/134,959"\"Multi-Parameter Aerosol Scattering Sensor"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18636-1"\8416007\"13/098,918"\"A Source Coupled N Channel JFET Based Digital Logic Gate Structure Using Resistive Level Shifters and Having Direct Application to High Temperature Silicon Carbide Electronics"\"05/02/2031"
+"NASA Glenn Research Center"\"Application"\"LEW-18639-1"\\"13/112,293"\"Atomic Oxygen Fluence Monitor"\
+"NASA Glenn Research Center"\"Application"\"LEW-18649-1"\\"12/870,443"\"Ultracapacitor Based Uninterruptible Power Supply (UPS) System"\
+"NASA Glenn Research Center"\"Application"\"LEW-18652-1"\\"13/476,470"\"Polarization Dependent Whispering Gallery Modes in Microspheres"\
+"NASA Glenn Research Center"\"Application"\"LEW-18658-1"\\"13/250,300"\"Levitated Ducted Fan (LDF) Aircraft Auxiliary Generator"\
+"NASA Glenn Research Center"\"Application"\"LEW-18674-1"\\"13/552,760"\"Polymer Electrolyte Based Ambient Temperature Oxygen Microsensors with Extremely Low Power Consumption for Enviromental Monitoring Applications"\
+"NASA Johnson Space Center"\"Application"\"MSC-25349-1"\0\"13/922036"\"Robonaut Teleoperation System"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18691-1"\7588746\"11/431,815"\"Process and Apparatus for Hydrogen and Carbon Production via Carbon Aerosol-Catalyzed Dissociation of Hydrocarbons"\"05/10/2026"
+"NASA Glenn Research Center"\"Issued"\"LEW-18692-1"\7332146\"11/148,778"\"Method For Zero Emission Liquid Hydrogen Production From Methane & Landfill Gas"\"06/08/2025"
+"NASA Glenn Research Center"\"Application"\"LEW-18693-1"\\"/"\"Process For Hydrogen Production via Integrated Processing of Landfill Gas and Biomass"\
+"NASA Glenn Research Center"\"Application"\"LEW-18694-1"\\"13/075,879"\"Discrete Data Qualification System and Method Comprising Noise Series Fault Detection"\
+"NASA Glenn Research Center"\"Application"\"LEW-18704-1"\\"13/531,763"\"A Hybrid Power Management (HPM) Based Vehicle Architecture"\
+"NASA Glenn Research Center"\"Application"\"LEW-18714-1"\\"13/361,220"\"High Strength Nanocomposite Glass Fibers"\
+"NASA Glenn Research Center"\"Issued"\"LEW-18717-1"\8476979\"13/178,101"\"A Novel Wideband GaN MMIC Distributed Amplifier Based Microwave Power Module for Space Communications, Navigation, and Radar"\"07/07/2031"
+"NASA Glenn Research Center"\"Application"\"LEW-18717-2"\\"13/847,779"\"A Novel Wideband GaN MMIC Distributed Amplifier Based Microwave Power Module for Space Communications, Navigation, and Radar"\
+"NASA Glenn Research Center"\"Application"\"LEW-18724-1"\\"13/339,521"\"VESGEN Software for Mapping and Quantification of Vascular Remodeling in Botanical Plant Leaves"\
+"NASA Glenn Research Center"\"Application"\"LEW-18732-1"\\"13/514,582"\"Water Purification by High Voltage, Nanosecond, Non-Equilibrium Plasma: Applications to Human Spaceflight and Terrestrial Point-of-Use"\"08/16/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18736-1"\\"13/534,745"\"Iridium Interfacial Stack (IrIS) Final"\
+"NASA Glenn Research Center"\"Application"\"LEW-18738-1"\\"13/474,948"\"Atmospheric Turbulence Modeling for Aero Vehicles"\
+"NASA Glenn Research Center"\"Application"\"LEW-18752-1"\\"13/686,000"\"Large Strain Transparent Magneto-active Polymer Nanocomposites"\"11/28/2031"
+"NASA Glenn Research Center"\"Application"\"LEW-18754-1"\\"13/534,870"\"Method For Making Measurements Of The Post-Combustion Residence Time In A Gas Turbine Engine"\
+"NASA Glenn Research Center"\"Application"\"LEW-18761-1"\\"13/247,601"\"Temperature Sensitive Coating Sensor Based On Hematite"\
+"NASA Glenn Research Center"\"Application"\"LEW-18762-1"\\"13/364691"\"Selenium Interlayer for High-efficiency Multijunction Solar Cell"\
+"NASA Glenn Research Center"\"Application"\"LEW-18768-1"\\"13/788,041"\"Processing of Nanosensors Using a Sacrificial Template Approach"\"03/23/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18769-1"\\"13/537,816"\"Compact, Lightweight, CMC (Ceramic Matrix Composite)-Based Acoustic Liner for Subsonic Jet Aircraft Engines--Offering High Temperature Capability, Weight Reduction, and Broadband Acoustic Treatment"\
+"NASA Glenn Research Center"\"Application"\"LEW-18771-1"\\"13/301,249"\"Integrated Temperature and Capacitive Ablation Recession Rate Sensors"\
+"NASA Glenn Research Center"\"Application"\"LEW-18785-1"\\"13/246,440"\"Method to Pre-Stress Shock Resistant Mechanical Components and Mechanisms made from Hard, Highly Elastic Materials"\
+"NASA Glenn Research Center"\"Application"\"LEW-18789-1"\\"13/771,833"\"Method to Increase Performance of Foil Bearings Through Passive Thermal Management"\"02/27/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18797-1"\\"13/714,906"\"High Speed, Compliant, Planetary Flywheel Touchdown Bearing"\"12/16/2031"
+"NASA Glenn Research Center"\"Application"\"LEW-18802-1"\\"13/534,804"\"Alpha-STREAM Convertor - A Stirling Engine with no moving parts, eliminated streaming losses, high efficiency, low cost fabrication, and electronic wave modulation."\
+"NASA Glenn Research Center"\"Application"\"LEW-18809-1"\\"13/410,663"\"Sampling and Control Circuit Board for an Inertial Measurement Unit"\"08/03/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18816-1"\\"13/749,773"\"High Speed Edge Detecting Circuit For Use With Linear Image Sensor"\"06/01/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18821-1"\\"13/561,359"\"Dopant Selective Reactive Ion Etching of Silicon Carbide"\"07/30/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18822-1"\\"13/524,327"\"Planar Modular Package"\
+"NASA Glenn Research Center"\"Application"\"LEW-18825-1"\0\"13/804,546"\"Porous Cross-Linked Polyimide-UREA Networks"\"03/14/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18837-1"\\"13/527,181"\"In-Situ Solid Particle Generator"\
+"NASA Glenn Research Center"\"Application"\"LEW-18844-1"\\"13/918,333"\"Electrospun Nanofiber Coating Of Fiber Materials: A Composite Toughening Approach"\"06/14/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18849-1"\\"13/906,521"\"Paired Threaded Film Cooling Holes for Improved Turbine Film Cooling"\"05/31/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18858-1"\\"13/904,513"\"V-Cess: A Novel Flow Control Method Using A Shaped Recess"\"05/29/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18862-1"\\"13/474,972"\"Cascading TESLA oscillating flow diode for Stirling Engine Gas Bearings"\
+"NASA Glenn Research Center"\"Application"\"LEW-18864-1"\\"13/756,855"\"Polyimide Aerogel Thin Films"\"02/03/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18873-1"\\"13/968,000"\"High Temperature Single Crystal Preloader"\"08/15/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18887-1"\\"13/756,604"\"Fuzzy Neuron: Method and Hardware Realization"\"02/01/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18889-1"\\"13/713,846"\"High Speed Idle Engine Control Mode"\"12/13/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18890-1"\\"13/871,114"\"Suppression Of Unwanted Noise And Howl In A Test Configuration Where A Jet Exhaust Is Discharged Into A Duct"\
+"NASA Glenn Research Center"\"Application"\"LEW-18891-1 with LEW-18611-1 and LEW-18895-1"\\"13/723,598"\"G6 Flywheel Design"\"12/23/2031"
+"NASA Glenn Research Center"\"Application"\"LEW-18893-1"\\"13/653,027"\"Novel Aerogel-Based Antennas (ABA) for Aerospace Applications"\
+"NASA Glenn Research Center"\"Application"\"LEW-18900-1"\\\"High Efficiency, High Temperature Titanium Heat Pipe Radiator for Space Power and Propulsion Systems"\
+"NASA Glenn Research Center"\"Application"\"LEW-18902-1"\\"14/094,006"\"Analog Correlator Based on One Bit Digital Correlator"\"12/02/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18903-1"\\"13/923,441"\"Modeling and Simulation of a Solar Electric Propulsion Vehicle in Near-Earth Vicinity Including Solar Array Degradation"\"06/21/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18919-1"\\"13/645,799"\"Wireless Controlled Chalcogenide Nanoionic Radio Frequency Switch"\"04/04/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18923-1"\\"13/963,060"\"New Power Source For Deep Space Missions- Utilizing The Doubly Exothermic Reaction Between Deuterium And Palladium To Produce Electrical Power"\"08/09/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18928-1"\\\"Pt-Ti-Si Simultaneous Ohmic Contacts to N- and P-Type Silicon Carbide"\
+"NASA Glenn Research Center"\"Application"\"LEW-18934-1"\\"13/900,642"\"Conditionally Active Min-Max Limit Regulators"\"05/23/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18939-1"\\"13/916,797"\"Magnetostrictive Alternator - Low cost, No moving part, High Efficiency, Oscillating Acoustic Pressure Wave to Electric Power Transducer"\"06/13/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18942-1"\\"13/771,920"\"Adaptive Phase Delay Generator"\"02/20/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18949-1"\\"13/923,450"\"Advanced High Temperature and Fatigue Resistant Environmental Barrier Coating Bond Coat Systems for SiC/SiC Ceramic Matrix Composites"\"06/21/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18952-1"\\\"A Novel Real Time Adaptive Filter For The Reduction Of Artifacts In Functional Near Infrared Spectroscopy Signals"\
+"NASA Glenn Research Center"\"Application"\"LEW-18957-1"\\"14/048,895"\"Dynamic Range Enhancement Of High-Speed Data Acquisition Systems By Reversible Non-Linear Amplitude Compression"\"10/08/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18960-1"\\"13/891,461"\"Dry Snorkel Cold Immersion Suit for Hypothermia Prevention"\"05/11/2032"
+"NASA Glenn Research Center"\"Application"\"LEW-18963-1"\\"13/853,308"\"Flywheel Pulse & Glide System for Vehicles"\
+"NASA Glenn Research Center"\"Application"\"LEW-18964-1"\\"13/905,333"\"High Temperature Lightweight Self-Healing Ceramic Composites for Aircraft Engine Applications"\"05/30/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-18970-1"\\"14/158,080"\"Methods for Intercalating and Exfoliating Hexagonal Boron Nitride"\"01/17/2034"
+"NASA Glenn Research Center"\"Application"\"LEW-18986-1"\\\"Generation Of High Pressure Oxygen Via Electrochemical Pumping In A Multi-Stage Electrolysis Stack"\
+"NASA Glenn Research Center"\"Application"\"LEW-19013-1"\\"14/095,442"\"Spoked Wheel Assembly With Two Rotational Modes"\"12/03/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-19029-1"\\"14/191,708"\"Superelastic Ternary Ordered Intermetallic Compounds"\"02/27/2034"
+"NASA Glenn Research Center"\"Application"\"LEW-19040-1"\\"14/193,024"\"Fast, Large Area, Wide Band Gap UV Photodetector for Cherenkov Light Detection"\"02/28/2034"
+"NASA Glenn Research Center"\"Application"\"LEW-19045-1"\\"13/968,531"\"Multimode Directional Coupler for Measurement and Utilization of Harmonic Frequencies from Traveling Wave Tube Amplifiers"\"08/16/2033"
+"NASA Glenn Research Center"\"Application"\"LEW-19053-1"\\"14/193,719"\"Process for Preparing Aerogels from Polyamides"\"02/28/2034"
+"NASA Glenn Research Center"\"Application"\"LEW-19067-1"\\\"Plasma Spray-Physical Vapor Deposition (PS-PVD) of Advanced Environmental Barrier Coatings"\
+"NASA Glenn Research Center"\"Application"\"LEW-19077-1"\\\"Improved Composite Damage Tolerance and Through Thickness Conductivity By Interleaving Carbon Fiber Veil Nanocomposites"\
+"NASA Glenn Research Center"\"Application"\"LEW-19080-1"\\\"Crosslinked Polyethylene Aerogels from Low Density Polyethylene, Linear Low Density Polyethylene, and Repurposed Polyethylene"\
+"NASA Glenn Research Center"\"Application"\"LEW-19098-1"\\"61/866,585"\"High Temperature, Flexible Composite Seals for Aeronautics and Space Environments Incorporating Aerogel Insulation"\
+"NASA Glenn Research Center"\"Application"\"LEW-19171-1"\\"61/931,189"\"Low Power Charged Particle Counter for Space Radiation Monitoring"\
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-28402-2"\5780594\"08/448,196"\"Biologically Active Protein Fragments Containing Specific Binding Regions Of Serum Albumin Or Related Proteins"\"07/14/2015"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-28985-1"\5641681\"08/422,963"\"Device And Method For Screening Crystallization Conditions In Solution Crystal Growth"\"04/17/2015"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31175-2-CIP"\6578851\"09/693,098"\"Gasket Assembly For Sealing Mating Surfaces"\"10/16/2020"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31243-1"\6459822\" 09/364,919"\"Video Image Stabilization And Registration (VISAR)"\"07/26/2019"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31243-2-CON"\6560375\"10/143,539"\"Video Image Stabilization And Registration"\"05/10/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31258-1"\6135255\"09/207,710"\"Releasable Conical Roller Clutch"\"12/09/2018"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31294-2-CIP2"\6592687\"10/196,389"\"Aluminum Alloy And Article Cast Therefrom"\"07/11/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31294-5-CIP"\6399020\"09/688,729"\"Aluminum-Silicon Alloy Having Improved Properties At Elevated Temperatures And Articles Cast Therefrom"\"10/11/2020"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31294-6-CIP"\6419769\"09/749,503"\"Aluminum-Silicon Alloy Having Improved Properties At Elevated Temperatures And Process For Producing Cast Articles Therefrom"\"12/22/2020"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31294-7-CIP"\6669792\"09/800,312"\"Process For Producing A Cast Article From A Hypereutectic Aluminum-Silicon Alloy"\"03/02/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31303-1"\6748349\"09/313,576"\"Generalized Fluid System Simulation Program (GFSSP) Version 2.01c"\"05/07/2019"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31387-1"\6361961\"09/560,532"\"GRAVITY RESPONSIVE NADH OXIDASE OF THE PLASMA MEMBRANE"\"04/25/2020"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31399-1"\6658329\"10/138,887"\"Addition Of Rangefinder To The Video Guidance Sensor"\"06/05/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31413-1"\6497355\"09/690,035"\"Precision Penetration Control System For The Friction Stir Welding (FSW) Retractable Pin Tool"\"10/19/2020"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31475-1"\6424470\"09/616,624"\"Panoramic Refracting Optic (PRO)"\"07/28/2020"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31475-2-DIV"\6580567\"10/173,410"\"Panoramic Refracting Conical Optic"\"06/17/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31488-1"\6028693\"09/7,124"\"Microresonator And Associated Method For Producing And Controlling Photonic Signals With A Photonic Bandgap Delay Apparatus"\"01/14/2018"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31490-1"\7118074\"10/690,161"\"Electrodynamic Tether System Design For Spacecraft Deorbit"\"10/17/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31529-1"\7081730\"10/857,375"\"Micro-Commanding Servo Motor Controller With Greater Than Fifty Million To One Dynamic Rate Range"\"06/19/2024"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31559-1-CON"\8127977\"13/157,895"\"Phase/Matrix Transformation Weld Process And Apparatus"\"11/27/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31559-1-DIV"\7980449\"10/385,168"\"Phase/Matrix Transformation Weld Process And Apparatus"\"11/27/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31559-2-DIV"\8225984\"13/157988"\"Phase/Matrix Transformation Weld Process And Apparatus"\"11/27/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31565-1"\6885779\"09/877,801"\"Full-Cycle, Low Loss, Low Distortion Phase Modulation From Multi-Layered Dielectric Stack With Terahertz Optical Bandwidth"\"08/17/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31584-1"\6497091\"09/877,800"\"Hypergolic Ignitor Assembly"\"06/06/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31584-1-CIP"\6845605\"10/288,800"\"Hypergolic Ignitor"\"01/26/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31593-1"\6939610\"10/212,564"\"Smart Thermal Management Coating"\"09/20/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31596-1"\6873762\"10/118,626"\"Fabrication Of Fiber-Optic Gratings Over A Wide Range Of Bragg Wavelength And Bandwidth Using A Single Phase Mask"\"10/12/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31616-1"\6540426\"09/949,408"\"Passive Ball Capture Latch Docking Mechanism"\"09/04/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31646-1"\6860099\"10/263,297"\"Liquid Propellant Tracing Impingement Injector"\"05/24/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31649-1"\7446860\"11/527,648"\"Nonintrusive, Remote, Micron Accuracy, Laser Fresnel Ranging System"\"10/19/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31698-1"\6802999\"10/173,536"\"Method Of Fabricating A Protective Crucible Wall Coating Incorporating Designed Multi-Use Channels"\"05/02/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31706-1"\6886392\"10/622,174"\"Single Ball Bearing Lubricant And Material Evaluator"\"07/17/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31727-1"\6953129\"10/231,428"\"Impact And Fire Resistant Coating For Pressure Vessels"\"11/07/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31761-1"\6802488\"10/232,974"\"Electro-Mechanically Actuated Propellant Valve"\"01/29/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31768-1"\6745942\"10/214,482"\"Magnetic Symbology Reader"\"08/05/2022"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31776-1"\7735265\"11/780,610"\"Foam-Rigidized Inflatable Tubular Space Booms"\"07/20/2027"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31785-1"\7006203\"10/646,000"\"Integrated Rangefinding Measurement In Video Guidance Sensor"\"08/21/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31789-1"\7265476\"10/975,121"\"MEMS- Micro-Translation Stage With Indefinite Linear Travel Capability"\"11/01/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31807-1"\7050161\"10/637,085"\"Global Radius Of Curvature Estimation And Control System For Segmented Mirrors (GRoCECS)"\"01/07/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31813-1"\7802799\"11/527,653"\"Joining Metallic To Composite Components"\"07/29/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31815-1"\7325749\"10/738,352"\"Distributed Solid State Programmable Thermostat / Power Controller"\"01/29/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31817-1"\7515257\"11/14,455"\"Short-Range / Long-Range Integrated Target (SLIT) For Video Guidance Sensor Rendezvous And Docking"\"06/07/2027"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31823-1-DIV"\7095000\"10/943,827"\"Radio-Frequency Driven Dielectric Heaters For Non-Nuclear Testing In Nuclear Core Development"\"11/27/2024"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31828-1"\6918970\"10/120,226"\"High Strength Aluminum Alloy For High Temperature Applications"\"04/12/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31838-1"\7641949\"10/857,379"\"Improved Pressure Vessel Impact Resistance Utilizing Filament Wound Hybrid Fibers"\"10/15/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31842-1"\7347089\"11/215,749"\"Gas Volume Contents Within A Container, Smart Volume Instrument"\"11/26/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31843-1"\7174077\"10/631,220"\"Fiber-Coupled Laser Diodes With Even Illumination Pattern"\"07/30/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31852-1"\7106457\"10/857,372"\"Achromatic Shearing Phase Sensor For Phase Alignment Of A Segmented Telescope"\"01/21/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31865-1"\6888476\"10/615,369"\"Advanced Video Guidance Sensor Software"\"07/21/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31886-1"\6850592\"10/321,873"\"Digital Equivalent System (DEDS) For X-Ray Flourescent Spectral Output"\"01/08/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31891-1"\7375801\"11/108,140"\"Video Sensor With Range Measurement Capability"\"11/06/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31918-1"\7275675\"10/928,876"\"Optimal Design Geometry For All Friction Stir Weld Tools"\"01/15/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-31944-1"\7017812\"10/730,191"\"Variable Distance Angular Symbology Reader"\"11/26/2023"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32024-1"\8297468\"10/857,380"\"Liquefied Natural Gas Fuel Tank"\"07/13/2021"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32031-1"\7738084\"11/543,284"\"Fiber Optic Liquid Mass Flow Sensor - Improved Prototype Design"\"09/29/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32099-1-CON"\8561829\"13/544,066"\"Composite Pressure Vessel Including Crack Arresting Barrier"\"10/23/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32102-1"\7540143\"11/172,665"\"Heated Pressure Balls Monopropellant Thermal Rocket Engine Cycle"\"12/12/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32105-1-DIV"\7568608\"11/700,972"\"Ultrasonic Stir Welding Process And Apparatus"\"01/29/2027"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32115-1"\7686202\"11/543,287"\"Gimbling Shoulder For Friction Stir Welding"\"06/18/2027"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32136-1"\7595841\"11/174,210"\"Video Image Stabilization And Registration - Plus (VISAR+)"\"12/03/2027"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32137-1"\7177164\"11/376,632"\"Multi-loop High Voltage Power Supply with Fast Rise/Fall Time"\"03/10/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32175-1"\7228241\"11/152,810"\"An Extended Lee-Kesler Equation-of-State (ELK-EoS) For The Volumetric And Thermodynamic Properties Of Propellant Fluids, Including The Non-Polar Quantum And Polar Fluids"\"06/13/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32192-1"\7116098\"11/357,454"\"Absolute Limit Sensor (ALS)"\"02/16/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32208-1"\7259981\"11/296,719"\"Analog Nonvolatile Computer Memory"\"12/14/2025"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32214-1"\7418814\"11/172,666"\"Dual Expander Cycle Rocket Engine Cycle with an Intermediate Brayton Cycle Heat Exchanger"\"12/19/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32228-1"\8290435\"12/241,322"\"Short Range Antenna / Close Proximity Transmitter and Receiver"\"08/17/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32253-1"\7469878\"11/518,733"\"Magnetorestrictive Valves"\"10/17/2026"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32307-1"\7908079\"11/527,658"\"Portable Runway Intersection Display And Monitoring System"\"01/13/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32311-1"\7623621\"12/47,686"\"Identification And Authentication System Using Integrated Optical And X-ray Fluorescene Spectral Methods"\"03/13/2028"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32318-1"\8098060\"12/173,318"\"SCAPS(Single Coil Absolute Position Sensor) GAPSYN (Inductive Gap Sensor) Digital Signal Conditioning Electronics"\"09/29/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32323-1"\8169620\"12/563,819"\"Sub-Pixel Spatial Resolution Interferometry With Interlaced Stitching"\"10/15/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32324-1"\7594530\"11/942,322"\"Orbital Foamed Metal Extruder"\"06/09/2028"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32341-1"\8550468\"12/210,843"\"High Load Fully Retained Dynamic Cryogenic Seal"\"01/09/2032"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32364-1"\7808353\"11/513,433"\"Plasmoid Thruster for Electrode-less, High Specific Impulse Propulsion"\"07/22/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32390-1"\7867589\"11/780,561"\"Hybrid composite cryogenic tank structure"\"10/14/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32400-1"\7900436\"11/780,626"\"Gas Generator Augmented Expander Cycle Rocket Engine"\"01/04/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32402-1"\7911174\"12/39,506"\"Inexpensive, Rate Insensitive, Linear, Load Compensating System for Hybrid Stepper Motors"\"01/25/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32429-1"\7807097\"12/123,170"\"Orbital Batch Process Foamed Aluminum Facility"\"07/11/2028"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32438-1"\8004364\"11/828,563"\"16-Kilowatt (KW) 2-30MHz Solid State Power Amplifier using innovative combining methods"\"11/03/2028"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32439-1"\7831225\"11/828,590"\"H2O-NaCl based radio frequency power load"\"04/07/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32497-1"\7848606\"12/047,805"\"Reprocessing Non-Oxide Optical Fiber Preforms Utilizing an Axial Magnetic Field"\"05/26/2029"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32518-1-CIP"\\"13/452,303"\"Liquid Propellant Injection Elements with Self-Adjusted Inlet Area for Rocket and Other Combustor-Type Engines Applications"\"10/03/2028"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32521-1"\7804600\"12/44,740"\"Dispersive Filter For Enhancement Of Laser Gyroscopes"\"06/10/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32548-1"\7409875\"11/862,793"\"Optical Hotspot Conductive Fluid Flow Sensor"\"09/27/2027"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32558-1"\8490470\"12/569,555"\"True Shear Parallel Plate Viscometer"\"12/04/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32584-1"\7929144\"12/336,260"\"Local Leak Detection and Health Monitoring of Pressurized Tanks in a Space Environment"\"11/17/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32588-1"\8052860\"11/957,051"\"ELECTROCHEMICALLY-ENHANCED MECHANICAL POLISHING OF OPTICS"\"09/06/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32605-1"\8309944\"12/240,626"\"Grazing Incidence Optics for Neutron Analysis and Imaging"\"12/07/2030"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32605-1-CIP"\0\"12/717,450"\"Novel Grazing Incidence Neutron Optics"\"09/29/2028"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32605-1-DIV"\8575577\"13/534,951"\"Novel Grazing Incidence Neutron Optics"\"09/29/2028"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32612-1-CIP"\\"13/796,693"\"Protective Safety Cover for Pool and Spa Drains"\"03/24/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32614-1"\464750\"12/826,887"\"Magnetostrictive Regulator"\"04/03/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32615-1"\8132772\"12/567,451"\"Avionics/Electronics Box Rail Mount System"\"11/27/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32638-1"\8291776\"12/827,515"\"Magnetostrictive Force-to-Angle Sensor"\"03/12/2031"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32642-1"\0\"12/827,598"\"Cryogenic and Non-Cryogenic Optical Liquid Level Instrument for Stratified Conditions"\"04/05/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32651-1"\8090484\"12/403,096"\"A Planar Translation Device for Solar Sail Spacecraft Attitude Control and Maneuvering"\"07/03/2030"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32655-1"\0\"12/862,510"\"AEROSPACE LASER IGNITION/ABLATION VARIABLE, HIGH PRECISION THRUSTER"\
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32667-1"\8357884\"12/839,848"\"Extraction of Water from the Soil of Space Bodies Using Microwave processes"\"04/22/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32697-1"\8252734\"12/634,502"\"Multi Layered or Mixed Element Aqueous Ionic Fluids As Fuel or Lubrication Friction Modifiers"\"08/26/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32697-1-CIP"\8563487\"13/525,623"\"Multi Layered or Mixed Element Aqueous Ionic Fluids As Fuel or Lubrication Friction Modifiers"\"12/09/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32715-1"\8535440\"12/758169"\"Improvement of Crystalline Quality during Melt Growth of Semiconductors by Mechanically Induced Nucleation"\"07/18/2032"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32719-1"\8564770\"13/150832"\"Field-Deployable Spectral Estimator of Trichloroacetic Acid (TCAA) in Plants"\"05/18/2032"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32733-1"\7621670\"12/392,867"\"Unbalanced Flow Distribution Mixer with Flow Metering Capability"\"02/25/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32737-1"\8448498\"12/870,468"\"Hermetic Seal Leak Detection Apparatus"\"06/06/2031"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32737-1-CIP"\\"13/874182"\"Hermetic Seal Leak Detection Apparatus"\"08/27/2030"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32748-1"\8132961\"12/397,973"\"Optimized Length-to-Diameter Ratio Flow Meter"\"08/16/2030"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32757-1"\0\"13/118086"\"Compliant Mechanical Motor"\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32761-1-CIP"\\"13/673,309"\"Multi-Channel Flow Plug with Eddy Current Minimization for Metering, Mixing, and Conditioning"\"07/23/2029"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32761-1-CON"\\"13/729,861"\"Multi-Channel Flow Plug with Eddy Current Minimization for Meeting, Mixing, and Conditioning"\"07/23/2029"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32777-1"\8425751\"13/020144"\"Electrodeposited Nickel-Cobalt Alloy Development"\"05/31/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32797-1"\8330961\"12/837,173"\"A compact sensor for in-situ measurements of gas leaks"\"08/24/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32803-1"\8133768\"12/560,371"\"Method of Manufacturing Light Emmitting, Photovoltaic or other Electronic Apparatus"\"05/31/2027"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32809-1"\0\"13/369,704"\"Telemetry encoder/decoder"\
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32817-1"\8290006\"13/281,025"\"Variable Power Handheld Laser Torch for Joining Processes"\"10/25/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32826-1"\8316884\"12/846,429"\"Drain System for Pools, Spas, and Tanks. (Reference MFS 32612-1)"\"03/23/2031"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-33054-1"\\"14/020,326"\"Multi-spacecraft Autonomous Positioning System / Network-Based Navigation"\"09/06/2033"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32830-1"\8420582\"13/027472"\"FRICTION MANAGEMENT USING SOLVENT PARTITIONING OF SINGLE ELEMENT AND MULTI-ELEMENT HYDROPHILIC SURFACE-INTERACTIVE CHEMICALS CONTAINED IN HYDROPHILIC TARGETED EMULSIONS"\"02/15/2031"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32830-1-CIP"\\"13/900,452"\"Friction and Wear Management Using Solvent Partioning of Hydrophilic Surface-Interactive Chemicals contains in Boundary Layer-Targeted Emulsions"\"03/07/2033"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32840-1"\8322685\"12/842,218"\"Non-collinear Valve Actuator"\"04/02/2031"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32841-1"\\"13/424,754"\"DUPLICATE of Telemetry encoder/decoder"\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32853-1"\\"14/196,203"\"Particle Damping for Vibration Mitigation of Circuit Cards"\"03/04/2034"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32857-1"\8668168\"13/326,513"\"Rocket Vent Design with Variable Flow Control and Rain Protection"\"01/21/2032"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32859-1"\8393520\"13/240,075"\"Variably Pulsed High Power Ultrasonic (HPU) Energy for Ultrasonic Stir Welding (USW)"\"11/07/2031"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32859-1-DIV"\8393523\"13/523,310"\"Pulsed Ultrasonic Stir Welding Method"\"09/22/2031"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32865-1"\\"13/302,734"\"Easily Installed, In-situ Adaptable Flow Measurement Device and Method."\
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32865-2"\8555731\"13/302,773"\"Easily Installed, In-situ Adaptable Flow Measurement Device and Method."\"06/04/2032"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32865-3"\\"13/302,817"\"Easily Installed, In-situ Adaptable Flow Measurement Device and Method."\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32865-4"\\"13/302,845"\"Easily Installed, In-situ Adaptable Flow Measurement Device and Method."\"08/23/2032"
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32871-1"\8577519\"13/424,898"\"Low Cost Telemetry System for Small/micro satellites"\"06/13/2032"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32873-1"\\"13/523210"\"High-current, high-voltage switch using non-hazardous liquid metals"\"11/29/2032"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32889-1"\\"13/174,084"\"Pyrotechnic Pipe Plug and Variable Area Flow Meter"\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32895-1"\\"13/242,734"\"High Powered Ultrasonically Assisted Thermal Stir Welding"\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32912-1"\\"13/299,930"\"Salt Water Power Load - Part II"\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32916-1"\\"13/333283"\"Improved Impact Toughness and Heat Treatment for Cast Aluminum Wheels"\
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32924-1"\\"13/312,481"\"Partial Automated Alignment & Integration System"\"07/09/2032"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32934-1"\\"12/833,894"\"Methods, Devices, and Systems Relating to a Sensing Device"\
+"NASA Marshall Space Flight Center"\"Issued"\"MFS-32940-1"\8657179\"13/430,268"\"Closed Loop Temperature Control for the Thermal Stir Welding Process"\"03/26/2032"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32944-1"\\"13/896,137"\"Mitigation of Sonic Boom from Supersonic Vehicles by means of Long Penetration Mode (LPM) Counter-Flowing Cold Gas Jets"\"05/16/2033"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32945-1"\\"14/082,956"\"Piezoelectric Gravity Gradient and Multiple Purpose Sensor Detection System"\"11/18/2033"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-32986-1"\\"13/961,573"\"Non-Explosively-Actuated Pressurization Start Valve"\"08/07/2033"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-33007-1"\\"14/192,350"\"Carbon Nanotube Tape Vibrating Gyroscope Update"\"02/27/2034"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-33022-1"\\"14/192,395"\"A Design Technology to Eliminate Dribble Volume in Rocket Engine Manifolds for Swirl-Coaxial Injectors"\"02/27/2034"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-33031-1"\\"13/949,361"\"An aerodynamic design concept for rocket nozzle side load reduction"\"07/24/2033"
+"NASA Marshall Space Flight Center"\"Application"\"MFS-33060-1"\\"14/104,881"\"Carbon Nanotube Tape Single Axis Accelerometer"\"12/12/2033"
+"NASA Johnson Space Center"\"Issued"\"MSC-21715-2"\5869238\"08/390,904"\"Quantitative Method Of Measuring Cancer Cell Urokinase And Metastatic Potential"\"02/09/2016"
+"NASA Johnson Space Center"\"Issued"\"MSC-21947-1"\7541159\"10/828,531"\"MOLECULAR SPECIFIC ANTIBODIES AGAINST UROKINASE"\"08/28/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-22119-1"\5851816\"08/172,962"\"A PROCESS FOR DEVELOPING HIGH-FIDELITY THREE-DIMENSIONAL TUMOR MODELS OF HUMAN PROSTATE CARCINOMA"\"12/22/2015"
+"NASA Johnson Space Center"\"Issued"\"MSC-22122-1"\6117674\"08/366,065"\"HORIZONTAL ROTATING-WALL VESSEL PROPAGATION IN IN VITRO HUMAN TISSUE MODELS"\"09/12/2017"
+"NASA Johnson Space Center"\"Issued"\"MSC-22489-1"\5827531\"08/349,169"\"Multi-Lamellar, Immiscible-Phase Microencapsulation of Drugs"\"10/27/2015"
+"NASA Johnson Space Center"\"Issued"\"MSC-22616-2"\6133036\"09/7,239"\"Preservation Of Liquid Biological Samples"\"12/12/2015"
+"NASA Johnson Space Center"\"Issued"\"MSC-22616-3"\6716392\"09/630,979"\"Preservation Of Liquid Biological Samples"\"01/14/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22633-1"\6485963\"09/587,028"\"Electrically Potentiated Growth Of Mammalian Neuronal Tissue Facilitated By Rotating Wall Vessel Culture"\"06/02/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-22633-2"\6673597\"09/798,854"\"Growth Stimulation Of Biological Cells And Tissue By Electromagnetic Fields And Uses Thereof"\"02/28/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-22695-1"\6261844\"09/213,988"\"A Unique Urine Preservative With Combined Antibacterial And Antioxidant Properties"\"12/17/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22721-2"\6254359\"09/354,915"\"Blood Pump Bearing System"\"07/09/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-22724-1"\6047216\"09/129,832"\"Millimeter Wave/Microwave Ablation For Treatment Of Atherosclerotic Lesions"\"08/05/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22724-2"\6226553\"09/501,150"\"Endothelium Preserving Microwave Treatment For Atherosclerosis"\"02/09/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-22724-3"\6223086\"09/504,768"\"Endothelium Preserving Microwave Treatment For Atherosclerosis"\"02/09/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-22724-5"\6496736\"09/500,538"\"Endothelium Preserving Microwave Treatment For Atherosclerosis"\"02/09/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-22757-1"\5879079\"08/917,581"\"Automated Propellant Blending Machine"\"08/20/2017"
+"NASA Johnson Space Center"\"Issued"\"MSC-22797-1"\6312398\"08/786,842"\"A Method Of Applying External Power To Assist In The Operation Of Joints In Pressure Suits And Inflatable Structures2283"\"12/19/2016"
+"NASA Johnson Space Center"\"Issued"\"MSC-22839-1"\6501414\"09/826,402"\"Locating Concealed Objects Using Spectral Signatures"\"04/02/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-22859-1"\6730498\"09/56,363"\"Production Of 1-25diOH Vitamin D3, Erythropoietin And Other Products By Epithelial And Interstitial Cells In Response To Shear Stress"\"04/08/2017"
+"NASA Johnson Space Center"\"Issued"\"MSC-22859-2"\6946246\"09/532,001"\"Production Of Functional Proteins: Balance Of Shear Stress And Gravity"\"03/21/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-22859-3"\7198947\"10/734,759"\"Production Of Functional Proteins: Balance Of Shear Stress And Gravity"\"12/22/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-22859-5"\7972821\"12/174,221"\"Production of Functional Proteins: Balance of Shear Stress and Gravity"\"02/11/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-22863-1"\7122071\"10/263,280"\"Centrifugal Adsorption Cartridge System (CACS)"\"12/21/2022"
+"NASA Johnson Space Center"\"Issued"\"MSC-22866-1"\6099864\"09/79,741"\"INSITU Activation Of Microcapsules"\"05/15/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22900-1"\6231010\"09/236,785"\"Advanced Structural/Inflatable Hybrid Spacecraft Habitation Module"\"01/25/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-23563-2"\8039099\"11/848,332"\"Nanoencapsulated Aerogels Produced By Monomer Vapor Deposition And Polymerization"\"08/13/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-22931-1"\6354540\"09/405,301"\"Electro-Mechanically Actuated Magnetic Ring With Load Sensing Feedback And Closed Loop Control Docking/Berthing System For Alignment And Mating Of Multiple Vehicles, Structures, And/or Assemblies"\"09/20/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-22936-1"\6387399\"09/79,766"\"Protein Crystal Encapsulation Process"\"05/15/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22936-2"\6558698\"09/733,391"\"Microencapsulated Bioactive Agents And Method Of Making"\"12/06/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-22936-3"\6676964\"09/774,168"\"Method For Determining The Three-Dimensional Structure Of A Protein"\"01/26/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-22936-4"\6599449\"09/774,169"\"X-Ray Crystallography Reagent"\"01/24/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-22937-1"\6214300\"09/79,833"\"Microencapsulation And Electrostatic Processing Device (MEPS)"\"05/15/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22938-1"\6103271\"09/79,770"\"Low-Shear Microencapsulation & Electrostatic Coating Process"\"05/15/2018"
+"NASA Johnson Space Center"\"Issued"\"MSC-22939-4"\7968117\"12/100,009"\"Externally Triggered Microcapsules"\"07/09/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-22970-1"\6253563\"09/337,208"\"Solar-Powered Refrigeration System"\"06/03/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-22970-2"\6469487\"09/838,679"\"Solar Powered Refrigeration System"\"06/03/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-22970-3"\6453693\"09/838,680"\"Solar Powered Refrigeration System"\"06/03/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-23029-1"\6651739\"09/793,817"\"Medium Frequency Pseudo Noise Geological Radar"\"07/20/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23037-1"\6864473\"09/988,855"\"Variable Shadow Screen For Optical Devices"\"11/14/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23041-1"\6334302\"09/351,152"\"Variable Specific Impulse Magnetoplasma Rocket (VASIMR)"\"06/28/2019"
+"NASA Johnson Space Center"\"Issued"\"MSC-23049-3"\6592579\"09/746,542"\"Method For Selective Thermal Ablation"\"06/28/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23049-4"\6675050\"09/746,533"\"Computer Program For Microwave Antenna"\"05/07/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23076-1"\6321746\"09/574,758"\"Collapsable, Light, Portable Human Hyperbaric Chamber/Airlock System"\"05/17/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-23092-1"\6547189\"09/826,403"\"Advanced, Large Volume, Highly Loaded, Hybrid Inflatable Pressure Vessel"\"05/26/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23153-1"\6995572\"09/803,613"\"Coplanar Waveguide Ice Detection Sensor"\"11/04/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23154-1"\7113820\"09/906,013"\"A Real-Time, High Frequency QRS Electrocardiograph."\"05/03/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23154-2"\7539535\"11/345,687"\"A Real-Time, High Frequency QRS Electrocardiograph"\"07/13/2027"
+"NASA Johnson Space Center"\"Issued"\"MSC-23178-1"\6997637\"10/5,820"\"Deceleration Limiting Safety Crash Wall"\"05/19/2022"
+"NASA Johnson Space Center"\"Issued"\"MSC-23193-1"\6618010\"09/994,989"\"Passive Noncoherent Tracking Of A Data-Modulated Signal"\"11/14/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23277-1"\7295309\"10/734,753"\"Microcapsule Flow Sensor"\"11/12/2024"
+"NASA Johnson Space Center"\"Issued"\"MSC-23303-1"\7397774\"10/446,283"\"Downlink Data Multiplexer"\"01/16/2026"
+"NASA Johnson Space Center"\"Issued"\"MSC-23307-1"\6559645\"10/28,962"\"Detection Of Subterranean Metal Objects Using Differential Spectral Processing"\"11/17/2020"
+"NASA Johnson Space Center"\"Issued"\"MSC-23309-1"\7040319\"10/87,866"\"Oxygen Partial Pressure Monitoring Device For Aircraft Oxygen Masks."\"04/27/2022"
+"NASA Johnson Space Center"\"Issued"\"MSC-23311-1"\6650280\"09/953,612"\"Mass Measurement During Fluid Flow Using An Integrated Sonic/Microwave Detector."\"09/14/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23314-1"\6899009\"09/892,355"\"Flexshield (Flexible Multi-Shock Shield Technology)"\"06/26/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-23349-1"\7415005\"10/283,354"\"MCC Voice Over Internet Protocol (VOIP)"\"08/08/2026"
+"NASA Johnson Space Center"\"Application"\"MSC-23349-2-SB"\0\"12/170,614"\"Ad Hoc Selection of Voice Over Internet Streams"\
+"NASA Johnson Space Center"\"Issued"\"MSC-23424-1"\6985606\"10/212,579"\"Global Distribution Of Large Fluvial Fans/Potential Hydrocarbon Exploration Guide"\"06/12/2024"
+"NASA Johnson Space Center"\"Issued"\"MSC-23427-1"\6944504\"10/302,323"\"Microwave Ablation Of Prostatic Cells Using A Separated Antenna Array"\"07/23/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23436-1"\7126553\"10/679,688"\"Tri-Sector Deployable Array Antenna"\"08/11/2024"
+"NASA Johnson Space Center"\"Issued"\"MSC-23443-1"\6647855\"10/263,293"\"Method And Apparatus For Deploying A Hypervelocity Shield"\"09/30/2022"
+"NASA Johnson Space Center"\"Issued"\"MSC-23444-1"\6932090\"10/361,046"\"A Simple Countermeasure For Management Of Motion Sickness And Vestibular/Sensory-Motor Problems Associated With Space Flight And Terrestial Motion Sickness"\"07/01/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23449-1"\7386340\"10/402,866"\"Method For Diagnosis Of Coronary Artery Disease And Related Conditions Using 12-Lead High Frequency QRS Electrocardiography"\"12/30/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-23510-1"\6851647\"10/417,377"\"Portable Catapult Launcher For Small Aircraft"\"04/03/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23518-1"\7168935\"10/637,086"\"Low Voltage Electron Beam Solid Freeform Fabrication System"\"09/29/2024"
+"NASA Johnson Space Center"\"Issued"\"MSC-23538-1"\6943619\"10/443,233"\"Practical Active Capacitor Filter"\"05/21/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23539-1"\6943621\"10/443,234"\"Auto-Routable, Configurable, Daisy Chainable Data Acquisition System"\"08/16/2023"
+"NASA Johnson Space Center"\"Issued"\"MSC-23563-1"\7270851\"10/985,081"\"Nano-Encapsulated Aerogel"\"05/14/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-23594-1"\7125370\"10/845,608"\"Articulating Subject Support For Resistive Exercise In The Horizontal Position"\"02/22/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-23623-1"\7212934\"11/370,379"\"String Resistance Detector Concept"\"03/06/2026"
+"NASA Johnson Space Center"\"Issued"\"MSC-23659-1"\7094045\"10/734,754"\"Pulse-Flow Microencapsulation System"\"06/09/2024"
+"NASA Johnson Space Center"\"Issued"\"MSC-23659-2"\7588703\"11/428,465"\"Microencapsulation System And Method"\"03/14/2027"
+"NASA Johnson Space Center"\"Issued"\"MSC-23668-1"\7250075\"10/874,004"\"Water Outlet Control Mechanism For Fuel Cell System Operation In Variable Gravity Environments"\"11/04/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-23695-1"\7249540\"11/177,652"\"Torquing Tool Attachment For Round Connectors With Attached Cables"\"08/27/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-23781-1"\7410485\"11/40,613"\"Directional Microwave Applicator/Antenna"\"10/16/2026"
+"NASA Johnson Space Center"\"Issued"\"MSC-23805-1"\7462141\"11/31,942"\"Advanced Resistive Exercise Device (ARED)"\"01/10/2027"
+"NASA Johnson Space Center"\"Issued"\"MSC-23881-1"\7686529\"11/958,908"\"Low Friction, Low Profile, High Moment Two-Axis Joint"\"12/18/2027"
+"NASA Johnson Space Center"\"Application"\"MSC-23882-1"\0\"12/899654"\"Analog Strain Gage Conditioning System for Space Environment"\
+"NASA Johnson Space Center"\"Issued"\"MSC-23906-1"\7295884\"11/158,354"\"Method for the Design and Analysis of the Primary Load Bearing Layer of an Inflatable Vessel"\"07/20/2026"
+"NASA Johnson Space Center"\"Issued"\"MSC-23933-1"\7543779\"11/625,066"\"Low Impact Docking System (LIDS) A.k.a, International Berthing Docking Mechanism (IBDM)"\"02/22/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-23954-1"\7357606\"11/357,461"\"Self-Advancing Step-Tap Drill"\"08/14/2026"
+"NASA Johnson Space Center"\"Issued"\"MSC-23988-1"\8343740\"12/58,227"\"Micro-Organ Device"\"10/31/2031"
+"NASA Johnson Space Center"\"Issued"\"MSC-23988-2"\8580546\"13/688982"\"Micro-Organ Device"\"11/29/2032"
+"NASA Johnson Space Center"\"Issued"\"MSC-23997-2"\7815149\"12/388,345"\"Magnetic Capture Docking Mechanism"\"04/01/2025"
+"NASA Johnson Space Center"\"Issued"\"MSC-24000-1"\8076136\"/0"\"Development And Characterization Of A Three-Dimensional Tissue Culture Model Of Bone"\"10/31/2021"
+"NASA Johnson Space Center"\"Issued"\"MSC-24042-1"\7411198\"11/421,174"\"New Architecture for Space Radiation Detection"\"02/01/2027"
+"NASA Johnson Space Center"\"Issued"\"MSC-24106-1"\7577482\"11/683,770"\"Network System Plug And Play Through Positional And Functional Connectivity Identification"\"04/21/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-24115-1"\8022307\"11/772,999"\"Method and Apparatus for Fabric Circuits and Antennas"\"06/19/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24149-1"\8122646\"12/402,986"\"A Description Of An Improved Method For Folding, Assembling, And Weight Relief Of An Inflatable Shell"\"02/04/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24149-2"\8266866\"13/346137"\"A Description Of An Improved Method For Folding, Assembling, And Weight Relief Of An Inflatable Shell"\"03/12/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-24164-1"\8338114\"11/789,117"\"Methods For Growing Tissue-Like 3D Assemblies (TLA) Of Human Broncho-Epithelial Cells"\"05/04/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24169-1"\7862946\"11/671,210"\"Self-Regulating Control of Parasitic Electric Loads in Fuel Cell Power Systems"\"11/05/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-24180-1"\7935259\"12/167,332"\"Water Filtering Device, 100% Effective"\"09/14/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-24184-1"\8116350\"12/353,755"\"Ultra-Wideband (UWB) Two-Cluster Angle Of Arrival (AOA) Passive Tracking System Design"\"07/22/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24201-1"\7509774\"11/610,295"\"A Description Of An Improved Method For Attaching An Inflatable Shell To A Rigid Interface"\"06/13/2027"
+"NASA Johnson Space Center"\"Issued"\"MSC-24207-1"\7604782\"11/625,670"\"X-38 Advanced Sublimator"\"04/12/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-24215-1"\8070105\"11/956,826"\"A Description Of A Concentric Nested Torroidal Inflatable Habitat"\"10/04/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24216-1"\8047473\"12/240,537"\"A Description Of An Octonode Connecting Node Concept And Method"\"01/10/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24228-1"\7521682\"11/421,196"\"New Architecture For Space Radiation Detection"\"03/07/2027"
+"NASA Johnson Space Center"\"Issued"\"MSC-24238-1"\8388613\"12/757657"\"Microwave Tissue Welding For Wound Closure"\"11/17/2031"
+"NASA Johnson Space Center"\"Issued"\"MSC-24263-1"\7805276\"11/958,937"\"Impact Detection System"\"02/12/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-24273-1"\7840387\"11/778,858"\"Method For The Design And Analysis Of The Primary Load Bearing Layer That Interfaces To The Structural Pass-through Of An Inflatable Vessel"\"07/31/2029"
+"NASA Johnson Space Center"\"Application"\"MSC-24314-1"\0\"12/880602"\"HDSS - High Density Spot Seeding"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24346-1"\8466776\"12/828558"\"Extended Range RFID and Sensor Tag"\"09/05/2031"
+"NASA Johnson Space Center"\"Issued"\"MSC-24387-1"\8011229\"12/323,912"\"Artificial Intelligence Algorithm For Assessing Postural Stability During Normal Daily Activities Using Shoe Insert Pressure Sensors"\"11/26/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-24441-1"\7905946\"12/190,364"\"A Capillary-based Static Phase Separator For Highly Variable Wetting Conditions"\"07/02/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-24444-1"\8577120\"12/900644"\"Flash Infrared (IR) Thermography Contrast Computer Simulation And Data Analysis Software"\"04/22/2031"
+"NASA Johnson Space Center"\"Application"\"MSC-24451-1"\0\"13/057399"\"Rapid Detection Of The Varicella Zoster Virus (VZV) In Saliva Samples"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24464-1"\7859292\"12/502,575"\"Reconfigurable SEU/SET Tolerance for FPGAs"\"07/14/2029"
+"NASA Johnson Space Center"\"Issued"\"MSC-24466-1"\8183870\"12/370,021"\"Battery cell voltage sensing and balancing using addressable transformers with electrical isolation and minimal additional connector pins and circuitry."\"07/01/2030"
+"NASA Johnson Space Center"\"Application"\"MSC-24490-1"\0\"12/612,171"\"High Altitude Hydration System"\
+"NASA Johnson Space Center"\"Application"\"MSC-24506-1"\0\"12/971919"\"A Method to Measure and Estimate Normalized contrast In Infrared Flash Thermography"\"01/08/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24508-1"\8343403\"12/174,380"\"METHOD FOR MAKING A MICROPOROUS MEMBRANE"\"12/31/2030"
+"NASA Johnson Space Center"\"Issued"\"MSC-24509-1"\8570047\"12/855384"\"Battery Fault Detection with Saturating Transformers"\"02/02/2032"
+"NASA Johnson Space Center"\"Issued"\"MSC-24525-1"\8384614\"12/894749"\"Deployable Fresnel Rings"\"10/11/2031"
+"NASA Johnson Space Center"\"Application"\"MSC-24541-1"\0\"12/899815"\"Electromagnetic Time-Variance Magnetic Fields (TVMF) to generate, and re-grow Cartilage Cells by a Noninvasive Method"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24569-1"\8176809\"12/331844"\"Planar Torsion Spring"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24570-1"\8276958\"12/269579"\"Bidirectional Tendon Terminator"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24571-1"\8371177\"12/241309"\"Tendon Tension Sensor"\
+"NASA Johnson Space Center"\"Application"\"MSC-24685-1"\8056423\"12/269,552"\"Sensing the Tendon Tension through the Conduit Reaction Forces"\"11/12/2028"
+"NASA Johnson Space Center"\"Application"\"MSC-24686-1"\8060250\"12/335,153"\"Joint Space Impedance Control for Tendon-Driven Manipulators"\"12/15/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-24687-1"\8170718\"12/338697"\"Multiple Priority Operational Space Impedance Control"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24688-1"\8280837\"12/474068"\"CONTACT STATE ESTIMATION FOR MULTI-FINGER ROBOT HANDS USING PARTICLE FILTERS"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24689-1"\7784363\"12/241320"\"PHALANGE TACTILE LOAD CELL"\"09/30/2028"
+"NASA Johnson Space Center"\"Issued"\"MSC-24732-1"\8364314\"12/624445"\"METHOD AND APPARATUS FOR AUTOMATIC CONTROL OF A HUMANOID ROBOT"\
+"NASA Johnson Space Center"\"Application"\"MSC-24733-1"\0\"13/349265"\"Pyrometer"\
+"NASA Johnson Space Center"\"Application"\"MSC-24734-1"\8498741\"12/564088"\"Dexterous Humanoid Robotic Wrist"\
+"NASA Johnson Space Center"\"Application"\"MSC-24735-1"\8467903\"12/564086"\"Tendon Driven Finger Actuation System"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24736-1"\8291788\"12/564090"\"Rotary Series Elastic Actuator"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24737-1"\8401700\"12/564124"\"ACTUATOR AND ELECTRONICS PACKAGING FOR EXTRINSIC HUMANOID HAND"\
+"NASA Johnson Space Center"\"Application"\"MSC-24738-1"\0\"12/564094"\"FRAMEWORK AND METHOD FOR CONTROLLING A ROBOTIC SYSTEM USING A DISTRIBUTED COMPUTER NETWORK"\
+"NASA Johnson Space Center"\"Application"\"MSC-24739-1"\8511964\"12/564084"\"Dexterous Humanoid Robot"\
+"NASA Johnson Space Center"\"Application"\"MSC-24740-1"\0\"12/564078"\"Dexterous Humanoid Robotic Finger"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24741-1"\8255079\"12/564095"\"Human Grasp Assist"\"09/23/2029"
+"NASA Johnson Space Center"\"Application"\"MSC-24742-1"\8442684\"12/564076"\"Integrated High Speed FPGA Based Torque Controller"\
+"NASA Johnson Space Center"\"Application"\"MSC-24743-1"\8250901\"12/564092"\"Rotary Absolute Position Sensor Calibration"\
+"NASA Johnson Space Center"\"Application"\"MSC-24744-1"\8369992\"12/564083"\"Diagnostics, prognostics & health management for humanoid robotics and method thereof"\
+"NASA Johnson Space Center"\"GM"\"MSC-24745-1"\8424941\"12/564085"\"ROBOTIC THUMB ASSEMBLY"\
+"NASA Johnson Space Center"\"Application"\"MSC-24746-1"\8260460\"12/564096"\"Interactive Robot Control System"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24747-1"\8244402\"12/564074"\"VISUAL PERCEPTION SYSTEM AND METHOD FOR A HUMANOID ROBOT"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24750-1"\8483882\"12/686512"\"HIERARCHICAL ROBOT CONTROL SYSTEM AND METHOD FOR CONTROLLING SELECT DEGREES OF FREEDOM OF AN OBJECT USING MULTIPLE MANIPULATORS"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24751-1"\8412376\"12/720725"\"TENSION DISTRIBUTION IN A TENDON-DRIVEN ROBOTIC FINGER"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24752-1"\8033876\"12/706744"\"CONNECTOR PIN AND METHOD"\
+"NASA Johnson Space Center"\"Application"\"MSC-24753-1"\0\"12/720727"\"UNDERACTUATED DESIGN AND CONTROL OF A TENDON-DRIVEN FINGER"\
+"NASA Johnson Space Center"\"Application"\"MSC-24755-1"\0\"12/698832"\"Architecture For Robust Force and Impedance Control Of Series Elastic Actuators"\
+"NASA Johnson Space Center"\"Application"\"MSC-24758-1"\0\"14/184278"\"RFID Cavity"\"03/11/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-24798-1"\0\"13/789903"\"Soft Decision Analyzer (SDA)"\"03/08/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-24811-1"\0\"13/461,487"\"Self-enclosed and pipette free DNA/RNA Isolation device"\
+"NASA Johnson Space Center"\"Application"\"MSC-24813-1"\0\"13/791290"\"Pre-Polymerase Chain Reaction Preparation Kit"\"08/06/2032"
+"NASA Johnson Space Center"\"Application"\"MSC-24817-1"\8265792\"12/760954"\"Method and Apparatus for Calibrating Multi-Axis Load Cells in a Dexterous Robot"\
+"NASA Johnson Space Center"\"Application"\"MSC-24837-1"\0\"12/787479"\"Applying Workspace Limitations in a Velocity-Controlled Robotic Mechanism"\
+"NASA Johnson Space Center"\"Application"\"MSC-24919-1"\0\"13/790591"\"RFID Waveguide, Antenna, and Cavity Sensors"\"07/13/2032"
+"NASA Johnson Space Center"\"Issued"\"MSC-24926-1"\8412378\"12/629637"\"IN-VIVO TENSION CALIBRATION IN TENDON-DRIVEN MANIPULATORS"\
+"NASA Johnson Space Center"\"Issued"\"MSC-24930-1"\8489239\"12/916803"\"ROBUST OPERATION OF TENDON-DRIVEN ROBOT FINGERS USING FORCE AND POSITION-BASED CONTROL LAWS"\
+"NASA Johnson Space Center"\"Application"\"MSC-25026-1"\0\"13/354552"\"Battery Charge Equalizer with transformer array"\
+"NASA Johnson Space Center"\"Issued"\"MSC-25053-1"\"D628,609"\"29/359105"\"ROBOT"\"04/06/2030"
+"NASA Johnson Space Center"\"Application"\"MSC-25056-1"\0\"13/014901"\"SYSTEM AND METHOD FOR TENSIONING A ROBOTICALLY ACTUATED TENDON"\
+"NASA Johnson Space Center"\"Issued"\"MSC-25084-1"\8067909\"12/474430"\"METHOD AND APPARATUS FOR ELECTROMAGNETICALLY BRAKING A MOTOR"\"05/29/2029"
+"NASA Johnson Space Center"\"Application"\"MSC-25084-DE"\0\"12/474430"\"Method and Apparatus for Electromagnetically Braking a Motor"\
+"NASA Johnson Space Center"\"Application"\"MSC-25084-JP"\0\"12/474430"\"Method and Apparatus for Electromagnetically Braking a Motor"\
+"NASA Johnson Space Center"\"Application"\"MSC-25091-1"\0\"13/199484"\"FRET-Aptamer Assays for C-Telopeptide, Creatinine and Vitamin D"\"08/31/2031"
+"NASA Johnson Space Center"\"Issued"\"MSC-25121-1"\8483877\"12/875254"\"WORKSPACE SAFE OPERATION OF A FORCE- OR IMPEDANCE-CONTROLLED ROBOT"\
+"NASA Johnson Space Center"\"Application"\"MSC-25149-1"\0\"13/196252"\"Controlling Execution Sequence Using Tactile-Classification during manipulation by a humanoid robot"\
+"NASA Johnson Space Center"\"Application"\"MSC-25216-1"\0\"13/439,546"\"METHOD AND COMPOSITION FOR AMELIORATING THE EFFECTS FOR A SUBJECT EXPOSED TO RADIATION OR OTHER SOURCES OF OXIDATIVE STRESS"\
+"NASA Johnson Space Center"\"Application"\"MSC-25217-1"\0\"13/272442"\"METHOD FOR DYNAMIC OPTIMIZATION OF A ROBOT CONTROL INTERFACE"\
+"NASA Johnson Space Center"\"Application"\"MSC-25219"\0\"13/207911"\"FAST GRASP CONTACT COMPUTATION FOR A SERIAL ROBOT"\
+"NASA Johnson Space Center"\"Application"\"MSC-25265-1"\0\"13/851778"\"New method and device for digital to analog transformations and reconstructions of multichannel electrocardiograms"\"10/30/2032"
+"NASA Johnson Space Center"\"Application"\"MSC-25286-1"\0\"14/252660"\"A chemical formulation to stabilize urine and minimize the precipitation potential of minerals during distillation of urine"\"03/11/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-25313-1"\0\"13/774835"\"Hydrostatic Hyperbaric Chamber"\"02/22/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-25318"\0\"13/408668"\"HUMAN GRASP ASSIST SOFT"\
+"NASA Johnson Space Center"\"Application"\"MSC-25319"\0\"13/408656"\"HUMAN GRASP ASSIST "\
+"NASA Johnson Space Center"\"Application"\"MSC-25320"\0\"13/408675"\"HUMAN GRASP ASSIST CONTROLS"\
+"NASA Johnson Space Center"\"Application"\"MSC-25327-1"\0\"13/459557"\"COMMUNICATION SYSTEM AND METHOD"\
+"NASA Johnson Space Center"\"Application"\"MSC-25386-1"\0\"13/951671"\"Active Response Gravity Offload System - Vertical Software Release"\"07/26/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-25590-1"\0\"13/790927"\"Systems and Methods for RFID-Enabled Information Collection"\
+"NASA Johnson Space Center"\"Application"\"MSC-25604-1"\0\"13/791584"\"Systems and Methods for RFID-Enabled Dispenser"\
+"NASA Johnson Space Center"\"Application"\"MSC-25605-1"\0\"13/790721"\"Switch Using Radio Frequency Identification"\
+"NASA Johnson Space Center"\"Application"\"MSC-25626-1"\0\"14/200,122"\"RFID Torque-Sensing Tag System for Fasteners"\"03/07/2034"
+"NASA Johnson Space Center"\"Application"\"MSC-25632-1"\0\"13/803017"\"ROBOT TASK COMMANDER WITH EXTENSIBLE PROGRAMMING ENVIRONMENT
+"\"03/14/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-25758-1"\0\"14/184303"\"Methods, Systems and Apparatuses for Radio Frequency Identification"\"03/11/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-25759-1"\0\"14/184337"\"Methods, Systems and Apparatuses for Radio Frequency Identification"\"03/11/2033"
+"NASA Johnson Space Center"\"Application"\"MSC-25760-1"\0\"14/184365"\"Methods, Systems and Apparatuses for Radio Frequency Identification"\"03/11/2033"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-17734-1"\0\"07/700,830"\"Formation Of Self-Aligned Guard Ring For Silicide Schottky-Barrier Diodes Used For Infrared Detection"\
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-19289-1"\6513023\"09/412,199"\"On-Chip Learning In VLSI Hardware"\"10/01/2019"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-19769-1"\0\"08/868,175"\"Automated Cargo Inventory Identification Transponder"\
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-19855-1"\6374630\"09/853,931"\"Champagne Heat Pump"\"05/09/2021"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-20031-1"\6828935\"10/176,761"\"Receiver Controlled Phased Array Antenna"\"07/19/2022"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-20837-1"\6526556\"09/591,386"\"MORPHING TECHNIQUE FOR ACCELERATED EVOLUTIONARY SYNTHESIS OF ELECTRONIC CIRCUITS"\"06/07/2020"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-21136-1"\0\"10/219,384"\"A CMOS ACTIVE PIXEL SENSOR (APS) FOR READING COMPACT DISCS"\
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-30703-1"\7240208\"10/424,287"\"ENCRYPTING DIGITAL CAMERA"\"04/23/2023"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-40040-1"\7480984\"40/863,835"\"A Concept For Suppressing Sublimation In Advanced Thermoelectric Devices"\"06/07/2024"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-40407-1"\7592747\"11/056,633"\"Piezoelectrically Enhanced PhotoCathode (PEPC)"\"02/09/2025"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-40827-1"\7156189\"11/1,465"\"SELF-MOUNTABLE AND EXTRACTABLE ULTRASONIC/SONIC ANCHOR (U/S-Anchor)"\"12/01/2024"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-41446-1"\8358723\"11/602,440"\"Architecture Of An Autonomous Radio"\"09/12/2031"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-41506-2"\8492160\"12/720,103"\"BIOMARKER SENSOR SYSTEM AND METHOD FOR MULTI-COLOR IMAGING AND PROCESSING OF SINGLE-MOLECULE LIFE SIGNATURES"\"04/09/2031"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-41511-1"\7385462\"11/376,638"\"Wideband (31 To 36 GHz) 24-Way Radial Power Combiner/Divider Fed By A Marie Transducer"\"03/14/2026"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-41982-1"\8078309\"12/415,206"\"Inverse Tomographic Approach To Create Arbitrary Sidewall Geometries In 3D Using LiGA Technologies"\"03/03/2021"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-42131-1"\7824247\"11/756,819"\"PORTABLE RAPID AND QUIET DRILL (PRAQD)"\"11/02/2027"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-42312-1"\7184624\"11/422,147"\"Slow light in chains of vertically coupled whispering gallery mode resonators"\"06/05/2026"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-42466-1"\7764384\"11/924,766"\"Swept frequency laser metrology system"\"10/26/2027"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-42563-1"\7353768\"11/456,441"\"Submersible Vehicle Propulsion and Power Generation"\"07/10/2026"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-42672-1"\7996112\"11/756,793"\"Micro Robot Explorer (SpiderBot) Mesh Crawler"\"06/08/2030"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-43213-1"\7850861\"11/764,359"\"Patterning packing materials for Fluidic Channels"\"10/13/2029"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-43348-1"\7809521\"12/40,459"\"Precise delay measurement circuit on FPGAs"\"01/31/2029"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-43361-1"\7773121\"11/741,213"\"High Resolution, Continuous Field of View, Non-Rotating Imaging Sensor Head"\"10/15/2028"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-43524-1"\7773362\"11/683,007"\"Dusty Plasma Thruster"\"01/03/2029"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-44079-1"\8022860\"11/781,022"\"Enhanced Interference Cancellation and Telemetry Reception with a Single Parabolic Dish Antenna using a Focal Plane Array"\"04/30/2030"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-44765-1"\7740088\"11/928,069"\"Ultrasonic/Sonic Rotary-Hammer Drill (USRoHD)"\"04/15/2028"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-44914-1"\8407979\"11/926,279"\"Magnetically-Conformed, Variable Area Discharge Chamber for Hall Thruster Plasma Accelerators"\"06/08/2031"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-45053-1"\8057283\"12/119,989"\"The process of significant improving of optical quality factor of whispering gallery mode resonator."\"09/15/2030"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-45911-1"\8163094\"12/508,006"\"Method to Improve Indium Bump Bonding Via Indium Oxide Removal Using a Two Step Plasma Process"\"08/16/2030"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-45948-1"\7843650\"12/490,422"\"Monolithic Afocal Telescope"\"06/24/2029"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-46253-1"\0\"12/237,159"\"Generation of optical combs in a whispering gallery mode resonator from a bichromatic pump"\
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-46843-1"\8169371\"12/541,725"\"A single-layer, all-metal patch antenna element with wide bandwidth"\"09/25/2030"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-46938-1"\8026768\"12/691,070"\"A 201Hg+ co-magnetometer for 199Hg+ trapped ion space atomic clocks"\"04/03/2030"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-47300-1"\0\"13/017,174"\"Textured Si Anode for High Capacity, Rapid Charge Rate Li Ion Batteries"\
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-47300-2"\0\"13/895,499"\"Textured Si Anode for High Capacity, Rapid Charge Rate Li Ion Batteries"\"01/31/2031"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-47310-1"\8502987\"13/018,672"\"Coherent Detector for Near-Angle Scattering and Polarization Characterization of Telescope Mirror Coatings"\"03/24/2032"
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-47604-1"\8649000\"13/277,954"\"Surface Enhanced Raman Scattering using Silica Whispering-Gallery Mode Resonators"\"07/10/2032"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-47717-1"\\"13/281,683"\"360-Degree Camera Head for Unmanned Surface Sea Vehicles"\
+"NASA Jet Propulsion Laboratory"\"Issued"\"NPO-47869-1"\8649609\"13/071,299"\"FPGA Vision Data Architecture"\"04/17/2032"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-47881-1"\\"14/151,684"\"Pulsed Plasma Lubricator (PPL) Technology for the In Situ Replenishment of Dry Lubricants in Extreme Environments"\
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-48140-1"\\"13/456,451"\"Probabilistic Surface Characterization for Safe Landing Hazard Detection and Avoidance"\
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-48413-1"\\"13/757,929"\"Simple Laser-Communications Terminal for Downlink from Earth-Orbit at Rates Exceeding 10 Gb/s"\"02/04/2033"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-48539-1"\\"13/858,267"\"Neutral mounting of whispering gallery mode resonators for suppression of acceleration-induced frequency fluctuations"\"04/08/2033"
+"NASA Jet Propulsion Laboratory"\"Application"\"NPO-49086-1"\\"14/101,547"\"Electride Mediated Surface Enhanced Raman Spectroscopy"\"12/10/2033"
+"NASA Stennis Space Center"\"Issued"\"SSC-00040"\5726632\"08/622,178"\"HANDHELD HYDROGEN FIRE IMAGER"\"03/14/2016"
+"NASA Stennis Space Center"\"Issued"\"SSC-00050"\6020587\"09/3,212"\"A HAND HELD PLANT STRESS DETECTION SYSTEM"\"01/06/2018"
+"NASA Stennis Space Center"\"Issued"\"SSC-00247"\8618933\"11/866,042"\"Valve Health Monitoring System Utilizing Smart Instrumentation for Real Time and Historical Data Tracking"\"05/03/2032"
+"NASA Stennis Space Center"\"Issued"\"SSC-00264"\8336849\"12/704193"\"Conical Seat Shut Off Valve"\"01/13/2031"
+"NASA Stennis Space Center"\"Issued"\"SSC-00327"\8401820\"12/566,111"\"IN SITU HEALTH MONITORING OF PIEZOELECTRIC SENSORS"\"07/31/2030"
|
google__jax-9658 | [QoL] Add copy button in docs code snippets
Since I'm a bit lazy, I'd like to have a "copy to clipboard" button in jax docs to copy over code snippets instead of drag-select-copying them. Like this:

Duplicate Checks:
Nothing relevant comes up when searching for "copy button", "docs copy button" or even "button" for that matter.
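A minimal sketch of how such a button is usually wired up in a Sphinx project, assuming the `sphinx-copybutton` extension (the option names below are illustrative features of that extension, not something requested in the issue itself):

```python
# docs/conf.py -- illustrative excerpt only, not the full JAX configuration.
# Assumes sphinx-copybutton is installed (pip install sphinx-copybutton).
extensions = [
    # ... existing Sphinx extensions ...
    'sphinx_copybutton',  # adds a copy-to-clipboard button to code blocks
]

# Optional tweak (assumption, not part of the actual patch): strip leading
# ">>> ", "... " or "$ " prompts when copying so the pasted code runs as-is.
copybutton_prompt_text = r">>> |\.\.\. |\$ "
copybutton_prompt_is_regexp = True
```

The patch recorded below does the minimal version of this: it appends `'sphinx_copybutton'` to `extensions` in `docs/conf.py` and adds `sphinx-copybutton>=0.5.0` to `docs/requirements.txt`.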
| [
{
"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# -*- coding: utf-8 -*-\n#\n# Configuration file for the Sphinx documentation builder.\n#\n# This file does only contain a selection of the most common options. For a\n# full list see the documentation:\n# http://www.sphinx-doc.org/en/master/config\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\nsys.path.insert(0, os.path.abspath('..'))\n\n\n# Currently type aliases are expanded. We tried a workaround along the lines of:\n# https://github.com/sphinx-doc/sphinx/issues/6518#issuecomment-589613836\n# Unfortunately, this workaround makes Sphinx drop module-level documentation.\n# See https://github.com/google/jax/issues/3452.\n\n# -- Project information -----------------------------------------------------\n\nproject = 'JAX'\ncopyright = '2020, Google LLC. NumPy and SciPy documentation are copyright the respective authors.'\nauthor = 'The JAX authors'\n\n# The short X.Y version\nversion = ''\n# The full version, including alpha/beta/rc tags\nrelease = ''\n\n\n# -- General configuration ---------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\nneeds_sphinx = '2.1'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nsys.path.append(os.path.abspath('sphinxext'))\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.intersphinx',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.napoleon',\n 'sphinx.ext.viewcode',\n 'matplotlib.sphinxext.plot_directive',\n 'sphinx_autodoc_typehints',\n 'myst_nb',\n \"sphinx_remove_toctrees\",\n 'jax_extensions',\n]\n\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3/', None),\n 'numpy': ('https://numpy.org/doc/stable/', None),\n 'scipy': ('https://docs.scipy.org/doc/scipy-1.8.0/html-scipyorg/', None),\n}\n\nsuppress_warnings = [\n 'ref.citation', # Many duplicated citations in numpy/scipy docstrings.\n 'ref.footnote', # Many unreferenced footnotes in numpy/scipy docstrings\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# Note: important to list ipynb before md here: we have both md and ipynb\n# copies of each notebook, and myst will choose which to convert based on\n# the order in the source_suffix list. Notebooks which are not executed have\n# outputs stored in ipynb but not in md, so we must convert the ipynb.\nsource_suffix = ['.rst', '.ipynb', '.md']\n\n# The main toctree document.\nmain_doc = 'index'\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\n # Sometimes sphinx reads its own outputs as inputs!\n 'build/html',\n 'build/jupyter_execute',\n 'notebooks/README.md',\n 'README.md',\n # Ignore markdown source for notebooks; myst-nb builds from the ipynb\n # These are kept in sync using the jupytext pre-commit hook.\n 'notebooks/*.md',\n 'design_notes/type_promotion.md',\n # TODO: revert to jax-101/*.md once 08-pjit has a notebook\n 'jax-101/01-jax-basics.md',\n 'jax-101/02-jitting.md',\n 'jax-101/03-vectorization.md',\n 'jax-101/04-advanced-autodiff.md',\n 'jax-101/05-random-numbers.md',\n 'jax-101/05.1-pytrees.md',\n 'jax-101/06-parallelism.md',\n 'jax-101/07-state.md',\n 'autodidax.md',\n # Attempt to fix RTD build failure\n 'transformations.md',\n]\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = None\n\n\nautosummary_generate = True\nnapolean_use_rtype = False\n\n# mathjax_config = {\n# 'TeX': {'equationNumbers': {'autoNumber': 'AMS', 'useLabelIds': True}},\n# }\n\n# Additional files needed for generating LaTeX/PDF output:\n# latex_additional_files = ['references.bib']\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_book_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n 'logo_only': True,\n 'show_toc_level': 2,\n}\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\nhtml_logo = '_static/jax_logo_250px.png'\n\nhtml_favicon = '_static/favicon.png'\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# The default sidebars (for documents that don't match any pattern) are\n# defined by theme itself. 
Builtin themes are using these templates by\n# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',\n# 'searchbox.html']``.\n#\n# html_sidebars = {}\n\n# -- Options for myst ----------------------------------------------\njupyter_execute_notebooks = \"force\"\nexecution_allow_errors = False\nexecution_fail_on_error = True # Requires https://github.com/executablebooks/MyST-NB/pull/296\n\n# Notebook cell execution timeout; defaults to 30.\nexecution_timeout = 100\n\n# List of patterns, relative to source directory, that match notebook\n# files that will not be executed.\nexecution_excludepatterns = [\n # Slow notebook: long time to load tf.ds\n 'notebooks/neural_network_with_tfds_data.*',\n # Slow notebook\n 'notebooks/Neural_Network_and_Data_Loading.*',\n # Strange error apparently due to asynchronous cell execution\n 'notebooks/thinking_in_jax.*',\n # TODO(jakevdp): enable execution on these\n 'design_notes/type_promotion.*',\n 'jax-101/*',\n 'notebooks/xmap_tutorial.*',\n]\n\n# -- Options for HTMLHelp output ---------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'JAXdoc'\n\n\n# -- Options for LaTeX output ------------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (main_doc, 'JAX.tex', 'JAX Documentation',\n 'The JAX authors', 'manual'),\n]\n\n\n# -- Options for manual page output ------------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (main_doc, 'jax', 'JAX Documentation',\n [author], 1)\n]\n\n\n# -- Options for Texinfo output ----------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (main_doc, 'JAX', 'JAX Documentation',\n author, 'JAX', 'One line description of project.',\n 'Miscellaneous'),\n]\n\n\n# -- Options for Epub output -------------------------------------------------\n\n# Bibliographic Dublin Core info.\nepub_title = project\n\n# The unique identifier of the text. This can be a ISBN number\n# or the project homepage.\n#\n# epub_identifier = ''\n\n# A unique identification for the text.\n#\n# epub_uid = ''\n\n# A list of files that should not be packed into the epub file.\nepub_exclude_files = ['search.html']\n\n\n# -- Extension configuration -------------------------------------------------\n\n# Tell sphinx-autodoc-typehints to generate stub parameter annotations including\n# types, even if the parameters aren't explicitly documented.\nalways_document_param_types = True\n\n\n# Remove auto-generated API docs from sidebars. They take too long to build.\nremove_from_toctrees = [\"_autosummary/*\"]\n",
"path": "docs/conf.py"
}
] | [
{
"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# -*- coding: utf-8 -*-\n#\n# Configuration file for the Sphinx documentation builder.\n#\n# This file does only contain a selection of the most common options. For a\n# full list see the documentation:\n# http://www.sphinx-doc.org/en/master/config\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\nsys.path.insert(0, os.path.abspath('..'))\n\n\n# Currently type aliases are expanded. We tried a workaround along the lines of:\n# https://github.com/sphinx-doc/sphinx/issues/6518#issuecomment-589613836\n# Unfortunately, this workaround makes Sphinx drop module-level documentation.\n# See https://github.com/google/jax/issues/3452.\n\n# -- Project information -----------------------------------------------------\n\nproject = 'JAX'\ncopyright = '2020, Google LLC. NumPy and SciPy documentation are copyright the respective authors.'\nauthor = 'The JAX authors'\n\n# The short X.Y version\nversion = ''\n# The full version, including alpha/beta/rc tags\nrelease = ''\n\n\n# -- General configuration ---------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\nneeds_sphinx = '2.1'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nsys.path.append(os.path.abspath('sphinxext'))\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.intersphinx',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.napoleon',\n 'sphinx.ext.viewcode',\n 'matplotlib.sphinxext.plot_directive',\n 'sphinx_autodoc_typehints',\n 'myst_nb',\n \"sphinx_remove_toctrees\",\n 'sphinx_copybutton',\n 'jax_extensions',\n]\n\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3/', None),\n 'numpy': ('https://numpy.org/doc/stable/', None),\n 'scipy': ('https://docs.scipy.org/doc/scipy-1.8.0/html-scipyorg/', None),\n}\n\nsuppress_warnings = [\n 'ref.citation', # Many duplicated citations in numpy/scipy docstrings.\n 'ref.footnote', # Many unreferenced footnotes in numpy/scipy docstrings\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# Note: important to list ipynb before md here: we have both md and ipynb\n# copies of each notebook, and myst will choose which to convert based on\n# the order in the source_suffix list. 
Notebooks which are not executed have\n# outputs stored in ipynb but not in md, so we must convert the ipynb.\nsource_suffix = ['.rst', '.ipynb', '.md']\n\n# The main toctree document.\nmain_doc = 'index'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\n # Sometimes sphinx reads its own outputs as inputs!\n 'build/html',\n 'build/jupyter_execute',\n 'notebooks/README.md',\n 'README.md',\n # Ignore markdown source for notebooks; myst-nb builds from the ipynb\n # These are kept in sync using the jupytext pre-commit hook.\n 'notebooks/*.md',\n 'design_notes/type_promotion.md',\n # TODO: revert to jax-101/*.md once 08-pjit has a notebook\n 'jax-101/01-jax-basics.md',\n 'jax-101/02-jitting.md',\n 'jax-101/03-vectorization.md',\n 'jax-101/04-advanced-autodiff.md',\n 'jax-101/05-random-numbers.md',\n 'jax-101/05.1-pytrees.md',\n 'jax-101/06-parallelism.md',\n 'jax-101/07-state.md',\n 'autodidax.md',\n # Attempt to fix RTD build failure\n 'transformations.md',\n]\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = None\n\n\nautosummary_generate = True\nnapolean_use_rtype = False\n\n# mathjax_config = {\n# 'TeX': {'equationNumbers': {'autoNumber': 'AMS', 'useLabelIds': True}},\n# }\n\n# Additional files needed for generating LaTeX/PDF output:\n# latex_additional_files = ['references.bib']\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_book_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n 'logo_only': True,\n 'show_toc_level': 2,\n}\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\nhtml_logo = '_static/jax_logo_250px.png'\n\nhtml_favicon = '_static/favicon.png'\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# The default sidebars (for documents that don't match any pattern) are\n# defined by theme itself. 
Builtin themes are using these templates by\n# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',\n# 'searchbox.html']``.\n#\n# html_sidebars = {}\n\n# -- Options for myst ----------------------------------------------\njupyter_execute_notebooks = \"force\"\nexecution_allow_errors = False\nexecution_fail_on_error = True # Requires https://github.com/executablebooks/MyST-NB/pull/296\n\n# Notebook cell execution timeout; defaults to 30.\nexecution_timeout = 100\n\n# List of patterns, relative to source directory, that match notebook\n# files that will not be executed.\nexecution_excludepatterns = [\n # Slow notebook: long time to load tf.ds\n 'notebooks/neural_network_with_tfds_data.*',\n # Slow notebook\n 'notebooks/Neural_Network_and_Data_Loading.*',\n # Strange error apparently due to asynchronous cell execution\n 'notebooks/thinking_in_jax.*',\n # TODO(jakevdp): enable execution on these\n 'design_notes/type_promotion.*',\n 'jax-101/*',\n 'notebooks/xmap_tutorial.*',\n]\n\n# -- Options for HTMLHelp output ---------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'JAXdoc'\n\n\n# -- Options for LaTeX output ------------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (main_doc, 'JAX.tex', 'JAX Documentation',\n 'The JAX authors', 'manual'),\n]\n\n\n# -- Options for manual page output ------------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (main_doc, 'jax', 'JAX Documentation',\n [author], 1)\n]\n\n\n# -- Options for Texinfo output ----------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (main_doc, 'JAX', 'JAX Documentation',\n author, 'JAX', 'One line description of project.',\n 'Miscellaneous'),\n]\n\n\n# -- Options for Epub output -------------------------------------------------\n\n# Bibliographic Dublin Core info.\nepub_title = project\n\n# The unique identifier of the text. This can be a ISBN number\n# or the project homepage.\n#\n# epub_identifier = ''\n\n# A unique identification for the text.\n#\n# epub_uid = ''\n\n# A list of files that should not be packed into the epub file.\nepub_exclude_files = ['search.html']\n\n\n# -- Extension configuration -------------------------------------------------\n\n# Tell sphinx-autodoc-typehints to generate stub parameter annotations including\n# types, even if the parameters aren't explicitly documented.\nalways_document_param_types = True\n\n\n# Remove auto-generated API docs from sidebars. They take too long to build.\nremove_from_toctrees = [\"_autosummary/*\"]\n",
"path": "docs/conf.py"
}
] | diff --git a/docs/conf.py b/docs/conf.py
index c75d97ec95ce..42e5bc261200 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -70,6 +70,7 @@
'sphinx_autodoc_typehints',
'myst_nb',
"sphinx_remove_toctrees",
+ 'sphinx_copybutton',
'jax_extensions',
]
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 09454bb50aff..b62db0316a84 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -5,6 +5,7 @@ sphinx-book-theme
sphinx-remove-toctrees
# Newer versions cause issues; see https://github.com/google/jax/pull/6449
sphinx-autodoc-typehints==1.11.1
+sphinx-copybutton>=0.5.0
jupyter-sphinx>=0.3.2
myst-nb
|
numpy__numpy-3235 | 2to3 run `itertools_imports` fixer
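For reference, a small illustration (my own snippet, not from the issue) of what lib2to3's `itertools_imports` fixer does once it is no longer skipped:

```python
from lib2to3.refactor import RefactoringTool

# Hedged illustration only: apply just the itertools_imports fixer to a
# Python 2 import line. imap/izip are dropped because map/zip are builtins
# in Python 3, and ifilterfalse is renamed to filterfalse.
tool = RefactoringTool(['lib2to3.fixes.fix_itertools_imports'])
src = "from itertools import imap, izip, ifilterfalse\n"
print(tool.refactor_string(src, '<example>'))
# Expected output: from itertools import filterfalse
```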
| [
{
"content": "#!/usr/bin/env python3\n# -*- python -*-\n\"\"\"\n%prog SUBMODULE...\n\nHack to pipe submodules of Numpy through 2to3 and build them in-place\none-by-one.\n\nExample usage:\n\n python3 tools/py3tool.py testing distutils core\n\nThis will copy files to _py3k/numpy, add a dummy __init__.py and\nversion.py on the top level, and copy and 2to3 the files of the three\nsubmodules.\n\nWhen running py3tool again, only changed files are re-processed, which\nmakes the test-bugfix cycle faster.\n\n\"\"\"\nfrom __future__ import division, absolute_import, print_function\n\nfrom optparse import OptionParser\nimport shutil\nimport os\nimport sys\nimport re\nimport subprocess\nimport fnmatch\n\nif os.environ.get('USE_2TO3CACHE'):\n import lib2to3cache\n\nBASE = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))\nTEMP = os.path.normpath(os.path.join(BASE, '_py3k'))\n\nSCRIPT_2TO3 = os.path.join(BASE, 'tools', '2to3.py')\n\nEXTRA_2TO3_FLAGS = {\n 'numpy/core/defchararray.py': '-x unicode',\n 'numpy/compat/py3k.py': '-x unicode',\n 'numpy/ma/timer_comparison.py': 'skip',\n}\n\n# Names of fixers to skip when running 2to3. This is a complete list of\n# available fixers, with fixers not currently skipped commented out.\nFIXES_TO_SKIP = [\n 'apply',\n# 'basestring',\n 'buffer',\n 'callable',\n 'dict',\n 'exec',\n 'execfile',\n 'exitfunc',\n 'filter',\n 'funcattrs',\n 'future',\n 'getcwdu',\n 'has_key',\n# 'idioms',\n 'import',\n 'imports',\n 'imports2',\n 'input',\n 'intern',\n# 'isinstance',\n# 'itertools',\n# 'itertools_imports',\n# 'long',\n 'map',\n 'metaclass',\n 'methodattrs',\n 'ne',\n# 'next',\n# 'nonzero',\n# 'numliterals',\n 'operator',\n 'paren',\n 'print',\n 'raise',\n 'raw_input',\n 'reduce',\n# 'renames',\n 'repr',\n 'setliteral',\n 'standarderror',\n 'sys_exc',\n 'throw',\n 'tuple_params',\n# 'types',\n# 'unicode',\n# 'urllib',\n# 'ws_comma',\n 'xrange',\n 'xreadlines',\n# 'zip',\n]\n\nskip_fixes= []\nfor _t in FIXES_TO_SKIP:\n skip_fixes.append('-x')\n skip_fixes.append(_t)\n\n\ndef main():\n p = OptionParser(usage=__doc__.strip())\n p.add_option(\"--clean\", \"-c\", action=\"store_true\",\n help=\"clean source directory\")\n options, args = p.parse_args()\n\n if not args:\n p.error('no submodules given')\n else:\n dirs = ['numpy/%s' % x for x in map(os.path.basename, args)]\n\n # Prepare\n if not os.path.isdir(TEMP):\n os.makedirs(TEMP)\n\n # Set up dummy files (for building only submodules)\n dummy_files = {\n '__init__.py': 'from numpy.version import version as __version__',\n 'version.py': 'version = \"1.4.0.dev\"'\n }\n\n for fn, content in dummy_files.items():\n fn = os.path.join(TEMP, 'numpy', fn)\n if not os.path.isfile(fn):\n try:\n os.makedirs(os.path.dirname(fn))\n except OSError:\n pass\n f = open(fn, 'wb+')\n f.write(content.encode('ascii'))\n f.close()\n\n # Environment\n pp = [os.path.abspath(TEMP)]\n def getenv():\n env = dict(os.environ)\n env.update({'PYTHONPATH': ':'.join(pp)})\n return env\n\n # Copy\n for d in dirs:\n src = os.path.join(BASE, d)\n dst = os.path.join(TEMP, d)\n\n # Run 2to3\n sync_2to3(dst=dst,\n src=src,\n patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),\n clean=options.clean)\n\n # Run setup.py, falling back to Pdb post-mortem on exceptions\n setup_py = os.path.join(dst, 'setup.py')\n if os.path.isfile(setup_py):\n code = \"\"\"\\\nimport pdb, sys, traceback\np = pdb.Pdb()\ntry:\n import __main__\n __main__.__dict__.update({\n \"__name__\": \"__main__\", \"__file__\": \"setup.py\",\n \"__builtins__\": __builtins__})\n fp 
= open(\"setup.py\", \"rb\")\n try:\n exec(compile(fp.read(), \"setup.py\", 'exec'))\n finally:\n fp.close()\nexcept SystemExit:\n raise\nexcept:\n traceback.print_exc()\n t = sys.exc_info()[2]\n p.interaction(None, t)\n\"\"\"\n ret = subprocess.call([sys.executable, '-c', code,\n 'build_ext', '-i'],\n cwd=dst,\n env=getenv())\n if ret != 0:\n raise RuntimeError(\"Build failed.\")\n\n # Run nosetests\n subprocess.call(['nosetests3', '-v', d], cwd=TEMP)\n\n\ndef walk_sync(dir1, dir2, _seen=None):\n if _seen is None:\n seen = {}\n else:\n seen = _seen\n\n if not dir1.endswith(os.path.sep):\n dir1 = dir1 + os.path.sep\n\n # Walk through stuff (which we haven't yet gone through) in dir1\n for root, dirs, files in os.walk(dir1):\n sub = root[len(dir1):]\n if sub in seen:\n dirs = [x for x in dirs if x not in seen[sub][0]]\n files = [x for x in files if x not in seen[sub][1]]\n seen[sub][0].extend(dirs)\n seen[sub][1].extend(files)\n else:\n seen[sub] = (dirs, files)\n if not dirs and not files:\n continue\n yield os.path.join(dir1, sub), os.path.join(dir2, sub), dirs, files\n\n if _seen is None:\n # Walk through stuff (which we haven't yet gone through) in dir2\n for root2, root1, dirs, files in walk_sync(dir2, dir1, _seen=seen):\n yield root1, root2, dirs, files\n\ndef sync_2to3(src, dst, patchfile=None, clean=False):\n import lib2to3.main\n from io import StringIO\n\n to_convert = []\n\n for src_dir, dst_dir, dirs, files in walk_sync(src, dst):\n for fn in dirs + files:\n src_fn = os.path.join(src_dir, fn)\n dst_fn = os.path.join(dst_dir, fn)\n\n # skip temporary etc. files\n if fn.startswith('.#') or fn.endswith('~'):\n continue\n\n # remove non-existing\n if os.path.exists(dst_fn) and not os.path.exists(src_fn):\n if clean:\n if os.path.isdir(dst_fn):\n shutil.rmtree(dst_fn)\n else:\n os.unlink(dst_fn)\n continue\n\n # make directories\n if os.path.isdir(src_fn):\n if not os.path.isdir(dst_fn):\n os.makedirs(dst_fn)\n continue\n\n dst_dir = os.path.dirname(dst_fn)\n if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):\n os.makedirs(dst_dir)\n\n # don't replace up-to-date files\n try:\n if os.path.isfile(dst_fn) and \\\n os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:\n continue\n except OSError:\n pass\n\n # copy file\n shutil.copyfile(src_fn, dst_fn)\n\n # add .py files to 2to3 list\n if dst_fn.endswith('.py'):\n to_convert.append((src_fn, dst_fn))\n\n # run 2to3\n flag_sets = {}\n for fn, dst_fn in to_convert:\n flag = ''\n for pat, opt in EXTRA_2TO3_FLAGS.items():\n if fnmatch.fnmatch(fn, pat):\n flag = opt\n break\n flag_sets.setdefault(flag, []).append(dst_fn)\n\n if patchfile:\n p = open(patchfile, 'wb+')\n else:\n p = open(os.devnull, 'wb')\n\n for flags, filenames in flag_sets.items():\n if flags == 'skip':\n continue\n\n _old_stdout = sys.stdout\n try:\n sys.stdout = StringIO()\n opt = []\n opt.extend(['-w', '-n'])\n opt.extend(skip_fixes)\n opt.extend(flags.split())\n opt.extend(filenames)\n lib2to3.main.main(\"lib2to3.fixes\", opt)\n finally:\n sys.stdout = _old_stdout\n\n p.close()\n\nif __name__ == \"__main__\":\n main()\n",
"path": "tools/py3tool.py"
}
] | [
{
"content": "#!/usr/bin/env python3\n# -*- python -*-\n\"\"\"\n%prog SUBMODULE...\n\nHack to pipe submodules of Numpy through 2to3 and build them in-place\none-by-one.\n\nExample usage:\n\n python3 tools/py3tool.py testing distutils core\n\nThis will copy files to _py3k/numpy, add a dummy __init__.py and\nversion.py on the top level, and copy and 2to3 the files of the three\nsubmodules.\n\nWhen running py3tool again, only changed files are re-processed, which\nmakes the test-bugfix cycle faster.\n\n\"\"\"\nfrom __future__ import division, absolute_import, print_function\n\nfrom optparse import OptionParser\nimport shutil\nimport os\nimport sys\nimport re\nimport subprocess\nimport fnmatch\n\nif os.environ.get('USE_2TO3CACHE'):\n import lib2to3cache\n\nBASE = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))\nTEMP = os.path.normpath(os.path.join(BASE, '_py3k'))\n\nSCRIPT_2TO3 = os.path.join(BASE, 'tools', '2to3.py')\n\nEXTRA_2TO3_FLAGS = {\n 'numpy/core/defchararray.py': '-x unicode',\n 'numpy/compat/py3k.py': '-x unicode',\n 'numpy/ma/timer_comparison.py': 'skip',\n}\n\n# Names of fixers to skip when running 2to3. This is a complete list of\n# available fixers, with fixers not currently skipped commented out.\nFIXES_TO_SKIP = [\n 'apply',\n# 'basestring',\n 'buffer',\n 'callable',\n 'dict',\n 'exec',\n 'execfile',\n 'exitfunc',\n 'filter',\n 'funcattrs',\n 'future',\n 'getcwdu',\n 'has_key',\n# 'idioms',\n 'import',\n 'imports',\n 'imports2',\n 'input',\n 'intern',\n# 'isinstance',\n# 'itertools',\n 'itertools_imports',\n# 'long',\n 'map',\n 'metaclass',\n 'methodattrs',\n 'ne',\n# 'next',\n# 'nonzero',\n# 'numliterals',\n 'operator',\n 'paren',\n 'print',\n 'raise',\n 'raw_input',\n 'reduce',\n# 'renames',\n 'repr',\n 'setliteral',\n 'standarderror',\n 'sys_exc',\n 'throw',\n 'tuple_params',\n# 'types',\n# 'unicode',\n# 'urllib',\n# 'ws_comma',\n 'xrange',\n 'xreadlines',\n# 'zip',\n]\n\nskip_fixes= []\nfor _t in FIXES_TO_SKIP:\n skip_fixes.append('-x')\n skip_fixes.append(_t)\n\n\ndef main():\n p = OptionParser(usage=__doc__.strip())\n p.add_option(\"--clean\", \"-c\", action=\"store_true\",\n help=\"clean source directory\")\n options, args = p.parse_args()\n\n if not args:\n p.error('no submodules given')\n else:\n dirs = ['numpy/%s' % x for x in map(os.path.basename, args)]\n\n # Prepare\n if not os.path.isdir(TEMP):\n os.makedirs(TEMP)\n\n # Set up dummy files (for building only submodules)\n dummy_files = {\n '__init__.py': 'from numpy.version import version as __version__',\n 'version.py': 'version = \"1.4.0.dev\"'\n }\n\n for fn, content in dummy_files.items():\n fn = os.path.join(TEMP, 'numpy', fn)\n if not os.path.isfile(fn):\n try:\n os.makedirs(os.path.dirname(fn))\n except OSError:\n pass\n f = open(fn, 'wb+')\n f.write(content.encode('ascii'))\n f.close()\n\n # Environment\n pp = [os.path.abspath(TEMP)]\n def getenv():\n env = dict(os.environ)\n env.update({'PYTHONPATH': ':'.join(pp)})\n return env\n\n # Copy\n for d in dirs:\n src = os.path.join(BASE, d)\n dst = os.path.join(TEMP, d)\n\n # Run 2to3\n sync_2to3(dst=dst,\n src=src,\n patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),\n clean=options.clean)\n\n # Run setup.py, falling back to Pdb post-mortem on exceptions\n setup_py = os.path.join(dst, 'setup.py')\n if os.path.isfile(setup_py):\n code = \"\"\"\\\nimport pdb, sys, traceback\np = pdb.Pdb()\ntry:\n import __main__\n __main__.__dict__.update({\n \"__name__\": \"__main__\", \"__file__\": \"setup.py\",\n \"__builtins__\": __builtins__})\n fp 
= open(\"setup.py\", \"rb\")\n try:\n exec(compile(fp.read(), \"setup.py\", 'exec'))\n finally:\n fp.close()\nexcept SystemExit:\n raise\nexcept:\n traceback.print_exc()\n t = sys.exc_info()[2]\n p.interaction(None, t)\n\"\"\"\n ret = subprocess.call([sys.executable, '-c', code,\n 'build_ext', '-i'],\n cwd=dst,\n env=getenv())\n if ret != 0:\n raise RuntimeError(\"Build failed.\")\n\n # Run nosetests\n subprocess.call(['nosetests3', '-v', d], cwd=TEMP)\n\n\ndef walk_sync(dir1, dir2, _seen=None):\n if _seen is None:\n seen = {}\n else:\n seen = _seen\n\n if not dir1.endswith(os.path.sep):\n dir1 = dir1 + os.path.sep\n\n # Walk through stuff (which we haven't yet gone through) in dir1\n for root, dirs, files in os.walk(dir1):\n sub = root[len(dir1):]\n if sub in seen:\n dirs = [x for x in dirs if x not in seen[sub][0]]\n files = [x for x in files if x not in seen[sub][1]]\n seen[sub][0].extend(dirs)\n seen[sub][1].extend(files)\n else:\n seen[sub] = (dirs, files)\n if not dirs and not files:\n continue\n yield os.path.join(dir1, sub), os.path.join(dir2, sub), dirs, files\n\n if _seen is None:\n # Walk through stuff (which we haven't yet gone through) in dir2\n for root2, root1, dirs, files in walk_sync(dir2, dir1, _seen=seen):\n yield root1, root2, dirs, files\n\ndef sync_2to3(src, dst, patchfile=None, clean=False):\n import lib2to3.main\n from io import StringIO\n\n to_convert = []\n\n for src_dir, dst_dir, dirs, files in walk_sync(src, dst):\n for fn in dirs + files:\n src_fn = os.path.join(src_dir, fn)\n dst_fn = os.path.join(dst_dir, fn)\n\n # skip temporary etc. files\n if fn.startswith('.#') or fn.endswith('~'):\n continue\n\n # remove non-existing\n if os.path.exists(dst_fn) and not os.path.exists(src_fn):\n if clean:\n if os.path.isdir(dst_fn):\n shutil.rmtree(dst_fn)\n else:\n os.unlink(dst_fn)\n continue\n\n # make directories\n if os.path.isdir(src_fn):\n if not os.path.isdir(dst_fn):\n os.makedirs(dst_fn)\n continue\n\n dst_dir = os.path.dirname(dst_fn)\n if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):\n os.makedirs(dst_dir)\n\n # don't replace up-to-date files\n try:\n if os.path.isfile(dst_fn) and \\\n os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:\n continue\n except OSError:\n pass\n\n # copy file\n shutil.copyfile(src_fn, dst_fn)\n\n # add .py files to 2to3 list\n if dst_fn.endswith('.py'):\n to_convert.append((src_fn, dst_fn))\n\n # run 2to3\n flag_sets = {}\n for fn, dst_fn in to_convert:\n flag = ''\n for pat, opt in EXTRA_2TO3_FLAGS.items():\n if fnmatch.fnmatch(fn, pat):\n flag = opt\n break\n flag_sets.setdefault(flag, []).append(dst_fn)\n\n if patchfile:\n p = open(patchfile, 'wb+')\n else:\n p = open(os.devnull, 'wb')\n\n for flags, filenames in flag_sets.items():\n if flags == 'skip':\n continue\n\n _old_stdout = sys.stdout\n try:\n sys.stdout = StringIO()\n opt = []\n opt.extend(['-w', '-n'])\n opt.extend(skip_fixes)\n opt.extend(flags.split())\n opt.extend(filenames)\n lib2to3.main.main(\"lib2to3.fixes\", opt)\n finally:\n sys.stdout = _old_stdout\n\n p.close()\n\nif __name__ == \"__main__\":\n main()\n",
"path": "tools/py3tool.py"
}
] | diff --git a/tools/py3tool.py b/tools/py3tool.py
index 9d67bf2c52fc..6fca72ebae45 100755
--- a/tools/py3tool.py
+++ b/tools/py3tool.py
@@ -66,7 +66,7 @@
'intern',
# 'isinstance',
# 'itertools',
-# 'itertools_imports',
+ 'itertools_imports',
# 'long',
'map',
'metaclass',
|
sunpy__sunpy-3973 | We don't close VSO connections
When I run some of the examples that call the VSO, I see this output in my terminal:
```
generating gallery for generated/gallery/acquiring_data... [100%] searching_vso.py
/home/nabil/GitHub/sunpy/.tox/build_docs/lib/python3.8/site-packages/sphinx_gallery/gen_rst.py:692: ResourceWarning: unclosed <socket.socket fd=14, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('192.168.1.103', 45712), raddr=('146.5.21.123', 80)>
gc.collect()
/home/nabil/GitHub/sunpy/.tox/build_docs/lib/python3.8/site-packages/sphinx_gallery/gen_rst.py:692: ResourceWarning: unclosed <socket.socket fd=17, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('192.168.1.103', 45792), raddr=('146.5.21.123', 80)>
gc.collect()
/home/nabil/GitHub/sunpy/.tox/build_docs/lib/python3.8/site-packages/sphinx_gallery/gen_rst.py:692: ResourceWarning: unclosed <socket.socket fd=16, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('192.168.1.103', 45790), raddr=('146.5.21.123', 80)>
gc.collect()
```
We should find out where we aren't closing these connections and close them.
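
For context, here is a minimal sketch of one possible direction (not the actual fix): zeep's default `Transport` keeps a `requests.Session` open at `client.transport.session`, so closing that session once we are done with a client would release the sockets that trigger these warnings. The client construction below is only illustrative.

```python
import zeep

# Minimal sketch, assuming zeep's default Transport (which stores a
# requests.Session at client.transport.session). Closing that session
# releases the pooled sockets that otherwise surface as ResourceWarnings.
client = zeep.Client('http://docs.virtualsolar.org/WSDL/VSOi_rpc_literal.wsdl')
try:
    pass  # ... run VSO queries here ...
finally:
    client.transport.session.close()  # close the underlying HTTP connections
```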
| [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThis module provides a wrapper around the VSO API.\n\"\"\"\n\nimport os\nimport re\nimport cgi\nimport socket\nimport datetime\nimport warnings\nimport itertools\nfrom functools import partial\nfrom collections import defaultdict\nfrom urllib.error import URLError, HTTPError\nfrom urllib.request import urlopen\n\nimport zeep\nfrom parfive import Downloader, Results\nfrom zeep.helpers import serialize_object\n\nimport astropy.units as u\nfrom astropy.table import QTable as Table\n\nfrom sunpy import config\nfrom sunpy.net.attr import and_\nfrom sunpy.net.base_client import BaseClient, BaseQueryResponse\nfrom sunpy.net.vso import attrs\nfrom sunpy.net.vso.attrs import _TIMEFORMAT as TIMEFORMAT\nfrom sunpy.net.vso.attrs import _walker as walker\nfrom sunpy.time import TimeRange, parse_time\nfrom sunpy.util.decorators import deprecated\nfrom sunpy.util.exceptions import SunpyUserWarning\nfrom sunpy.util.net import slugify\n\nfrom .. import _attrs as core_attrs\nfrom .zeep_plugins import SunPyLoggingZeepPlugin\nfrom .exceptions import *\n\nTIME_FORMAT = config.get(\"general\", \"time_format\")\n\nDEFAULT_URL_PORT = [{'url': 'http://docs.virtualsolar.org/WSDL/VSOi_rpc_literal.wsdl',\n 'port': 'nsoVSOi'},\n {'url': 'https://sdac.virtualsolar.org/API/VSOi_rpc_literal.wsdl',\n 'port': 'sdacVSOi'}]\n\nRANGE = re.compile(r'(\\d+)(\\s*-\\s*(\\d+))?(\\s*([a-zA-Z]+))?')\n\n\nclass _Str(str):\n\n \"\"\" Subclass of string that contains a meta attribute for the\n record_item associated with the file. \"\"\"\n pass\n\n\n# ----------------------------------------\n\ndef _parse_waverange(string):\n min_, max_, unit = RANGE.match(string).groups()[::2]\n return {\n 'wave_wavemin': min_,\n 'wave_wavemax': min_ if max_ is None else max_,\n 'wave_waveunit': 'Angstrom' if unit is None else unit,\n }\n\n\ndef _parse_date(string):\n start, end = string.split(' - ')\n return {'time_start': start.strip(), 'time_end': end.strip()}\n\n\ndef iter_records(response):\n for prov_item in response.provideritem:\n if not hasattr(prov_item, 'record') or not prov_item.record:\n continue\n yield from prov_item.record.recorditem\n\n\ndef iter_errors(response):\n for prov_item in response.provideritem:\n if not hasattr(prov_item, 'record') or not prov_item.record:\n yield prov_item\n\n\ndef check_connection(url):\n try:\n return urlopen(url).getcode() == 200\n except (socket.error, socket.timeout, HTTPError, URLError) as e:\n warnings.warn(f\"Connection to {url} failed with error {e}. 
Retrying with different url and port.\",\n SunpyUserWarning)\n return None\n\n\ndef get_online_vso_url():\n \"\"\"\n Return the first VSO url and port combination that is online.\n \"\"\"\n for mirror in DEFAULT_URL_PORT:\n if check_connection(mirror['url']):\n return mirror\n\n\ndef build_client(url=None, port_name=None, **kwargs):\n \"\"\"\n Construct a `zeep.Client` object to connect to VSO.\n\n Parameters\n ----------\n url : `str`\n The URL to connect to.\n\n port_name : `str`\n The \"port\" to use.\n\n kwargs : `dict`\n All extra keyword arguments are passed to `zeep.Client`.\n\n Returns\n -------\n\n `zeep.Client`\n \"\"\"\n if url is None and port_name is None:\n mirror = get_online_vso_url()\n if mirror is None:\n raise ConnectionError(\"No online VSO mirrors could be found.\")\n url = mirror['url']\n port_name = mirror['port']\n elif url and port_name:\n if not check_connection(url):\n raise ConnectionError(f\"Can't connect to url {url}\")\n else:\n raise ValueError(\"Both url and port_name must be specified if either is.\")\n\n if \"plugins\" not in kwargs:\n kwargs[\"plugins\"] = [SunPyLoggingZeepPlugin()]\n\n client = zeep.Client(url, port_name=port_name, **kwargs)\n client.set_ns_prefix('VSO', 'http://virtualsolar.org/VSO/VSOi')\n return client\n\n\nclass QueryResponse(BaseQueryResponse):\n \"\"\"\n A container for VSO Records returned from VSO Searches.\n \"\"\"\n\n def __init__(self, lst, queryresult=None):\n super().__init__()\n self._data = lst\n self.queryresult = queryresult\n self.errors = []\n self._client = VSOClient()\n\n def __getitem__(self, item):\n # Always index so a list comes back\n if isinstance(item, int):\n item = slice(item, item+1)\n return type(self)(self._data[item], queryresult=self.queryresult)\n\n def __len__(self):\n return len(self._data)\n\n def __iter__(self):\n for block in self._data:\n yield block\n\n @property\n def blocks(self):\n return self._data\n\n @property\n def client(self):\n return self._client\n\n @client.setter\n def client(self, client):\n self._client = client\n\n def search(self, *query):\n \"\"\" Furtherly reduce the query response by matching it against\n another query, e.g. response.search(attrs.Instrument('aia')). \"\"\"\n query = and_(*query)\n return QueryResponse(\n attrs._filter_results(query, self), self.queryresult\n )\n\n @classmethod\n def create(cls, queryresult):\n return cls(list(iter_records(queryresult)), queryresult)\n\n def total_size(self):\n \"\"\" Total size of data in KB. May be less than the actual\n size because of inaccurate data providers. \"\"\"\n # Warn about -1 values?\n return sum(record.size for record in self if record.size > 0)\n\n def time_range(self):\n \"\"\" Return total time-range all records span across. 
\"\"\"\n return TimeRange(min(record.time.start for record in self if record.time.start is not None),\n max(record.time.end for record in self if record.time.end is not None))\n\n def build_table(self):\n \"\"\"\n Create a human readable table.\n\n Returns\n -------\n table : `astropy.table.QTable`\n \"\"\"\n keywords = ['Start Time', 'End Time', 'Source', 'Instrument', 'Type', 'Wavelength']\n record_items = {}\n for key in keywords:\n record_items[key] = []\n\n def validate_time(time):\n # Handle if the time is None when coming back from VSO\n if time is None:\n return ['None']\n if record.time.start is not None:\n return [parse_time(time).strftime(TIME_FORMAT)]\n else:\n return ['N/A']\n\n for record in self:\n record_items['Start Time'].append(validate_time(record.time.start))\n record_items['End Time'].append(validate_time(record.time.end))\n record_items['Source'].append(str(record.source))\n record_items['Instrument'].append(str(record.instrument))\n record_items['Type'].append(str(record.extent.type)\n if record.extent.type is not None else ['N/A'])\n # If we have a start and end Wavelength, make a quantity\n if hasattr(record, 'wave') and record.wave.wavemin and record.wave.wavemax:\n unit = record.wave.waveunit\n # Convert this so astropy units parses it correctly\n if unit == \"kev\":\n unit = \"keV\"\n record_items['Wavelength'].append(u.Quantity([float(record.wave.wavemin),\n float(record.wave.wavemax)],\n unit=unit))\n # If not save None\n else:\n record_items['Wavelength'].append(None)\n # If we have no wavelengths for the whole list, drop the col\n if all([a is None for a in record_items['Wavelength']]):\n record_items.pop('Wavelength')\n keywords.remove('Wavelength')\n else:\n # Make whole column a quantity\n try:\n with u.set_enabled_equivalencies(u.spectral()):\n record_items['Wavelength'] = u.Quantity(record_items['Wavelength'])\n # If we have mixed units or some Nones just represent as strings\n except (u.UnitConversionError, TypeError):\n record_items['Wavelength'] = [str(a) for a in record_items['Wavelength']]\n\n return Table(record_items)[keywords]\n\n def add_error(self, exception):\n self.errors.append(exception)\n\n def response_block_properties(self):\n \"\"\"\n Returns a set of class attributes on all the response blocks.\n\n Returns\n -------\n s : `set`\n List of strings, containing attribute names in the response blocks.\n \"\"\"\n s = {a if not a.startswith('_') else None for a in dir(self[0])}\n for resp in self[1:]:\n s = s.intersection({a if not a.startswith('_') else None for a in dir(resp)})\n\n s.remove(None)\n return s\n\n\nclass VSOClient(BaseClient):\n \"\"\"\n VSO Client\n\n Parameters\n ----------\n url : `str`, optional\n The VSO url to use. If not specified will use the first online known URL.\n\n port : `str`, optional\n The VSO port name to use. If not specified will use the first online known URL.\n\n api : `zeep.Client`, optional\n The `zeep.Client` instance to use for interacting with the VSO. 
If not\n specified one will be created.\n \"\"\"\n method_order = [\n 'URL-FILE_Rice', 'URL-FILE', 'URL-packaged', 'URL-TAR_GZ', 'URL-ZIP', 'URL-TAR',\n ]\n\n def __init__(self, url=None, port=None, api=None):\n if not isinstance(api, zeep.Client):\n api = build_client(url, port)\n if api is None:\n raise ConnectionError(\"Cannot find an online VSO mirror.\")\n self.api = api\n\n def make(self, atype, **kwargs):\n \"\"\"\n Create a new SOAP object.\n \"\"\"\n obj = self.api.get_type(f\"VSO:{atype}\")\n return obj(**kwargs)\n\n def search(self, *query):\n \"\"\" Query data from the VSO with the new API. Takes a variable number\n of attributes as parameter, which are chained together using AND.\n\n The new query language allows complex queries to be easily formed.\n\n Examples\n --------\n Query all data from eit or aia between 2010-01-01T00:00 and\n 2010-01-01T01:00.\n\n >>> from datetime import datetime\n >>> from sunpy.net import vso, attrs as a\n >>> client = vso.VSOClient() # doctest: +REMOTE_DATA\n >>> client.search(\n ... a.Time(datetime(2010, 1, 1), datetime(2010, 1, 1, 1)),\n ... a.Instrument('eit') | a.Instrument('aia')) # doctest: +REMOTE_DATA\n <sunpy.net.vso.vso.QueryResponse object at ...>\n Start Time [1] End Time [1] Source ... Type Wavelength [2]\n ... Angstrom\n ------------------- ------------------- ------ ... -------- --------------\n 2010-01-01 00:00:08 2010-01-01 00:00:20 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:12:08 2010-01-01 00:12:20 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:24:10 2010-01-01 00:24:22 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:36:08 2010-01-01 00:36:20 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:48:09 2010-01-01 00:48:21 SOHO ... FULLDISK 195.0 .. 195.0\n\n Returns\n -------\n out : :py:class:`QueryResult` (enhanced list)\n Matched items. Return value is of same type as the one of\n :py:meth:`VSOClient.search`.\n \"\"\"\n query = and_(*query)\n QueryRequest = self.api.get_type('VSO:QueryRequest')\n VSOQueryResponse = self.api.get_type('VSO:QueryResponse')\n responses = []\n for block in walker.create(query, self.api):\n try:\n query_response = self.api.service.Query(\n QueryRequest(block=block)\n )\n for resp in query_response:\n if resp[\"error\"]:\n warnings.warn(resp[\"error\"], SunpyUserWarning)\n responses.append(\n VSOQueryResponse(query_response)\n )\n except Exception as ex:\n response = QueryResponse.create(self.merge(responses))\n response.add_error(ex)\n\n return QueryResponse.create(self.merge(responses))\n\n def merge(self, queryresponses):\n \"\"\" Merge responses into one. 
\"\"\"\n if len(queryresponses) == 1:\n return queryresponses[0]\n\n fileids = set()\n providers = {}\n\n for queryresponse in queryresponses:\n for provideritem in queryresponse.provideritem:\n provider = provideritem.provider\n if not hasattr(provideritem, 'record'):\n continue\n if not hasattr(provideritem.record, 'recorditem'):\n continue\n if provideritem.provider not in providers:\n providers[provider] = provideritem\n fileids |= {\n record_item.fileid\n for record_item in provideritem.record.recorditem\n }\n else:\n for record_item in provideritem.record.recorditem:\n if record_item.fileid not in fileids:\n fileids.add(record_item.fileid)\n providers[provider].record.recorditem.append(\n record_item\n )\n providers[provider].no_of_records_found += 1\n providers[provider].no_of_records_returned += 1\n return self.make('QueryResponse',\n provideritem=list(providers.values()))\n\n @staticmethod\n def mk_filename(pattern, queryresponse, resp, url):\n \"\"\"\n Generate the best possible (or least-worse) filename for a VSO download.\n\n * Use the ``content-disposition`` header.\n * Use `fileid` to generate a file name if content-disposition fails\n * If everything else fails use the last segment of the URL and hope.\n \"\"\"\n name = None\n if resp:\n cdheader = resp.headers.get(\"Content-Disposition\", None)\n if cdheader:\n value, params = cgi.parse_header(cdheader)\n name = params.get('filename', \"\")\n # Work around https://github.com/sunpy/sunpy/issues/3372\n if name.count('\"') >= 2:\n name = name.split('\"')[1]\n\n if name is None:\n # Advice from the VSO is to fallback to providerid + fileid\n # As it's possible multiple providers give the same fileid.\n # However, I haven't implemented this yet as it would be a breaking\n # change to the filenames we expect.\n\n # I don't know if we still need this bytes check in Python 3 only\n # land, but I don't dare remove it.\n if isinstance(queryresponse.fileid, bytes):\n fileid = queryresponse.fileid.decode(\"ascii\", \"ignore\")\n else:\n fileid = queryresponse.fileid\n\n # Some providers make fileid a path\n # Some also don't specify a file extension, but not a lot we can do\n # about that.\n name = fileid.split(\"/\")[-1]\n\n # If somehow we have got this far with an empty string, fallback to url segment\n if not name:\n name = url.split('/')[-1]\n\n # Remove any not-filename appropriate characters\n name = slugify(name)\n\n # If absolutely everything else fails make a filename based on download time\n if not name:\n name = f\"vso_file_{datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')}\"\n\n fname = pattern.format(file=name, **serialize_object(queryresponse))\n\n return fname\n\n @deprecated(\"1.0\", alternative=\"sunpy.net.Fido\")\n def query_legacy(self, tstart=None, tend=None, **kwargs):\n \"\"\"\n Query data from the VSO mocking the IDL API as close as possible.\n Either tstart and tend or date_start and date_end or date have\n to be supplied.\n\n Parameters\n ----------\n tstart : datetime.datetime\n Start of the time-range in which records are searched.\n tend : datetime.datetime\n Start of the time-range in which records are searched.\n date : str\n (start date) - (end date)\n start_date : datetime\n the start date\n end_date : datetime\n the end date\n wave : str\n (min) - (max) (unit)\n min_wave : str\n minimum spectral range\n max_wave : str\n maximum spectral range\n unit_wave : str\n spectral range units (Angstrom, GHz, keV)\n extent : str\n VSO 'extent type' ... 
(FULLDISK, CORONA, LIMB, etc)\n physobj : str\n VSO 'physical observable'\n provider : str\n VSO ID for the data provider (SDAC, NSO, SHA, MSU, etc)\n source : str\n spacecraft or observatory (SOHO, YOHKOH, BBSO, etc)\n synonyms : spacecraft, observatory\n instrument : str\n instrument ID (EIT, SXI-0, SXT, etc)\n synonyms : telescope, inst\n detector : str\n detector ID (C3, EUVI, COR2, etc.)\n layout : str\n layout of the data (image, spectrum, time_series, etc.)\n level : str\n level of the data product (numeric range, see below)\n pixels : str\n number of pixels (numeric range, see below)\n resolution : str\n effective resolution (1 = full, 0.5 = 2x2 binned, etc)\n numeric range, see below.\n pscale : str\n pixel scale, in arcseconds (numeric range, see below)\n near_time : datetime\n return record closest to the time. See below.\n sample : int\n attempt to return only one record per SAMPLE seconds. See below.\n\n Numeric Ranges:\n\n - May be entered as a string or any numeric type for equality matching\n - May be a string of the format '(min) - (max)' for range matching\n - May be a string of the form '(operator) (number)' where operator\n is one of: lt gt le ge < > <= >=\n\n\n Examples\n --------\n Query all data from eit between 2010-01-01T00:00 and\n 2010-01-01T01:00.\n\n >>> from datetime import datetime\n >>> from sunpy.net import vso\n >>> client = vso.VSOClient() # doctest: +SKIP\n >>> qr = client.query_legacy(datetime(2010, 1, 1),\n ... datetime(2010, 1, 1, 1),\n ... instrument='eit') # doctest: +SKIP\n\n Returns\n -------\n out : :py:class:`QueryResult` (enhanced list)\n Matched items. Return value is of same type as the one of\n :py:class:`VSOClient.search`.\n \"\"\"\n def sdk(key): return partial(lambda key, value: {key: value}, key)\n ALIASES = {\n 'wave_min': sdk('wave_wavemin'),\n 'wave_max': sdk('wave_wavemax'),\n 'wave_type': sdk('wave_wavetype'),\n 'wave_unit': sdk('wave_waveunit'),\n 'min_wave': sdk('wave_wavemin'),\n 'max_wave': sdk('wave_wavemax'),\n 'type_wave': sdk('wave_wavetype'),\n 'unit_wave': sdk('wave_waveunit'),\n 'wave': _parse_waverange,\n 'inst': sdk('instrument'),\n 'telescope': sdk('instrument'),\n 'spacecraft': sdk('source'),\n 'observatory': sdk('source'),\n 'start_date': sdk('time_start'),\n 'end_date': sdk('time_end'),\n 'start': sdk('time_start'),\n 'end': sdk('time_end'),\n 'near_time': sdk('time_near'),\n 'date': _parse_date,\n 'layout': sdk('datatype'),\n }\n if tstart is not None:\n kwargs.update({'time_start': tstart})\n if tend is not None:\n kwargs.update({'time_end': tend})\n\n QueryRequest = self.api.get_type('VSO:QueryRequest')\n VSOQueryResponse = self.api.get_type('VSO:QueryResponse')\n block = self.api.get_type('VSO:QueryRequestBlock')()\n\n for key, value in kwargs.items():\n for k, v in ALIASES.get(key, sdk(key))(value).items():\n if k.startswith('time'):\n v = parse_time(v).strftime(TIMEFORMAT)\n attr = k.split('_')\n lst = attr[-1]\n rest = attr[:-1]\n\n for elem in rest:\n try:\n if block[elem] is None:\n block[elem] = {}\n block = block[elem]\n except KeyError:\n raise ValueError(\n f\"Unexpected argument {key!s}.\")\n if lst in block and block[lst]:\n raise ValueError(\n f\"Got multiple values for {k!s}.\")\n block[lst] = v\n\n return QueryResponse.create(VSOQueryResponse(\n self.api.service.Query(QueryRequest(block=block))))\n\n @deprecated(\"1.0\")\n def latest(self):\n \"\"\" Return newest record (limited to last week). 
\"\"\"\n from datetime import datetime, timedelta\n return self.query_legacy(\n datetime.utcnow() - timedelta(7),\n datetime.utcnow(),\n time_near=datetime.utcnow()\n )\n\n def fetch(self, query_response, path=None, methods=None, site=None,\n progress=True, overwrite=False, downloader=None, wait=True):\n \"\"\"\n Download data specified in the query_response.\n\n Parameters\n ----------\n query_response : sunpy.net.vso.QueryResponse\n QueryResponse containing the items to be downloaded.\n\n path : str\n Specify where the data is to be downloaded. Can refer to arbitrary\n fields of the QueryResponseItem (instrument, source, time, ...) via\n string formatting, moreover the file-name of the file downloaded can\n be referred to as file, e.g.\n \"{source}/{instrument}/{time.start}/{file}\".\n\n methods : {list of str}\n Download methods, defaults to URL-FILE_Rice then URL-FILE.\n Methods are a concatenation of one PREFIX followed by any number of\n SUFFIXES i.e. `PREFIX-SUFFIX_SUFFIX2_SUFFIX3`.\n The full list of\n `PREFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_PREFIX>`_\n and `SUFFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_SUFFIX>`_\n are listed on the VSO site.\n\n site : str\n There are a number of caching mirrors for SDO and other\n instruments, some available ones are listed below.\n\n =============== ========================================================\n NSO National Solar Observatory, Tucson (US)\n SAO (aka CFA) Smithonian Astronomical Observatory, Harvard U. (US)\n SDAC (aka GSFC) Solar Data Analysis Center, NASA/GSFC (US)\n ROB Royal Observatory of Belgium (Belgium)\n MPS Max Planck Institute for Solar System Research (Germany)\n UCLan University of Central Lancashire (UK)\n IAS Institut Aeronautique et Spatial (France)\n KIS Kiepenheuer-Institut fur Sonnenphysik Germany)\n NMSU New Mexico State University (US)\n =============== ========================================================\n\n progress : `bool`, optional\n If `True` show a progress bar showing how many of the total files\n have been downloaded. If `False`, no progress bars will be shown at all.\n\n overwrite : `bool` or `str`, optional\n Determine how to handle downloading if a file already exists with the\n same name. If `False` the file download will be skipped and the path\n returned to the existing file, if `True` the file will be downloaded\n and the existing file will be overwritten, if `'unique'` the filename\n will be modified to be unique.\n\n downloader : `parfive.Downloader`, optional\n The download manager to use.\n\n wait : `bool`, optional\n If `False` ``downloader.download()`` will not be called. 
Only has\n any effect if `downloader` is not `None`.\n\n Returns\n -------\n out : `parfive.Results`\n Object that supplies a list of filenames and any errors.\n\n Examples\n --------\n >>> files = fetch(qr) # doctest:+SKIP\n \"\"\"\n if path is None:\n path = os.path.join(config.get('downloads', 'download_dir'),\n '{file}')\n elif isinstance(path, str) and '{file}' not in path:\n path = os.path.join(path, '{file}')\n path = os.path.expanduser(path)\n\n dl_set = True\n if not downloader:\n dl_set = False\n downloader = Downloader(progress=progress)\n\n fileids = VSOClient.by_fileid(query_response)\n if not fileids:\n return downloader.download() if wait else Results()\n # Adding the site parameter to the info\n info = {}\n if site is not None:\n info['site'] = site\n\n VSOGetDataResponse = self.api.get_type(\"VSO:VSOGetDataResponse\")\n\n data_request = self.make_getdatarequest(query_response, methods, info)\n data_response = VSOGetDataResponse(self.api.service.GetData(data_request))\n\n err_results = self.download_all(data_response, methods, downloader, path, fileids)\n\n if dl_set and not wait:\n return err_results\n\n results = downloader.download()\n results += err_results\n results._errors += err_results.errors\n return results\n\n @staticmethod\n def link(query_response, maps):\n \"\"\" Return list of paths with records associated with them in\n the meta attribute. \"\"\"\n if not maps:\n return []\n ret = []\n\n for record_item in query_response:\n try:\n item = _Str(maps[record_item.fileid]['path'])\n except KeyError:\n continue\n # pylint: disable=W0201\n item.meta = record_item\n ret.append(item)\n return ret\n\n def make_getdatarequest(self, response, methods=None, info=None):\n \"\"\" Make datarequest with methods from response. \"\"\"\n if methods is None:\n methods = self.method_order + ['URL']\n\n return self.create_getdatarequest(\n {k: [x.fileid for x in v]\n for k, v in self.by_provider(response).items()},\n methods, info\n )\n\n def create_getdatarequest(self, maps, methods, info=None):\n \"\"\" Create datarequest from maps mapping data provider to\n fileids and methods, \"\"\"\n if info is None:\n info = {}\n\n if 'email' not in info:\n info['email'] = 'sunpy'\n\n # For the JSOC provider we need to make a DataRequestItem for each\n # series, not just one for the whole provider.\n\n # Remove JSOC provider items from the map\n jsoc = maps.pop('JSOC', [])\n # Make DRIs for everything that's not JSOC one per provider\n dris = [self.make('DataRequestItem', provider=k, fileiditem={'fileid': v})\n for k, v in maps.items()]\n\n def series_func(x):\n \"\"\" Extract the series from the fileid. 
\"\"\"\n return x.split(':')[0]\n\n # Sort the JSOC fileids by series\n # This is a precursor to groupby as recommended by the groupby docs\n series_sorted = sorted(jsoc, key=series_func)\n # Iterate over the series and make a DRI for each.\n # groupby creates an iterator based on a key function, in this case\n # based on the series (the part before the first ':')\n for series, fileids in itertools.groupby(series_sorted, key=series_func):\n dris.append(self.make('DataRequestItem',\n provider='JSOC',\n fileiditem={'fileid': list(fileids)}))\n\n request = {'method': {'methodtype': methods},\n 'info': info,\n 'datacontainer': {'datarequestitem': dris}\n }\n\n return self.make('VSOGetDataRequest', request=request)\n\n # pylint: disable=R0913,R0912\n def download_all(self, response, methods, downloader, path, qr, info=None):\n results = Results()\n GET_VERSION = [\n ('0.8', (5, 8)),\n ('0.7', (1, 4)),\n ('0.6', (0, 3)),\n ]\n\n for dresponse in response.getdataresponseitem:\n for version, (from_, to) in GET_VERSION:\n if getattr(dresponse, version, '0.6') >= version:\n break\n else:\n results.add_error('', UnknownVersion(dresponse))\n continue\n\n # If from_ and to are uninitialized, the else block of the loop\n # continues the outer loop and thus this code is never reached.\n # pylint: disable=W0631\n code = (\n dresponse.status[from_:to]\n if getattr(dresponse, 'status', None) else '200'\n )\n if code == '200':\n for dataitem in dresponse.getdataitem.dataitem:\n\n try:\n self.download(\n dresponse.method.methodtype[0],\n dataitem.url,\n downloader,\n path,\n qr[dataitem.fileiditem.fileid[0]]\n )\n except NoData:\n results.add_error('', '', DownloadFailed(dresponse))\n continue\n\n elif code == '300' or code == '412' or code == '405':\n if code == '300':\n try:\n methods = self.multiple_choices(\n dresponse.method.methodtype, dresponse\n )\n except NoData:\n results.add_error('', '', MultipleChoices(dresponse))\n continue\n elif code == '412':\n try:\n info = self.missing_information(\n info, dresponse.info\n )\n except NoData:\n results.add_error('', '', MissingInformation(dresponse))\n continue\n elif code == '405':\n try:\n methods = self.unknown_method(dresponse)\n except NoData:\n results.add_error('', '', UnknownMethod(dresponse))\n continue\n\n files = []\n for dataitem in dresponse.getdataitem.dataitem:\n files.extend(dataitem.fileiditem.fileid)\n\n request = self.create_getdatarequest(\n {dresponse.provider: files}, methods, info\n )\n\n self.download_all(\n self.api.service.GetData(request), methods, downloader, path,\n qr, info\n )\n else:\n results.add_error('', '', UnknownStatus(dresponse))\n\n return results\n\n def download(self, method, url, downloader, *args):\n \"\"\" Enqueue a file to be downloaded, extra args are passed to ``mk_filename``\"\"\"\n if method.startswith('URL'):\n return downloader.enqueue_file(url, filename=partial(self.mk_filename, *args))\n\n raise NoData\n\n @staticmethod\n def by_provider(response):\n \"\"\"\n Returns a dictionary of provider\n corresponding to records in the response.\n \"\"\"\n\n map_ = defaultdict(list)\n for record in response:\n map_[record.provider].append(record)\n return map_\n\n @staticmethod\n def by_fileid(response):\n \"\"\"\n Returns a dictionary of fileids\n corresponding to records in the response.\n \"\"\"\n return {\n record.fileid: record for record in response\n }\n\n # pylint: disable=W0613\n def multiple_choices(self, choices, response):\n \"\"\" Override to pick between multiple download choices. 
\"\"\"\n for elem in self.method_order:\n if elem in choices:\n return [elem]\n raise NoData\n\n # pylint: disable=W0613\n def missing_information(self, info, field):\n \"\"\" Override to provide missing information. \"\"\"\n raise NoData\n\n # pylint: disable=W0613\n def unknown_method(self, response):\n \"\"\" Override to pick a new method if the current one is unknown. \"\"\"\n raise NoData\n\n @classmethod\n def _can_handle_query(cls, *query):\n # VSO Queries must have time\n if not core_attrs.Time in [type(a) for a in query]:\n return False\n return all([x.__class__.__name__ in core_attrs.__all__ + attrs.__all__ for x in query])\n\n @classmethod\n def _attrs_module(cls):\n return 'vso', 'sunpy.net.vso.attrs'\n",
"path": "sunpy/net/vso/vso.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThis module provides a wrapper around the VSO API.\n\"\"\"\n\nimport os\nimport re\nimport cgi\nimport socket\nimport datetime\nimport warnings\nimport itertools\nfrom functools import partial\nfrom collections import defaultdict\nfrom urllib.error import URLError, HTTPError\nfrom urllib.request import urlopen\n\nimport zeep\nfrom parfive import Downloader, Results\nfrom zeep.helpers import serialize_object\n\nimport astropy.units as u\nfrom astropy.table import QTable as Table\n\nfrom sunpy import config\nfrom sunpy.net.attr import and_\nfrom sunpy.net.base_client import BaseClient, BaseQueryResponse\nfrom sunpy.net.vso import attrs\nfrom sunpy.net.vso.attrs import _TIMEFORMAT as TIMEFORMAT\nfrom sunpy.net.vso.attrs import _walker as walker\nfrom sunpy.time import TimeRange, parse_time\nfrom sunpy.util.decorators import deprecated\nfrom sunpy.util.exceptions import SunpyUserWarning\nfrom sunpy.util.net import slugify\n\nfrom .. import _attrs as core_attrs\nfrom .zeep_plugins import SunPyLoggingZeepPlugin\nfrom .exceptions import *\n\nTIME_FORMAT = config.get(\"general\", \"time_format\")\n\nDEFAULT_URL_PORT = [{'url': 'http://docs.virtualsolar.org/WSDL/VSOi_rpc_literal.wsdl',\n 'port': 'nsoVSOi'},\n {'url': 'https://sdac.virtualsolar.org/API/VSOi_rpc_literal.wsdl',\n 'port': 'sdacVSOi'}]\n\nRANGE = re.compile(r'(\\d+)(\\s*-\\s*(\\d+))?(\\s*([a-zA-Z]+))?')\n\n\nclass _Str(str):\n\n \"\"\" Subclass of string that contains a meta attribute for the\n record_item associated with the file. \"\"\"\n pass\n\n\n# ----------------------------------------\n\ndef _parse_waverange(string):\n min_, max_, unit = RANGE.match(string).groups()[::2]\n return {\n 'wave_wavemin': min_,\n 'wave_wavemax': min_ if max_ is None else max_,\n 'wave_waveunit': 'Angstrom' if unit is None else unit,\n }\n\n\ndef _parse_date(string):\n start, end = string.split(' - ')\n return {'time_start': start.strip(), 'time_end': end.strip()}\n\n\ndef iter_records(response):\n for prov_item in response.provideritem:\n if not hasattr(prov_item, 'record') or not prov_item.record:\n continue\n yield from prov_item.record.recorditem\n\n\ndef iter_errors(response):\n for prov_item in response.provideritem:\n if not hasattr(prov_item, 'record') or not prov_item.record:\n yield prov_item\n\n\ndef check_connection(url):\n try:\n return urlopen(url).getcode() == 200\n except (socket.error, socket.timeout, HTTPError, URLError) as e:\n warnings.warn(f\"Connection to {url} failed with error {e}. 
Retrying with different url and port.\",\n SunpyUserWarning)\n return None\n\n\ndef get_online_vso_url():\n \"\"\"\n Return the first VSO url and port combination that is online.\n \"\"\"\n for mirror in DEFAULT_URL_PORT:\n if check_connection(mirror['url']):\n return mirror\n\n\ndef build_client(url=None, port_name=None, **kwargs):\n \"\"\"\n Construct a `zeep.Client` object to connect to VSO.\n\n Parameters\n ----------\n url : `str`\n The URL to connect to.\n\n port_name : `str`\n The \"port\" to use.\n\n kwargs : `dict`\n All extra keyword arguments are passed to `zeep.Client`.\n\n Returns\n -------\n\n `zeep.Client`\n \"\"\"\n if url is None and port_name is None:\n mirror = get_online_vso_url()\n if mirror is None:\n raise ConnectionError(\"No online VSO mirrors could be found.\")\n url = mirror['url']\n port_name = mirror['port']\n elif url and port_name:\n if not check_connection(url):\n raise ConnectionError(f\"Can't connect to url {url}\")\n else:\n raise ValueError(\"Both url and port_name must be specified if either is.\")\n\n if \"plugins\" not in kwargs:\n kwargs[\"plugins\"] = [SunPyLoggingZeepPlugin()]\n\n client = zeep.Client(url, port_name=port_name, **kwargs)\n client.set_ns_prefix('VSO', 'http://virtualsolar.org/VSO/VSOi')\n return client\n\n\nclass QueryResponse(BaseQueryResponse):\n \"\"\"\n A container for VSO Records returned from VSO Searches.\n \"\"\"\n\n def __init__(self, lst, queryresult=None):\n super().__init__()\n self._data = lst\n self.queryresult = queryresult\n self.errors = []\n self._client = VSOClient()\n\n def __getitem__(self, item):\n # Always index so a list comes back\n if isinstance(item, int):\n item = slice(item, item+1)\n return type(self)(self._data[item], queryresult=self.queryresult)\n\n def __len__(self):\n return len(self._data)\n\n def __iter__(self):\n for block in self._data:\n yield block\n\n @property\n def blocks(self):\n return self._data\n\n @property\n def client(self):\n return self._client\n\n @client.setter\n def client(self, client):\n self._client = client\n\n def search(self, *query):\n \"\"\" Furtherly reduce the query response by matching it against\n another query, e.g. response.search(attrs.Instrument('aia')). \"\"\"\n query = and_(*query)\n return QueryResponse(\n attrs._filter_results(query, self), self.queryresult\n )\n\n @classmethod\n def create(cls, queryresult):\n return cls(list(iter_records(queryresult)), queryresult)\n\n def total_size(self):\n \"\"\" Total size of data in KB. May be less than the actual\n size because of inaccurate data providers. \"\"\"\n # Warn about -1 values?\n return sum(record.size for record in self if record.size > 0)\n\n def time_range(self):\n \"\"\" Return total time-range all records span across. 
\"\"\"\n return TimeRange(min(record.time.start for record in self if record.time.start is not None),\n max(record.time.end for record in self if record.time.end is not None))\n\n def build_table(self):\n \"\"\"\n Create a human readable table.\n\n Returns\n -------\n table : `astropy.table.QTable`\n \"\"\"\n keywords = ['Start Time', 'End Time', 'Source', 'Instrument', 'Type', 'Wavelength']\n record_items = {}\n for key in keywords:\n record_items[key] = []\n\n def validate_time(time):\n # Handle if the time is None when coming back from VSO\n if time is None:\n return ['None']\n if record.time.start is not None:\n return [parse_time(time).strftime(TIME_FORMAT)]\n else:\n return ['N/A']\n\n for record in self:\n record_items['Start Time'].append(validate_time(record.time.start))\n record_items['End Time'].append(validate_time(record.time.end))\n record_items['Source'].append(str(record.source))\n record_items['Instrument'].append(str(record.instrument))\n record_items['Type'].append(str(record.extent.type)\n if record.extent.type is not None else ['N/A'])\n # If we have a start and end Wavelength, make a quantity\n if hasattr(record, 'wave') and record.wave.wavemin and record.wave.wavemax:\n unit = record.wave.waveunit\n # Convert this so astropy units parses it correctly\n if unit == \"kev\":\n unit = \"keV\"\n record_items['Wavelength'].append(u.Quantity([float(record.wave.wavemin),\n float(record.wave.wavemax)],\n unit=unit))\n # If not save None\n else:\n record_items['Wavelength'].append(None)\n # If we have no wavelengths for the whole list, drop the col\n if all([a is None for a in record_items['Wavelength']]):\n record_items.pop('Wavelength')\n keywords.remove('Wavelength')\n else:\n # Make whole column a quantity\n try:\n with u.set_enabled_equivalencies(u.spectral()):\n record_items['Wavelength'] = u.Quantity(record_items['Wavelength'])\n # If we have mixed units or some Nones just represent as strings\n except (u.UnitConversionError, TypeError):\n record_items['Wavelength'] = [str(a) for a in record_items['Wavelength']]\n\n return Table(record_items)[keywords]\n\n def add_error(self, exception):\n self.errors.append(exception)\n\n def response_block_properties(self):\n \"\"\"\n Returns a set of class attributes on all the response blocks.\n\n Returns\n -------\n s : `set`\n List of strings, containing attribute names in the response blocks.\n \"\"\"\n s = {a if not a.startswith('_') else None for a in dir(self[0])}\n for resp in self[1:]:\n s = s.intersection({a if not a.startswith('_') else None for a in dir(resp)})\n\n s.remove(None)\n return s\n\n\nclass VSOClient(BaseClient):\n \"\"\"\n VSO Client\n\n Parameters\n ----------\n url : `str`, optional\n The VSO url to use. If not specified will use the first online known URL.\n\n port : `str`, optional\n The VSO port name to use. If not specified will use the first online known URL.\n\n api : `zeep.Client`, optional\n The `zeep.Client` instance to use for interacting with the VSO. 
If not\n specified one will be created.\n \"\"\"\n method_order = [\n 'URL-FILE_Rice', 'URL-FILE', 'URL-packaged', 'URL-TAR_GZ', 'URL-ZIP', 'URL-TAR',\n ]\n\n def __init__(self, url=None, port=None, api=None):\n if not isinstance(api, zeep.Client):\n api = build_client(url, port)\n if api is None:\n raise ConnectionError(\"Cannot find an online VSO mirror.\")\n self.api = api\n\n def make(self, atype, **kwargs):\n \"\"\"\n Create a new SOAP object.\n \"\"\"\n obj = self.api.get_type(f\"VSO:{atype}\")\n return obj(**kwargs)\n\n def search(self, *query):\n \"\"\" Query data from the VSO with the new API. Takes a variable number\n of attributes as parameter, which are chained together using AND.\n\n The new query language allows complex queries to be easily formed.\n\n Examples\n --------\n Query all data from eit or aia between 2010-01-01T00:00 and\n 2010-01-01T01:00.\n\n >>> from datetime import datetime\n >>> from sunpy.net import vso, attrs as a\n >>> client = vso.VSOClient() # doctest: +REMOTE_DATA\n >>> client.search(\n ... a.Time(datetime(2010, 1, 1), datetime(2010, 1, 1, 1)),\n ... a.Instrument('eit') | a.Instrument('aia')) # doctest: +REMOTE_DATA\n <sunpy.net.vso.vso.QueryResponse object at ...>\n Start Time [1] End Time [1] Source ... Type Wavelength [2]\n ... Angstrom\n ------------------- ------------------- ------ ... -------- --------------\n 2010-01-01 00:00:08 2010-01-01 00:00:20 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:12:08 2010-01-01 00:12:20 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:24:10 2010-01-01 00:24:22 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:36:08 2010-01-01 00:36:20 SOHO ... FULLDISK 195.0 .. 195.0\n 2010-01-01 00:48:09 2010-01-01 00:48:21 SOHO ... FULLDISK 195.0 .. 195.0\n\n Returns\n -------\n out : :py:class:`QueryResult` (enhanced list)\n Matched items. Return value is of same type as the one of\n :py:meth:`VSOClient.search`.\n \"\"\"\n query = and_(*query)\n QueryRequest = self.api.get_type('VSO:QueryRequest')\n VSOQueryResponse = self.api.get_type('VSO:QueryResponse')\n responses = []\n for block in walker.create(query, self.api):\n try:\n query_response = self.api.service.Query(\n QueryRequest(block=block)\n )\n for resp in query_response:\n if resp[\"error\"]:\n warnings.warn(resp[\"error\"], SunpyUserWarning)\n responses.append(\n VSOQueryResponse(query_response)\n )\n except Exception as ex:\n response = QueryResponse.create(self.merge(responses))\n response.add_error(ex)\n\n return QueryResponse.create(self.merge(responses))\n\n def merge(self, queryresponses):\n \"\"\" Merge responses into one. 
\"\"\"\n if len(queryresponses) == 1:\n return queryresponses[0]\n\n fileids = set()\n providers = {}\n\n for queryresponse in queryresponses:\n for provideritem in queryresponse.provideritem:\n provider = provideritem.provider\n if not hasattr(provideritem, 'record'):\n continue\n if not hasattr(provideritem.record, 'recorditem'):\n continue\n if provideritem.provider not in providers:\n providers[provider] = provideritem\n fileids |= {\n record_item.fileid\n for record_item in provideritem.record.recorditem\n }\n else:\n for record_item in provideritem.record.recorditem:\n if record_item.fileid not in fileids:\n fileids.add(record_item.fileid)\n providers[provider].record.recorditem.append(\n record_item\n )\n providers[provider].no_of_records_found += 1\n providers[provider].no_of_records_returned += 1\n return self.make('QueryResponse',\n provideritem=list(providers.values()))\n\n @staticmethod\n def mk_filename(pattern, queryresponse, resp, url):\n \"\"\"\n Generate the best possible (or least-worse) filename for a VSO download.\n\n * Use the ``content-disposition`` header.\n * Use `fileid` to generate a file name if content-disposition fails\n * If everything else fails use the last segment of the URL and hope.\n \"\"\"\n name = None\n if resp:\n cdheader = resp.headers.get(\"Content-Disposition\", None)\n if cdheader:\n value, params = cgi.parse_header(cdheader)\n name = params.get('filename', \"\")\n # Work around https://github.com/sunpy/sunpy/issues/3372\n if name.count('\"') >= 2:\n name = name.split('\"')[1]\n\n if name is None:\n # Advice from the VSO is to fallback to providerid + fileid\n # As it's possible multiple providers give the same fileid.\n # However, I haven't implemented this yet as it would be a breaking\n # change to the filenames we expect.\n\n # I don't know if we still need this bytes check in Python 3 only\n # land, but I don't dare remove it.\n if isinstance(queryresponse.fileid, bytes):\n fileid = queryresponse.fileid.decode(\"ascii\", \"ignore\")\n else:\n fileid = queryresponse.fileid\n\n # Some providers make fileid a path\n # Some also don't specify a file extension, but not a lot we can do\n # about that.\n name = fileid.split(\"/\")[-1]\n\n # If somehow we have got this far with an empty string, fallback to url segment\n if not name:\n name = url.split('/')[-1]\n\n # Remove any not-filename appropriate characters\n name = slugify(name)\n\n # If absolutely everything else fails make a filename based on download time\n if not name:\n name = f\"vso_file_{datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')}\"\n\n fname = pattern.format(file=name, **serialize_object(queryresponse))\n\n return fname\n\n @deprecated(\"1.0\", alternative=\"sunpy.net.Fido\")\n def query_legacy(self, tstart=None, tend=None, **kwargs):\n \"\"\"\n Query data from the VSO mocking the IDL API as close as possible.\n Either tstart and tend or date_start and date_end or date have\n to be supplied.\n\n Parameters\n ----------\n tstart : datetime.datetime\n Start of the time-range in which records are searched.\n tend : datetime.datetime\n Start of the time-range in which records are searched.\n date : str\n (start date) - (end date)\n start_date : datetime\n the start date\n end_date : datetime\n the end date\n wave : str\n (min) - (max) (unit)\n min_wave : str\n minimum spectral range\n max_wave : str\n maximum spectral range\n unit_wave : str\n spectral range units (Angstrom, GHz, keV)\n extent : str\n VSO 'extent type' ... 
(FULLDISK, CORONA, LIMB, etc)\n physobj : str\n VSO 'physical observable'\n provider : str\n VSO ID for the data provider (SDAC, NSO, SHA, MSU, etc)\n source : str\n spacecraft or observatory (SOHO, YOHKOH, BBSO, etc)\n synonyms : spacecraft, observatory\n instrument : str\n instrument ID (EIT, SXI-0, SXT, etc)\n synonyms : telescope, inst\n detector : str\n detector ID (C3, EUVI, COR2, etc.)\n layout : str\n layout of the data (image, spectrum, time_series, etc.)\n level : str\n level of the data product (numeric range, see below)\n pixels : str\n number of pixels (numeric range, see below)\n resolution : str\n effective resolution (1 = full, 0.5 = 2x2 binned, etc)\n numeric range, see below.\n pscale : str\n pixel scale, in arcseconds (numeric range, see below)\n near_time : datetime\n return record closest to the time. See below.\n sample : int\n attempt to return only one record per SAMPLE seconds. See below.\n\n Numeric Ranges:\n\n - May be entered as a string or any numeric type for equality matching\n - May be a string of the format '(min) - (max)' for range matching\n - May be a string of the form '(operator) (number)' where operator\n is one of: lt gt le ge < > <= >=\n\n\n Examples\n --------\n Query all data from eit between 2010-01-01T00:00 and\n 2010-01-01T01:00.\n\n >>> from datetime import datetime\n >>> from sunpy.net import vso\n >>> client = vso.VSOClient() # doctest: +SKIP\n >>> qr = client.query_legacy(datetime(2010, 1, 1),\n ... datetime(2010, 1, 1, 1),\n ... instrument='eit') # doctest: +SKIP\n\n Returns\n -------\n out : :py:class:`QueryResult` (enhanced list)\n Matched items. Return value is of same type as the one of\n :py:class:`VSOClient.search`.\n \"\"\"\n def sdk(key): return partial(lambda key, value: {key: value}, key)\n ALIASES = {\n 'wave_min': sdk('wave_wavemin'),\n 'wave_max': sdk('wave_wavemax'),\n 'wave_type': sdk('wave_wavetype'),\n 'wave_unit': sdk('wave_waveunit'),\n 'min_wave': sdk('wave_wavemin'),\n 'max_wave': sdk('wave_wavemax'),\n 'type_wave': sdk('wave_wavetype'),\n 'unit_wave': sdk('wave_waveunit'),\n 'wave': _parse_waverange,\n 'inst': sdk('instrument'),\n 'telescope': sdk('instrument'),\n 'spacecraft': sdk('source'),\n 'observatory': sdk('source'),\n 'start_date': sdk('time_start'),\n 'end_date': sdk('time_end'),\n 'start': sdk('time_start'),\n 'end': sdk('time_end'),\n 'near_time': sdk('time_near'),\n 'date': _parse_date,\n 'layout': sdk('datatype'),\n }\n if tstart is not None:\n kwargs.update({'time_start': tstart})\n if tend is not None:\n kwargs.update({'time_end': tend})\n\n QueryRequest = self.api.get_type('VSO:QueryRequest')\n VSOQueryResponse = self.api.get_type('VSO:QueryResponse')\n block = self.api.get_type('VSO:QueryRequestBlock')()\n\n for key, value in kwargs.items():\n for k, v in ALIASES.get(key, sdk(key))(value).items():\n if k.startswith('time'):\n v = parse_time(v).strftime(TIMEFORMAT)\n attr = k.split('_')\n lst = attr[-1]\n rest = attr[:-1]\n\n for elem in rest:\n try:\n if block[elem] is None:\n block[elem] = {}\n block = block[elem]\n except KeyError:\n raise ValueError(\n f\"Unexpected argument {key!s}.\")\n if lst in block and block[lst]:\n raise ValueError(\n f\"Got multiple values for {k!s}.\")\n block[lst] = v\n\n return QueryResponse.create(VSOQueryResponse(\n self.api.service.Query(QueryRequest(block=block))))\n\n @deprecated(\"1.0\")\n def latest(self):\n \"\"\" Return newest record (limited to last week). 
\"\"\"\n from datetime import datetime, timedelta\n return self.query_legacy(\n datetime.utcnow() - timedelta(7),\n datetime.utcnow(),\n time_near=datetime.utcnow()\n )\n\n def fetch(self, query_response, path=None, methods=None, site=None,\n progress=True, overwrite=False, downloader=None, wait=True):\n \"\"\"\n Download data specified in the query_response.\n\n Parameters\n ----------\n query_response : sunpy.net.vso.QueryResponse\n QueryResponse containing the items to be downloaded.\n\n path : str\n Specify where the data is to be downloaded. Can refer to arbitrary\n fields of the QueryResponseItem (instrument, source, time, ...) via\n string formatting, moreover the file-name of the file downloaded can\n be referred to as file, e.g.\n \"{source}/{instrument}/{time.start}/{file}\".\n\n methods : {list of str}\n Download methods, defaults to URL-FILE_Rice then URL-FILE.\n Methods are a concatenation of one PREFIX followed by any number of\n SUFFIXES i.e. `PREFIX-SUFFIX_SUFFIX2_SUFFIX3`.\n The full list of\n `PREFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_PREFIX>`_\n and `SUFFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_SUFFIX>`_\n are listed on the VSO site.\n\n site : str\n There are a number of caching mirrors for SDO and other\n instruments, some available ones are listed below.\n\n =============== ========================================================\n NSO National Solar Observatory, Tucson (US)\n SAO (aka CFA) Smithonian Astronomical Observatory, Harvard U. (US)\n SDAC (aka GSFC) Solar Data Analysis Center, NASA/GSFC (US)\n ROB Royal Observatory of Belgium (Belgium)\n MPS Max Planck Institute for Solar System Research (Germany)\n UCLan University of Central Lancashire (UK)\n IAS Institut Aeronautique et Spatial (France)\n KIS Kiepenheuer-Institut fur Sonnenphysik Germany)\n NMSU New Mexico State University (US)\n =============== ========================================================\n\n progress : `bool`, optional\n If `True` show a progress bar showing how many of the total files\n have been downloaded. If `False`, no progress bars will be shown at all.\n\n overwrite : `bool` or `str`, optional\n Determine how to handle downloading if a file already exists with the\n same name. If `False` the file download will be skipped and the path\n returned to the existing file, if `True` the file will be downloaded\n and the existing file will be overwritten, if `'unique'` the filename\n will be modified to be unique.\n\n downloader : `parfive.Downloader`, optional\n The download manager to use.\n\n wait : `bool`, optional\n If `False` ``downloader.download()`` will not be called. 
Only has\n any effect if `downloader` is not `None`.\n\n Returns\n -------\n out : `parfive.Results`\n Object that supplies a list of filenames and any errors.\n\n Examples\n --------\n >>> files = fetch(qr) # doctest:+SKIP\n \"\"\"\n if path is None:\n path = os.path.join(config.get('downloads', 'download_dir'),\n '{file}')\n elif isinstance(path, str) and '{file}' not in path:\n path = os.path.join(path, '{file}')\n path = os.path.expanduser(path)\n\n dl_set = True\n if not downloader:\n dl_set = False\n downloader = Downloader(progress=progress)\n\n fileids = VSOClient.by_fileid(query_response)\n if not fileids:\n return downloader.download() if wait else Results()\n # Adding the site parameter to the info\n info = {}\n if site is not None:\n info['site'] = site\n\n VSOGetDataResponse = self.api.get_type(\"VSO:VSOGetDataResponse\")\n\n data_request = self.make_getdatarequest(query_response, methods, info)\n data_response = VSOGetDataResponse(self.api.service.GetData(data_request))\n\n err_results = self.download_all(data_response, methods, downloader, path, fileids)\n\n if dl_set and not wait:\n return err_results\n\n results = downloader.download()\n results += err_results\n results._errors += err_results.errors\n return results\n\n @staticmethod\n def link(query_response, maps):\n \"\"\" Return list of paths with records associated with them in\n the meta attribute. \"\"\"\n if not maps:\n return []\n ret = []\n\n for record_item in query_response:\n try:\n item = _Str(maps[record_item.fileid]['path'])\n except KeyError:\n continue\n # pylint: disable=W0201\n item.meta = record_item\n ret.append(item)\n return ret\n\n def make_getdatarequest(self, response, methods=None, info=None):\n \"\"\" Make datarequest with methods from response. \"\"\"\n if methods is None:\n methods = self.method_order + ['URL']\n\n return self.create_getdatarequest(\n {k: [x.fileid for x in v]\n for k, v in self.by_provider(response).items()},\n methods, info\n )\n\n def create_getdatarequest(self, maps, methods, info=None):\n \"\"\" Create datarequest from maps mapping data provider to\n fileids and methods, \"\"\"\n if info is None:\n info = {}\n\n if 'email' not in info:\n info['email'] = 'sunpy'\n\n # For the JSOC provider we need to make a DataRequestItem for each\n # series, not just one for the whole provider.\n\n # Remove JSOC provider items from the map\n jsoc = maps.pop('JSOC', [])\n # Make DRIs for everything that's not JSOC one per provider\n dris = [self.make('DataRequestItem', provider=k, fileiditem={'fileid': v})\n for k, v in maps.items()]\n\n def series_func(x):\n \"\"\" Extract the series from the fileid. 
\"\"\"\n return x.split(':')[0]\n\n # Sort the JSOC fileids by series\n # This is a precursor to groupby as recommended by the groupby docs\n series_sorted = sorted(jsoc, key=series_func)\n # Iterate over the series and make a DRI for each.\n # groupby creates an iterator based on a key function, in this case\n # based on the series (the part before the first ':')\n for series, fileids in itertools.groupby(series_sorted, key=series_func):\n dris.append(self.make('DataRequestItem',\n provider='JSOC',\n fileiditem={'fileid': list(fileids)}))\n\n request = {'method': {'methodtype': methods},\n 'info': info,\n 'datacontainer': {'datarequestitem': dris}\n }\n\n return self.make('VSOGetDataRequest', request=request)\n\n # pylint: disable=R0913,R0912\n def download_all(self, response, methods, downloader, path, qr, info=None):\n results = Results()\n GET_VERSION = [\n ('0.8', (5, 8)),\n ('0.7', (1, 4)),\n ('0.6', (0, 3)),\n ]\n\n for dresponse in response.getdataresponseitem:\n for version, (from_, to) in GET_VERSION:\n if getattr(dresponse, version, '0.6') >= version:\n break\n else:\n results.add_error('', UnknownVersion(dresponse))\n continue\n\n # If from_ and to are uninitialized, the else block of the loop\n # continues the outer loop and thus this code is never reached.\n # pylint: disable=W0631\n code = (\n dresponse.status[from_:to]\n if getattr(dresponse, 'status', None) else '200'\n )\n if code == '200':\n for dataitem in dresponse.getdataitem.dataitem:\n\n try:\n self.download(\n dresponse.method.methodtype[0],\n dataitem.url,\n downloader,\n path,\n qr[dataitem.fileiditem.fileid[0]]\n )\n except NoData:\n results.add_error('', '', DownloadFailed(dresponse))\n continue\n\n elif code == '300' or code == '412' or code == '405':\n if code == '300':\n try:\n methods = self.multiple_choices(\n dresponse.method.methodtype, dresponse\n )\n except NoData:\n results.add_error('', '', MultipleChoices(dresponse))\n continue\n elif code == '412':\n try:\n info = self.missing_information(\n info, dresponse.info\n )\n except NoData:\n results.add_error('', '', MissingInformation(dresponse))\n continue\n elif code == '405':\n try:\n methods = self.unknown_method(dresponse)\n except NoData:\n results.add_error('', '', UnknownMethod(dresponse))\n continue\n\n files = []\n for dataitem in dresponse.getdataitem.dataitem:\n files.extend(dataitem.fileiditem.fileid)\n\n request = self.create_getdatarequest(\n {dresponse.provider: files}, methods, info\n )\n\n self.download_all(\n self.api.service.GetData(request), methods, downloader, path,\n qr, info\n )\n else:\n results.add_error('', '', UnknownStatus(dresponse))\n\n return results\n\n def download(self, method, url, downloader, *args):\n \"\"\" Enqueue a file to be downloaded, extra args are passed to ``mk_filename``\"\"\"\n if method.startswith('URL'):\n return downloader.enqueue_file(url, filename=partial(self.mk_filename, *args))\n\n raise NoData\n\n @staticmethod\n def by_provider(response):\n \"\"\"\n Returns a dictionary of provider\n corresponding to records in the response.\n \"\"\"\n\n map_ = defaultdict(list)\n for record in response:\n map_[record.provider].append(record)\n return map_\n\n @staticmethod\n def by_fileid(response):\n \"\"\"\n Returns a dictionary of fileids\n corresponding to records in the response.\n \"\"\"\n return {\n record.fileid: record for record in response\n }\n\n # pylint: disable=W0613\n def multiple_choices(self, choices, response):\n \"\"\" Override to pick between multiple download choices. 
\"\"\"\n for elem in self.method_order:\n if elem in choices:\n return [elem]\n raise NoData\n\n # pylint: disable=W0613\n def missing_information(self, info, field):\n \"\"\" Override to provide missing information. \"\"\"\n raise NoData\n\n # pylint: disable=W0613\n def unknown_method(self, response):\n \"\"\" Override to pick a new method if the current one is unknown. \"\"\"\n raise NoData\n\n @classmethod\n def _can_handle_query(cls, *query):\n # VSO Queries must have time\n if not core_attrs.Time in [type(a) for a in query]:\n return False\n return all([x.__class__.__name__ in core_attrs.__all__ + attrs.__all__ for x in query])\n\n @classmethod\n def _attrs_module(cls):\n return 'vso', 'sunpy.net.vso.attrs'\n\n def __del__(self):\n self.api.transport.session.close()\n",
"path": "sunpy/net/vso/vso.py"
}
] | diff --git a/changelog/3973.bugfix.rst b/changelog/3973.bugfix.rst
new file mode 100644
index 00000000000..a512749c567
--- /dev/null
+++ b/changelog/3973.bugfix.rst
@@ -0,0 +1 @@
+Closed the session in the destructor of VSOClient thus solving the problem of socket being left open
diff --git a/sunpy/net/tests/test_fido.py b/sunpy/net/tests/test_fido.py
index 93d4e3b4c6c..601be7d2d72 100644
--- a/sunpy/net/tests/test_fido.py
+++ b/sunpy/net/tests/test_fido.py
@@ -206,7 +206,7 @@ def test_tables_single_response():
@pytest.mark.remote_data
def test_tables_multiple_response():
results = Fido.search(a.Time('2012/3/4', '2012/3/6'),
- a.Instrument('lyra') | (a.Instrument('rhessi') & a.Physobs("summary_lightcurve")))
+ a.Instrument('lyra') | (a.Instrument('rhessi') & a.Physobs("summary_lightcurve")))
tables = results.tables
assert isinstance(tables, list)
assert all(isinstance(t, Table) for t in tables)
@@ -460,6 +460,14 @@ def test_vso_fetch_hmi(tmpdir):
assert len(files) == 1
[email protected]_data
+def test_unclosedSocket_warning():
+ with pytest.warns(None):
+ attrs_time = a.Time('2005/01/01 00:10', '2005/01/01 00:15')
+ result = Fido.search(attrs_time, a.Instrument('eit'))
+ Fido.fetch(result)
+
+
def test_fido_no_time(mocker):
jsoc_mock = mocker.patch("sunpy.net.jsoc.JSOCClient.search")
jsoc_mock.return_value = jsoc.JSOCResponse()
@@ -468,6 +476,7 @@ def test_fido_no_time(mocker):
jsoc_mock.assert_called_once()
+
@pytest.mark.remote_data
def test_slice_jsoc():
tstart = '2011/06/07 06:32:45'
diff --git a/sunpy/net/vso/vso.py b/sunpy/net/vso/vso.py
index 1076632b3ee..d12cd20eecf 100644
--- a/sunpy/net/vso/vso.py
+++ b/sunpy/net/vso/vso.py
@@ -909,3 +909,6 @@ def _can_handle_query(cls, *query):
@classmethod
def _attrs_module(cls):
return 'vso', 'sunpy.net.vso.attrs'
+
+ def __del__(self):
+ self.api.transport.session.close()
|
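The sunpy patch above stops a socket from being left open by closing the HTTP session when the client is destroyed. Below is a minimal, hypothetical sketch of that cleanup pattern in plain Python; `ExampleClient` is not sunpy's class, and the only detail carried over from the diff is that the underlying transport exposes a `requests.Session` whose `close()` releases the pooled connections.

```python
import requests


class ExampleClient:
    """Illustrative client owning a requests.Session (an assumption that
    mirrors the diff's self.api.transport.session attribute)."""

    def __init__(self):
        self.session = requests.Session()

    def __del__(self):
        # Close the connection pool when the object is garbage collected,
        # so no socket stays open after the client goes out of scope.
        self.session.close()
```

Because `__del__` only runs when the interpreter actually collects the object, an explicit `close()` method or a context manager is the stricter alternative; the destructor approach has the advantage of requiring no change to calling code.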
Nitrate__Nitrate-603 | Upgrade celery to 4.3.0
As per title. Remove `skipIf` from test `test_uses_celery`.
| [
{
"content": "# -*- coding: utf-8 -*-\n\nfrom setuptools import setup, find_packages\n\n\nwith open('VERSION.txt', 'r') as f:\n pkg_version = f.read().strip()\n\n\ndef get_long_description():\n with open('README.rst', 'r') as f:\n return f.read()\n\n\ninstall_requires = [\n 'beautifulsoup4 >= 4.1.1',\n 'django >= 2.1,<3.0',\n 'django-contrib-comments == 1.9.1',\n 'django-tinymce == 2.7.0',\n 'django-uuslug == 1.1.8',\n 'html2text',\n 'odfpy >= 0.9.6',\n 'python-bugzilla',\n 'xmltodict',\n 'kobo == 0.9.0'\n]\n\nextras_require = {\n 'mysql': ['mysqlclient >= 1.2.3'],\n 'pgsql': ['psycopg2 == 2.7.5'],\n\n # Required for tcms.auth.backends.KerberosBackend\n 'krbauth': [\n 'kerberos == 1.2.5'\n ],\n\n # Packages for building documentation\n 'docs': [\n 'Sphinx >= 1.1.2',\n 'sphinx_rtd_theme',\n ],\n\n # Necessary packages for running tests\n 'tests': [\n 'beautifulsoup4',\n 'coverage',\n 'factory_boy',\n 'flake8',\n 'pytest',\n 'pytest-cov',\n 'pytest-django',\n ],\n\n # Contain tools that assists the development\n 'devtools': [\n 'django-debug-toolbar',\n 'tox',\n 'django-extensions',\n 'pygraphviz',\n ],\n\n # Required packages required to run async tasks\n 'async': [\n 'celery == 4.2.0',\n ],\n\n 'multiauth': [\n 'social-auth-app-django == 3.1.0',\n ]\n}\n\nsetup(\n name='nitrate-tcms',\n version=pkg_version,\n description='A full-featured Test Case Management System',\n long_description=get_long_description(),\n author='Nitrate Team',\n maintainer='Chenxiong Qi',\n maintainer_email='[email protected]',\n url='https://github.com/Nitrate/Nitrate/',\n license='GPLv2+',\n keywords='test case',\n install_requires=install_requires,\n extras_require=extras_require,\n python_requires='>=3.6',\n package_dir={'': 'src'},\n packages=find_packages('src', exclude=['test*']),\n include_package_data=True,\n zip_safe=False,\n classifiers=[\n 'Framework :: Django',\n 'Framework :: Django :: 2.0',\n 'Framework :: Django :: 2.1',\n 'Framework :: Django :: 2.2',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Software Development :: Testing',\n ],\n project_urls={\n 'Issue Tracker': 'https://github.com/Nitrate/Nitrate/issues',\n 'Source Code': 'https://github.com/Nitrate/Nitrate',\n 'Documentation': 'https://nitrate.readthedocs.io/',\n },\n)\n",
"path": "setup.py"
}
] | [
{
"content": "# -*- coding: utf-8 -*-\n\nfrom setuptools import setup, find_packages\n\n\nwith open('VERSION.txt', 'r') as f:\n pkg_version = f.read().strip()\n\n\ndef get_long_description():\n with open('README.rst', 'r') as f:\n return f.read()\n\n\ninstall_requires = [\n 'beautifulsoup4 >= 4.1.1',\n 'django >= 2.1,<3.0',\n 'django-contrib-comments == 1.9.1',\n 'django-tinymce == 2.7.0',\n 'django-uuslug == 1.1.8',\n 'html2text',\n 'odfpy >= 0.9.6',\n 'python-bugzilla',\n 'xmltodict',\n 'kobo == 0.9.0'\n]\n\nextras_require = {\n 'mysql': ['mysqlclient >= 1.2.3'],\n 'pgsql': ['psycopg2 == 2.7.5'],\n\n # Required for tcms.auth.backends.KerberosBackend\n 'krbauth': [\n 'kerberos == 1.2.5'\n ],\n\n # Packages for building documentation\n 'docs': [\n 'Sphinx >= 1.1.2',\n 'sphinx_rtd_theme',\n ],\n\n # Necessary packages for running tests\n 'tests': [\n 'beautifulsoup4',\n 'coverage',\n 'factory_boy',\n 'flake8',\n 'pytest',\n 'pytest-cov',\n 'pytest-django',\n ],\n\n # Contain tools that assists the development\n 'devtools': [\n 'django-debug-toolbar',\n 'tox',\n 'django-extensions',\n 'pygraphviz',\n ],\n\n # Required packages required to run async tasks\n 'async': [\n 'celery == 4.4.2',\n ],\n\n 'multiauth': [\n 'social-auth-app-django == 3.1.0',\n ]\n}\n\nsetup(\n name='nitrate-tcms',\n version=pkg_version,\n description='A full-featured Test Case Management System',\n long_description=get_long_description(),\n author='Nitrate Team',\n maintainer='Chenxiong Qi',\n maintainer_email='[email protected]',\n url='https://github.com/Nitrate/Nitrate/',\n license='GPLv2+',\n keywords='test case',\n install_requires=install_requires,\n extras_require=extras_require,\n python_requires='>=3.6',\n package_dir={'': 'src'},\n packages=find_packages('src', exclude=['test*']),\n include_package_data=True,\n zip_safe=False,\n classifiers=[\n 'Framework :: Django',\n 'Framework :: Django :: 2.0',\n 'Framework :: Django :: 2.1',\n 'Framework :: Django :: 2.2',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Software Development :: Testing',\n ],\n project_urls={\n 'Issue Tracker': 'https://github.com/Nitrate/Nitrate/issues',\n 'Source Code': 'https://github.com/Nitrate/Nitrate',\n 'Documentation': 'https://nitrate.readthedocs.io/',\n },\n)\n",
"path": "setup.py"
}
] | diff --git a/setup.py b/setup.py
index 566a6623..00700d43 100644
--- a/setup.py
+++ b/setup.py
@@ -61,7 +61,7 @@ def get_long_description():
# Required packages required to run async tasks
'async': [
- 'celery == 4.2.0',
+ 'celery == 4.4.2',
],
'multiauth': [
diff --git a/src/tests/core/test_core.py b/src/tests/core/test_core.py
index 4abc92e3..55255fd6 100644
--- a/src/tests/core/test_core.py
+++ b/src/tests/core/test_core.py
@@ -328,8 +328,6 @@ def test_uses_threading(self, Thread):
self.assertTrue(thread.daemon)
thread.start.assert_called_once()
- @unittest.skipIf(PY37, 'Celery<4.3 does not work with Python 3.7. '
- 'Waiting for 4.3 to be released.')
@patch('celery.shared_task')
def test_uses_celery(self, shared_task):
with patch.object(settings, 'ASYNC_TASK', new=AsyncTask.CELERY.value):
|
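The Nitrate issue above asks to drop a `skipIf` guard once Celery supports Python 3.7, and the diff removes exactly that decorator. As a hedged illustration (placeholder names and test body, not Nitrate's real code), the version-gated skip pattern being deleted looks like this:

```python
import sys
import unittest

# Assumption for the sketch: PY37 flags the interpreter version that the old
# Celery pin could not handle.
PY37 = sys.version_info[:2] == (3, 7)


class AsyncTaskTests(unittest.TestCase):
    # With the dependency pinned to a compatible release (celery >= 4.3),
    # this decorator becomes unnecessary and is removed, as in the diff above.
    @unittest.skipIf(PY37, "Celery<4.3 does not work with Python 3.7.")
    def test_uses_celery(self):
        self.assertTrue(True)  # placeholder assertion
```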
cisagov__manage.get.gov-1672 | In the request form, make the behavior for submitting domain requests and alternates the same
### Issue description
In the request form, the "What .gov domain do you want?" and "Alternative domains (optional)" fields behave differently when given input.
* `What .gov...`, the first field, requires a user to click "Check availability" before showing whether the desired domain is available.
* `Alternative domains...` automatically shows the response a few moments after typing has stopped.
Implement the design approach to make the behavior for submitting domain requests and alternates the same. The design approach is defined in Figma.
### Acceptance criteria
- [ ] In the request form, the behavior for submitting domain requests and alternates is the same
### Additional context
[prototype](https://www.figma.com/proto/v9EiY4kYfIHVWb8J58vXHS/Registrar%2FRequest%2F.gov-Domain?type=design&node-id=3-4259&t=1FVDm6ht1KQxFJc9-1&scaling=min-zoom&page-id=0%3A1&starting-point-node-id=3%3A4259)
[figma](https://www.figma.com/file/v9EiY4kYfIHVWb8J58vXHS/Registrar%2FRequest%2F.gov-Domain?type=design&node-id=0%3A1&mode=design&t=G0cZ3bAI9gEZck03-1)
### Links to other issues
Reference #1495 (design ticket)
| [
{
"content": "from __future__ import annotations # allows forward references in annotations\nfrom itertools import zip_longest\nimport logging\nfrom typing import Callable\nfrom api.views import DOMAIN_API_MESSAGES\nfrom phonenumber_field.formfields import PhoneNumberField # type: ignore\n\nfrom django import forms\nfrom django.core.validators import RegexValidator, MaxLengthValidator\nfrom django.utils.safestring import mark_safe\nfrom django.db.models.fields.related import ForeignObjectRel\n\nfrom registrar.models import Contact, DomainApplication, DraftDomain, Domain\nfrom registrar.templatetags.url_helpers import public_site_url\nfrom registrar.utility.enums import ValidationReturnType\n\nlogger = logging.getLogger(__name__)\n\n\nclass RegistrarForm(forms.Form):\n \"\"\"\n A common set of methods and configuration.\n\n The registrar's domain application is several pages of \"steps\".\n Each step is an HTML form containing one or more Django \"forms\".\n\n Subclass this class to create new forms.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n kwargs.setdefault(\"label_suffix\", \"\")\n # save a reference to an application object\n self.application = kwargs.pop(\"application\", None)\n super(RegistrarForm, self).__init__(*args, **kwargs)\n\n def to_database(self, obj: DomainApplication | Contact):\n \"\"\"\n Adds this form's cleaned data to `obj` and saves `obj`.\n\n Does nothing if form is not valid.\n \"\"\"\n if not self.is_valid():\n return\n for name, value in self.cleaned_data.items():\n setattr(obj, name, value)\n obj.save()\n\n @classmethod\n def from_database(cls, obj: DomainApplication | Contact | None):\n \"\"\"Returns a dict of form field values gotten from `obj`.\"\"\"\n if obj is None:\n return {}\n return {name: getattr(obj, name) for name in cls.declared_fields.keys()} # type: ignore\n\n\nclass RegistrarFormSet(forms.BaseFormSet):\n \"\"\"\n As with RegistrarForm, a common set of methods and configuration.\n\n Subclass this class to create new formsets.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n # save a reference to an application object\n self.application = kwargs.pop(\"application\", None)\n super(RegistrarFormSet, self).__init__(*args, **kwargs)\n # quick workaround to ensure that the HTML `required`\n # attribute shows up on required fields for any forms\n # in the formset which have data already (stated another\n # way: you can leave a form in the formset blank, but\n # if you opt to fill it out, you must fill it out _right_)\n for index in range(self.initial_form_count()):\n self.forms[index].use_required_attribute = True\n\n def should_delete(self, cleaned):\n \"\"\"Should this entry be deleted from the database?\"\"\"\n raise NotImplementedError\n\n def pre_update(self, db_obj, cleaned):\n \"\"\"Code to run before an item in the formset is saved.\"\"\"\n for key, value in cleaned.items():\n setattr(db_obj, key, value)\n\n def pre_create(self, db_obj, cleaned):\n \"\"\"Code to run before an item in the formset is created in the database.\"\"\"\n return cleaned\n\n def to_database(self, obj: DomainApplication):\n \"\"\"\n Adds this form's cleaned data to `obj` and saves `obj`.\n\n Does nothing if form is not valid.\n\n Hint: Subclass should call `self._to_database(...)`.\n \"\"\"\n raise NotImplementedError\n\n def _to_database(\n self,\n obj: DomainApplication,\n join: str,\n should_delete: Callable,\n pre_update: Callable,\n pre_create: Callable,\n ):\n \"\"\"\n Performs the actual work of saving.\n\n Has hooks such as `should_delete` and `pre_update` 
by which the\n subclass can control behavior. Add more hooks whenever needed.\n \"\"\"\n if not self.is_valid():\n return\n obj.save()\n\n query = getattr(obj, join).order_by(\"created_at\").all() # order matters\n\n # get the related name for the join defined for the db_obj for this form.\n # the related name will be the reference on a related object back to db_obj\n related_name = \"\"\n field = obj._meta.get_field(join)\n if isinstance(field, ForeignObjectRel) and callable(field.related_query_name):\n related_name = field.related_query_name()\n elif hasattr(field, \"related_query_name\") and callable(field.related_query_name):\n related_name = field.related_query_name()\n\n # the use of `zip` pairs the forms in the formset with the\n # related objects gotten from the database -- there should always be\n # at least as many forms as database entries: extra forms means new\n # entries, but fewer forms is _not_ the correct way to delete items\n # (likely a client-side error or an attempt at data tampering)\n for db_obj, post_data in zip_longest(query, self.forms, fillvalue=None):\n cleaned = post_data.cleaned_data if post_data is not None else {}\n\n # matching database object exists, update it\n if db_obj is not None and cleaned:\n if should_delete(cleaned):\n if hasattr(db_obj, \"has_more_than_one_join\") and db_obj.has_more_than_one_join(related_name):\n # Remove the specific relationship without deleting the object\n getattr(db_obj, related_name).remove(self.application)\n else:\n # If there are no other relationships, delete the object\n db_obj.delete()\n else:\n if hasattr(db_obj, \"has_more_than_one_join\") and db_obj.has_more_than_one_join(related_name):\n # create a new db_obj and disconnect existing one\n getattr(db_obj, related_name).remove(self.application)\n kwargs = pre_create(db_obj, cleaned)\n getattr(obj, join).create(**kwargs)\n else:\n pre_update(db_obj, cleaned)\n db_obj.save()\n\n # no matching database object, create it\n # make sure not to create a database object if cleaned has 'delete' attribute\n elif db_obj is None and cleaned and not cleaned.get(\"DELETE\", False):\n kwargs = pre_create(db_obj, cleaned)\n getattr(obj, join).create(**kwargs)\n\n @classmethod\n def on_fetch(cls, query):\n \"\"\"Code to run when fetching formset's objects from the database.\"\"\"\n return query.values()\n\n @classmethod\n def from_database(cls, obj: DomainApplication, join: str, on_fetch: Callable):\n \"\"\"Returns a dict of form field values gotten from `obj`.\"\"\"\n return on_fetch(getattr(obj, join).order_by(\"created_at\")) # order matters\n\n\nclass OrganizationTypeForm(RegistrarForm):\n organization_type = forms.ChoiceField(\n # use the long names in the application form\n choices=DomainApplication.OrganizationChoicesVerbose.choices,\n widget=forms.RadioSelect,\n error_messages={\"required\": \"Select the type of organization you represent.\"},\n )\n\n\nclass TribalGovernmentForm(RegistrarForm):\n federally_recognized_tribe = forms.BooleanField(\n label=\"Federally-recognized tribe \",\n required=False,\n )\n\n state_recognized_tribe = forms.BooleanField(\n label=\"State-recognized tribe \",\n required=False,\n )\n\n tribe_name = forms.CharField(\n label=\"Name of tribe\",\n error_messages={\"required\": \"Enter the tribe you represent.\"},\n )\n\n def clean(self):\n \"\"\"Needs to be either state or federally recognized.\"\"\"\n if not (self.cleaned_data[\"federally_recognized_tribe\"] or self.cleaned_data[\"state_recognized_tribe\"]):\n raise forms.ValidationError(\n # no 
sec because we are using it to include an internal URL\n # into a link. There should be no user-facing input in the\n # HTML indicated here.\n mark_safe( # nosec\n \"You can’t complete this application yet. \"\n \"Only tribes recognized by the U.S. federal government \"\n \"or by a U.S. state government are eligible for .gov \"\n 'domains. Use our <a href=\"{}\">contact form</a> to '\n \"tell us more about your tribe and why you want a .gov \"\n \"domain. We’ll review your information and get back \"\n \"to you.\".format(public_site_url(\"contact\"))\n ),\n code=\"invalid\",\n )\n\n\nclass OrganizationFederalForm(RegistrarForm):\n federal_type = forms.ChoiceField(\n choices=DomainApplication.BranchChoices.choices,\n widget=forms.RadioSelect,\n error_messages={\"required\": (\"Select the part of the federal government your organization is in.\")},\n )\n\n\nclass OrganizationElectionForm(RegistrarForm):\n is_election_board = forms.NullBooleanField(\n widget=forms.RadioSelect(\n choices=[\n (True, \"Yes\"),\n (False, \"No\"),\n ],\n )\n )\n\n def clean_is_election_board(self):\n \"\"\"This box must be checked to proceed but offer a clear error.\"\"\"\n # already converted to a boolean\n is_election_board = self.cleaned_data[\"is_election_board\"]\n if is_election_board is None:\n raise forms.ValidationError(\n (\"Select “Yes” if you represent an election office. Select “No” if you don’t.\"),\n code=\"required\",\n )\n return is_election_board\n\n\nclass OrganizationContactForm(RegistrarForm):\n # for federal agencies we also want to know the top-level agency.\n federal_agency = forms.ChoiceField(\n label=\"Federal agency\",\n # not required because this field won't be filled out unless\n # it is a federal agency. Use clean to check programatically\n # if it has been filled in when required.\n required=False,\n choices=[(\"\", \"--Select--\")] + DomainApplication.AGENCY_CHOICES,\n )\n organization_name = forms.CharField(\n label=\"Organization name\",\n error_messages={\"required\": \"Enter the name of your organization.\"},\n )\n address_line1 = forms.CharField(\n label=\"Street address\",\n error_messages={\"required\": \"Enter the street address of your organization.\"},\n )\n address_line2 = forms.CharField(\n required=False,\n label=\"Street address line 2 (optional)\",\n )\n city = forms.CharField(\n label=\"City\",\n error_messages={\"required\": \"Enter the city where your organization is located.\"},\n )\n state_territory = forms.ChoiceField(\n label=\"State, territory, or military post\",\n choices=[(\"\", \"--Select--\")] + DomainApplication.StateTerritoryChoices.choices,\n error_messages={\n \"required\": (\"Select the state, territory, or military post where your organization is located.\")\n },\n )\n zipcode = forms.CharField(\n label=\"Zip code\",\n validators=[\n RegexValidator(\n \"^[0-9]{5}(?:-[0-9]{4})?$|^$\",\n message=\"Enter a zip code in the form of 12345 or 12345-6789.\",\n )\n ],\n )\n urbanization = forms.CharField(\n required=False,\n label=\"Urbanization (required for Puerto Rico only)\",\n )\n\n def clean_federal_agency(self):\n \"\"\"Require something to be selected when this is a federal agency.\"\"\"\n federal_agency = self.cleaned_data.get(\"federal_agency\", None)\n # need the application object to know if this is federal\n if self.application is None:\n # hmm, no saved application object?, default require the agency\n if not federal_agency:\n # no answer was selected\n raise forms.ValidationError(\n \"Select the federal agency your organization is in.\",\n 
code=\"required\",\n )\n if self.application.is_federal():\n if not federal_agency:\n # no answer was selected\n raise forms.ValidationError(\n \"Select the federal agency your organization is in.\",\n code=\"required\",\n )\n return federal_agency\n\n\nclass AboutYourOrganizationForm(RegistrarForm):\n about_your_organization = forms.CharField(\n label=\"About your organization\",\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n error_messages={\"required\": (\"Enter more information about your organization.\")},\n )\n\n\nclass AuthorizingOfficialForm(RegistrarForm):\n JOIN = \"authorizing_official\"\n\n def to_database(self, obj):\n if not self.is_valid():\n return\n contact = getattr(obj, \"authorizing_official\", None)\n if contact is not None and not contact.has_more_than_one_join(\"authorizing_official\"):\n # if contact exists in the database and is not joined to other entities\n super().to_database(contact)\n else:\n # no contact exists OR contact exists which is joined also to other entities;\n # in either case, create a new contact and update it\n contact = Contact()\n super().to_database(contact)\n obj.authorizing_official = contact\n obj.save()\n\n @classmethod\n def from_database(cls, obj):\n contact = getattr(obj, \"authorizing_official\", None)\n return super().from_database(contact)\n\n first_name = forms.CharField(\n label=\"First name / given name\",\n error_messages={\"required\": (\"Enter the first name / given name of your authorizing official.\")},\n )\n last_name = forms.CharField(\n label=\"Last name / family name\",\n error_messages={\"required\": (\"Enter the last name / family name of your authorizing official.\")},\n )\n title = forms.CharField(\n label=\"Title or role in your organization\",\n error_messages={\n \"required\": (\n \"Enter the title or role your authorizing official has in your\"\n \" organization (e.g., Chief Information Officer).\"\n )\n },\n )\n email = forms.EmailField(\n label=\"Email\",\n error_messages={\"invalid\": (\"Enter an email address in the required format, like [email protected].\")},\n )\n\n\nclass CurrentSitesForm(RegistrarForm):\n website = forms.URLField(\n required=False,\n label=\"Public website\",\n error_messages={\n \"invalid\": (\"Enter your organization's current website in the required format, like example.com.\")\n },\n )\n\n\nclass BaseCurrentSitesFormSet(RegistrarFormSet):\n JOIN = \"current_websites\"\n\n def should_delete(self, cleaned):\n website = cleaned.get(\"website\", \"\")\n return website.strip() == \"\"\n\n def to_database(self, obj: DomainApplication):\n # If we want to test against multiple joins for a website object, replace the empty array\n # and change the JOIN in the models to allow for reverse references\n self._to_database(obj, self.JOIN, self.should_delete, self.pre_update, self.pre_create)\n\n @classmethod\n def from_database(cls, obj):\n return super().from_database(obj, cls.JOIN, cls.on_fetch)\n\n\nCurrentSitesFormSet = forms.formset_factory(\n CurrentSitesForm,\n extra=1,\n absolute_max=1500, # django default; use `max_num` to limit entries\n formset=BaseCurrentSitesFormSet,\n)\n\n\nclass AlternativeDomainForm(RegistrarForm):\n def clean_alternative_domain(self):\n \"\"\"Validation code for domain names.\"\"\"\n requested = self.cleaned_data.get(\"alternative_domain\", None)\n validated, _ = DraftDomain.validate_and_handle_errors(\n domain=requested,\n 
return_type=ValidationReturnType.FORM_VALIDATION_ERROR,\n blank_ok=True,\n )\n return validated\n\n alternative_domain = forms.CharField(\n required=False,\n label=\"\",\n )\n\n\nclass BaseAlternativeDomainFormSet(RegistrarFormSet):\n JOIN = \"alternative_domains\"\n\n def should_delete(self, cleaned):\n domain = cleaned.get(\"alternative_domain\", \"\")\n return domain.strip() == \"\"\n\n def pre_update(self, db_obj, cleaned):\n domain = cleaned.get(\"alternative_domain\", None)\n if domain is not None:\n db_obj.website = f\"{domain}.gov\"\n\n def pre_create(self, db_obj, cleaned):\n domain = cleaned.get(\"alternative_domain\", None)\n if domain is not None:\n return {\"website\": f\"{domain}.gov\"}\n else:\n return {}\n\n def to_database(self, obj: DomainApplication):\n # If we want to test against multiple joins for a website object, replace the empty array and\n # change the JOIN in the models to allow for reverse references\n self._to_database(obj, self.JOIN, self.should_delete, self.pre_update, self.pre_create)\n\n @classmethod\n def on_fetch(cls, query):\n return [{\"alternative_domain\": Domain.sld(domain.website)} for domain in query]\n\n @classmethod\n def from_database(cls, obj):\n return super().from_database(obj, cls.JOIN, cls.on_fetch)\n\n\nAlternativeDomainFormSet = forms.formset_factory(\n AlternativeDomainForm,\n extra=1,\n absolute_max=1500, # django default; use `max_num` to limit entries\n formset=BaseAlternativeDomainFormSet,\n)\n\n\nclass DotGovDomainForm(RegistrarForm):\n def to_database(self, obj):\n if not self.is_valid():\n return\n domain = self.cleaned_data.get(\"requested_domain\", None)\n if domain:\n requested_domain = getattr(obj, \"requested_domain\", None)\n if requested_domain is not None:\n requested_domain.name = f\"{domain}.gov\"\n requested_domain.save()\n else:\n requested_domain = DraftDomain.objects.create(name=f\"{domain}.gov\")\n obj.requested_domain = requested_domain\n obj.save()\n\n obj.save()\n\n @classmethod\n def from_database(cls, obj):\n values = {}\n requested_domain = getattr(obj, \"requested_domain\", None)\n if requested_domain is not None:\n domain_name = requested_domain.name\n values[\"requested_domain\"] = Domain.sld(domain_name)\n return values\n\n def clean_requested_domain(self):\n \"\"\"Validation code for domain names.\"\"\"\n requested = self.cleaned_data.get(\"requested_domain\", None)\n validated, _ = DraftDomain.validate_and_handle_errors(\n domain=requested,\n return_type=ValidationReturnType.FORM_VALIDATION_ERROR,\n )\n return validated\n\n requested_domain = forms.CharField(\n label=\"What .gov domain do you want?\",\n error_messages={\n \"required\": DOMAIN_API_MESSAGES[\"required\"],\n },\n )\n\n\nclass PurposeForm(RegistrarForm):\n purpose = forms.CharField(\n label=\"Purpose\",\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n error_messages={\"required\": \"Describe how you’ll use the .gov domain you’re requesting.\"},\n )\n\n\nclass YourContactForm(RegistrarForm):\n JOIN = \"submitter\"\n\n def to_database(self, obj):\n if not self.is_valid():\n return\n contact = getattr(obj, \"submitter\", None)\n if contact is not None and not contact.has_more_than_one_join(\"submitted_applications\"):\n # if contact exists in the database and is not joined to other entities\n super().to_database(contact)\n else:\n # no contact exists OR contact exists which is joined also to other entities;\n # in either case, create a new contact and 
update it\n contact = Contact()\n super().to_database(contact)\n obj.submitter = contact\n obj.save()\n\n @classmethod\n def from_database(cls, obj):\n contact = getattr(obj, \"submitter\", None)\n return super().from_database(contact)\n\n first_name = forms.CharField(\n label=\"First name / given name\",\n error_messages={\"required\": \"Enter your first name / given name.\"},\n )\n middle_name = forms.CharField(\n required=False,\n label=\"Middle name (optional)\",\n )\n last_name = forms.CharField(\n label=\"Last name / family name\",\n error_messages={\"required\": \"Enter your last name / family name.\"},\n )\n title = forms.CharField(\n label=\"Title or role in your organization\",\n error_messages={\n \"required\": (\"Enter your title or role in your organization (e.g., Chief Information Officer).\")\n },\n )\n email = forms.EmailField(\n label=\"Email\",\n error_messages={\"invalid\": (\"Enter your email address in the required format, like [email protected].\")},\n )\n phone = PhoneNumberField(\n label=\"Phone\",\n error_messages={\"invalid\": \"Enter a valid 10-digit phone number.\", \"required\": \"Enter your phone number.\"},\n )\n\n\nclass OtherContactsYesNoForm(RegistrarForm):\n def __init__(self, *args, **kwargs):\n \"\"\"Extend the initialization of the form from RegistrarForm __init__\"\"\"\n super().__init__(*args, **kwargs)\n # set the initial value based on attributes of application\n if self.application and self.application.has_other_contacts():\n initial_value = True\n elif self.application and self.application.has_rationale():\n initial_value = False\n else:\n # No pre-selection for new applications\n initial_value = None\n\n self.fields[\"has_other_contacts\"] = forms.TypedChoiceField(\n coerce=lambda x: x.lower() == \"true\" if x is not None else None, # coerce strings to bool, excepting None\n choices=((True, \"Yes, I can name other employees.\"), (False, \"No. 
(We’ll ask you to explain why.)\")),\n initial=initial_value,\n widget=forms.RadioSelect,\n error_messages={\n \"required\": \"This question is required.\",\n },\n )\n\n\nclass OtherContactsForm(RegistrarForm):\n first_name = forms.CharField(\n label=\"First name / given name\",\n error_messages={\"required\": \"Enter the first name / given name of this contact.\"},\n )\n middle_name = forms.CharField(\n required=False,\n label=\"Middle name (optional)\",\n )\n last_name = forms.CharField(\n label=\"Last name / family name\",\n error_messages={\"required\": \"Enter the last name / family name of this contact.\"},\n )\n title = forms.CharField(\n label=\"Title or role in your organization\",\n error_messages={\n \"required\": (\n \"Enter the title or role in your organization of this contact (e.g., Chief Information Officer).\"\n )\n },\n )\n email = forms.EmailField(\n label=\"Email\",\n error_messages={\n \"required\": (\"Enter an email address in the required format, like [email protected].\"),\n \"invalid\": (\"Enter an email address in the required format, like [email protected].\"),\n },\n )\n phone = PhoneNumberField(\n label=\"Phone\",\n error_messages={\n \"invalid\": \"Enter a valid 10-digit phone number.\",\n \"required\": \"Enter a phone number for this contact.\",\n },\n )\n\n def __init__(self, *args, **kwargs):\n \"\"\"\n Override the __init__ method for RegistrarForm.\n Set form_data_marked_for_deletion to false.\n Empty_permitted set to False, as this is overridden in certain circumstances by\n Django's BaseFormSet, and results in empty forms being allowed and field level\n errors not appropriately raised. This works with code in the view which appropriately\n displays required attributes on fields.\n \"\"\"\n self.form_data_marked_for_deletion = False\n super().__init__(*args, **kwargs)\n self.empty_permitted = False\n\n def mark_form_for_deletion(self):\n self.form_data_marked_for_deletion = True\n\n def clean(self):\n \"\"\"\n This method overrides the default behavior for forms.\n This cleans the form after field validation has already taken place.\n In this override, allow for a form which is deleted by user or marked for\n deletion by formset to be considered valid even though certain required fields have\n not passed field validation\n \"\"\"\n if self.form_data_marked_for_deletion or self.cleaned_data.get(\"DELETE\"):\n # clear any errors raised by the form fields\n # (before this clean() method is run, each field\n # performs its own clean, which could result in\n # errors that we wish to ignore at this point)\n #\n # NOTE: we cannot just clear() the errors list.\n # That causes problems.\n for field in self.fields:\n if field in self.errors:\n del self.errors[field]\n # return empty object with only 'delete' attribute defined.\n # this will prevent _to_database from creating an empty\n # database object\n return {\"DELETE\": True}\n\n return self.cleaned_data\n\n\nclass BaseOtherContactsFormSet(RegistrarFormSet):\n \"\"\"\n FormSet for Other Contacts\n\n There are two conditions by which a form in the formset can be marked for deletion.\n One is if the user clicks 'DELETE' button, and this is submitted in the form. The\n other is if the YesNo form, which is submitted with this formset, is set to No; in\n this case, all forms in formset are marked for deletion. Both of these conditions\n must co-exist.\n Also, other_contacts have db relationships to multiple db objects. 
When attempting\n to delete an other_contact from an application, those db relationships must be\n tested and handled.\n \"\"\"\n\n JOIN = \"other_contacts\"\n\n def get_deletion_widget(self):\n return forms.HiddenInput(attrs={\"class\": \"deletion\"})\n\n def __init__(self, *args, **kwargs):\n \"\"\"\n Override __init__ for RegistrarFormSet.\n \"\"\"\n self.formset_data_marked_for_deletion = False\n self.application = kwargs.pop(\"application\", None)\n super(RegistrarFormSet, self).__init__(*args, **kwargs)\n # quick workaround to ensure that the HTML `required`\n # attribute shows up on required fields for the first form\n # in the formset plus those that have data already.\n for index in range(max(self.initial_form_count(), 1)):\n self.forms[index].use_required_attribute = True\n\n def should_delete(self, cleaned):\n \"\"\"\n Implements should_delete method from BaseFormSet.\n \"\"\"\n return self.formset_data_marked_for_deletion or cleaned.get(\"DELETE\", False)\n\n def pre_create(self, db_obj, cleaned):\n \"\"\"Code to run before an item in the formset is created in the database.\"\"\"\n # remove DELETE from cleaned\n if \"DELETE\" in cleaned:\n cleaned.pop(\"DELETE\")\n return cleaned\n\n def to_database(self, obj: DomainApplication):\n self._to_database(obj, self.JOIN, self.should_delete, self.pre_update, self.pre_create)\n\n @classmethod\n def from_database(cls, obj):\n return super().from_database(obj, cls.JOIN, cls.on_fetch)\n\n def mark_formset_for_deletion(self):\n \"\"\"Mark other contacts formset for deletion.\n Updates forms in formset as well to mark them for deletion.\n This has an effect on validity checks and to_database methods.\n \"\"\"\n self.formset_data_marked_for_deletion = True\n for form in self.forms:\n form.mark_form_for_deletion()\n\n def is_valid(self):\n \"\"\"Extend is_valid from RegistrarFormSet. 
When marking this formset for deletion, set\n validate_min to false so that validation does not attempt to enforce a minimum\n number of other contacts when contacts marked for deletion\"\"\"\n if self.formset_data_marked_for_deletion:\n self.validate_min = False\n return super().is_valid()\n\n\nOtherContactsFormSet = forms.formset_factory(\n OtherContactsForm,\n extra=0,\n absolute_max=1500, # django default; use `max_num` to limit entries\n min_num=1,\n can_delete=True,\n validate_min=True,\n formset=BaseOtherContactsFormSet,\n)\n\n\nclass NoOtherContactsForm(RegistrarForm):\n no_other_contacts_rationale = forms.CharField(\n required=True,\n # label has to end in a space to get the label_suffix to show\n label=(\"No other employees rationale\"),\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n error_messages={\"required\": (\"Rationale for no other employees is required.\")},\n )\n\n def __init__(self, *args, **kwargs):\n self.form_data_marked_for_deletion = False\n super().__init__(*args, **kwargs)\n\n def mark_form_for_deletion(self):\n \"\"\"Marks no_other_contacts form for deletion.\n This changes behavior of validity checks and to_database\n methods.\"\"\"\n self.form_data_marked_for_deletion = True\n\n def clean(self):\n \"\"\"\n This method overrides the default behavior for forms.\n This cleans the form after field validation has already taken place.\n In this override, remove errors associated with the form if form data\n is marked for deletion.\n \"\"\"\n\n if self.form_data_marked_for_deletion:\n # clear any errors raised by the form fields\n # (before this clean() method is run, each field\n # performs its own clean, which could result in\n # errors that we wish to ignore at this point)\n #\n # NOTE: we cannot just clear() the errors list.\n # That causes problems.\n for field in self.fields:\n if field in self.errors:\n del self.errors[field]\n\n return self.cleaned_data\n\n def to_database(self, obj):\n \"\"\"\n This method overrides the behavior of RegistrarForm.\n If form data is marked for deletion, set relevant fields\n to None before saving.\n Do nothing if form is not valid.\n \"\"\"\n if not self.is_valid():\n return\n if self.form_data_marked_for_deletion:\n for field_name, _ in self.fields.items():\n setattr(obj, field_name, None)\n else:\n for name, value in self.cleaned_data.items():\n setattr(obj, name, value)\n obj.save()\n\n\nclass AnythingElseForm(RegistrarForm):\n anything_else = forms.CharField(\n required=False,\n label=\"Anything else?\",\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n )\n\n\nclass RequirementsForm(RegistrarForm):\n is_policy_acknowledged = forms.BooleanField(\n label=\"I read and agree to the requirements for operating a .gov domain.\",\n error_messages={\n \"required\": (\"Check the box if you read and agree to the requirements for operating a .gov domain.\")\n },\n )\n",
"path": "src/registrar/forms/application_wizard.py"
}
] | [
{
"content": "from __future__ import annotations # allows forward references in annotations\nfrom itertools import zip_longest\nimport logging\nfrom typing import Callable\nfrom api.views import DOMAIN_API_MESSAGES\nfrom phonenumber_field.formfields import PhoneNumberField # type: ignore\n\nfrom django import forms\nfrom django.core.validators import RegexValidator, MaxLengthValidator\nfrom django.utils.safestring import mark_safe\nfrom django.db.models.fields.related import ForeignObjectRel\n\nfrom registrar.models import Contact, DomainApplication, DraftDomain, Domain\nfrom registrar.templatetags.url_helpers import public_site_url\nfrom registrar.utility.enums import ValidationReturnType\n\nlogger = logging.getLogger(__name__)\n\n\nclass RegistrarForm(forms.Form):\n \"\"\"\n A common set of methods and configuration.\n\n The registrar's domain application is several pages of \"steps\".\n Each step is an HTML form containing one or more Django \"forms\".\n\n Subclass this class to create new forms.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n kwargs.setdefault(\"label_suffix\", \"\")\n # save a reference to an application object\n self.application = kwargs.pop(\"application\", None)\n super(RegistrarForm, self).__init__(*args, **kwargs)\n\n def to_database(self, obj: DomainApplication | Contact):\n \"\"\"\n Adds this form's cleaned data to `obj` and saves `obj`.\n\n Does nothing if form is not valid.\n \"\"\"\n if not self.is_valid():\n return\n for name, value in self.cleaned_data.items():\n setattr(obj, name, value)\n obj.save()\n\n @classmethod\n def from_database(cls, obj: DomainApplication | Contact | None):\n \"\"\"Returns a dict of form field values gotten from `obj`.\"\"\"\n if obj is None:\n return {}\n return {name: getattr(obj, name) for name in cls.declared_fields.keys()} # type: ignore\n\n\nclass RegistrarFormSet(forms.BaseFormSet):\n \"\"\"\n As with RegistrarForm, a common set of methods and configuration.\n\n Subclass this class to create new formsets.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n # save a reference to an application object\n self.application = kwargs.pop(\"application\", None)\n super(RegistrarFormSet, self).__init__(*args, **kwargs)\n # quick workaround to ensure that the HTML `required`\n # attribute shows up on required fields for any forms\n # in the formset which have data already (stated another\n # way: you can leave a form in the formset blank, but\n # if you opt to fill it out, you must fill it out _right_)\n for index in range(self.initial_form_count()):\n self.forms[index].use_required_attribute = True\n\n def should_delete(self, cleaned):\n \"\"\"Should this entry be deleted from the database?\"\"\"\n raise NotImplementedError\n\n def pre_update(self, db_obj, cleaned):\n \"\"\"Code to run before an item in the formset is saved.\"\"\"\n for key, value in cleaned.items():\n setattr(db_obj, key, value)\n\n def pre_create(self, db_obj, cleaned):\n \"\"\"Code to run before an item in the formset is created in the database.\"\"\"\n return cleaned\n\n def to_database(self, obj: DomainApplication):\n \"\"\"\n Adds this form's cleaned data to `obj` and saves `obj`.\n\n Does nothing if form is not valid.\n\n Hint: Subclass should call `self._to_database(...)`.\n \"\"\"\n raise NotImplementedError\n\n def _to_database(\n self,\n obj: DomainApplication,\n join: str,\n should_delete: Callable,\n pre_update: Callable,\n pre_create: Callable,\n ):\n \"\"\"\n Performs the actual work of saving.\n\n Has hooks such as `should_delete` and `pre_update` 
by which the\n subclass can control behavior. Add more hooks whenever needed.\n \"\"\"\n if not self.is_valid():\n return\n obj.save()\n\n query = getattr(obj, join).order_by(\"created_at\").all() # order matters\n\n # get the related name for the join defined for the db_obj for this form.\n # the related name will be the reference on a related object back to db_obj\n related_name = \"\"\n field = obj._meta.get_field(join)\n if isinstance(field, ForeignObjectRel) and callable(field.related_query_name):\n related_name = field.related_query_name()\n elif hasattr(field, \"related_query_name\") and callable(field.related_query_name):\n related_name = field.related_query_name()\n\n # the use of `zip` pairs the forms in the formset with the\n # related objects gotten from the database -- there should always be\n # at least as many forms as database entries: extra forms means new\n # entries, but fewer forms is _not_ the correct way to delete items\n # (likely a client-side error or an attempt at data tampering)\n for db_obj, post_data in zip_longest(query, self.forms, fillvalue=None):\n cleaned = post_data.cleaned_data if post_data is not None else {}\n\n # matching database object exists, update it\n if db_obj is not None and cleaned:\n if should_delete(cleaned):\n if hasattr(db_obj, \"has_more_than_one_join\") and db_obj.has_more_than_one_join(related_name):\n # Remove the specific relationship without deleting the object\n getattr(db_obj, related_name).remove(self.application)\n else:\n # If there are no other relationships, delete the object\n db_obj.delete()\n else:\n if hasattr(db_obj, \"has_more_than_one_join\") and db_obj.has_more_than_one_join(related_name):\n # create a new db_obj and disconnect existing one\n getattr(db_obj, related_name).remove(self.application)\n kwargs = pre_create(db_obj, cleaned)\n getattr(obj, join).create(**kwargs)\n else:\n pre_update(db_obj, cleaned)\n db_obj.save()\n\n # no matching database object, create it\n # make sure not to create a database object if cleaned has 'delete' attribute\n elif db_obj is None and cleaned and not cleaned.get(\"DELETE\", False):\n kwargs = pre_create(db_obj, cleaned)\n getattr(obj, join).create(**kwargs)\n\n @classmethod\n def on_fetch(cls, query):\n \"\"\"Code to run when fetching formset's objects from the database.\"\"\"\n return query.values()\n\n @classmethod\n def from_database(cls, obj: DomainApplication, join: str, on_fetch: Callable):\n \"\"\"Returns a dict of form field values gotten from `obj`.\"\"\"\n return on_fetch(getattr(obj, join).order_by(\"created_at\")) # order matters\n\n\nclass OrganizationTypeForm(RegistrarForm):\n organization_type = forms.ChoiceField(\n # use the long names in the application form\n choices=DomainApplication.OrganizationChoicesVerbose.choices,\n widget=forms.RadioSelect,\n error_messages={\"required\": \"Select the type of organization you represent.\"},\n )\n\n\nclass TribalGovernmentForm(RegistrarForm):\n federally_recognized_tribe = forms.BooleanField(\n label=\"Federally-recognized tribe \",\n required=False,\n )\n\n state_recognized_tribe = forms.BooleanField(\n label=\"State-recognized tribe \",\n required=False,\n )\n\n tribe_name = forms.CharField(\n label=\"Name of tribe\",\n error_messages={\"required\": \"Enter the tribe you represent.\"},\n )\n\n def clean(self):\n \"\"\"Needs to be either state or federally recognized.\"\"\"\n if not (self.cleaned_data[\"federally_recognized_tribe\"] or self.cleaned_data[\"state_recognized_tribe\"]):\n raise forms.ValidationError(\n # no 
sec because we are using it to include an internal URL\n # into a link. There should be no user-facing input in the\n # HTML indicated here.\n mark_safe( # nosec\n \"You can’t complete this application yet. \"\n \"Only tribes recognized by the U.S. federal government \"\n \"or by a U.S. state government are eligible for .gov \"\n 'domains. Use our <a href=\"{}\">contact form</a> to '\n \"tell us more about your tribe and why you want a .gov \"\n \"domain. We’ll review your information and get back \"\n \"to you.\".format(public_site_url(\"contact\"))\n ),\n code=\"invalid\",\n )\n\n\nclass OrganizationFederalForm(RegistrarForm):\n federal_type = forms.ChoiceField(\n choices=DomainApplication.BranchChoices.choices,\n widget=forms.RadioSelect,\n error_messages={\"required\": (\"Select the part of the federal government your organization is in.\")},\n )\n\n\nclass OrganizationElectionForm(RegistrarForm):\n is_election_board = forms.NullBooleanField(\n widget=forms.RadioSelect(\n choices=[\n (True, \"Yes\"),\n (False, \"No\"),\n ],\n )\n )\n\n def clean_is_election_board(self):\n \"\"\"This box must be checked to proceed but offer a clear error.\"\"\"\n # already converted to a boolean\n is_election_board = self.cleaned_data[\"is_election_board\"]\n if is_election_board is None:\n raise forms.ValidationError(\n (\"Select “Yes” if you represent an election office. Select “No” if you don’t.\"),\n code=\"required\",\n )\n return is_election_board\n\n\nclass OrganizationContactForm(RegistrarForm):\n # for federal agencies we also want to know the top-level agency.\n federal_agency = forms.ChoiceField(\n label=\"Federal agency\",\n # not required because this field won't be filled out unless\n # it is a federal agency. Use clean to check programatically\n # if it has been filled in when required.\n required=False,\n choices=[(\"\", \"--Select--\")] + DomainApplication.AGENCY_CHOICES,\n )\n organization_name = forms.CharField(\n label=\"Organization name\",\n error_messages={\"required\": \"Enter the name of your organization.\"},\n )\n address_line1 = forms.CharField(\n label=\"Street address\",\n error_messages={\"required\": \"Enter the street address of your organization.\"},\n )\n address_line2 = forms.CharField(\n required=False,\n label=\"Street address line 2 (optional)\",\n )\n city = forms.CharField(\n label=\"City\",\n error_messages={\"required\": \"Enter the city where your organization is located.\"},\n )\n state_territory = forms.ChoiceField(\n label=\"State, territory, or military post\",\n choices=[(\"\", \"--Select--\")] + DomainApplication.StateTerritoryChoices.choices,\n error_messages={\n \"required\": (\"Select the state, territory, or military post where your organization is located.\")\n },\n )\n zipcode = forms.CharField(\n label=\"Zip code\",\n validators=[\n RegexValidator(\n \"^[0-9]{5}(?:-[0-9]{4})?$|^$\",\n message=\"Enter a zip code in the form of 12345 or 12345-6789.\",\n )\n ],\n )\n urbanization = forms.CharField(\n required=False,\n label=\"Urbanization (required for Puerto Rico only)\",\n )\n\n def clean_federal_agency(self):\n \"\"\"Require something to be selected when this is a federal agency.\"\"\"\n federal_agency = self.cleaned_data.get(\"federal_agency\", None)\n # need the application object to know if this is federal\n if self.application is None:\n # hmm, no saved application object?, default require the agency\n if not federal_agency:\n # no answer was selected\n raise forms.ValidationError(\n \"Select the federal agency your organization is in.\",\n 
code=\"required\",\n )\n if self.application.is_federal():\n if not federal_agency:\n # no answer was selected\n raise forms.ValidationError(\n \"Select the federal agency your organization is in.\",\n code=\"required\",\n )\n return federal_agency\n\n\nclass AboutYourOrganizationForm(RegistrarForm):\n about_your_organization = forms.CharField(\n label=\"About your organization\",\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n error_messages={\"required\": (\"Enter more information about your organization.\")},\n )\n\n\nclass AuthorizingOfficialForm(RegistrarForm):\n JOIN = \"authorizing_official\"\n\n def to_database(self, obj):\n if not self.is_valid():\n return\n contact = getattr(obj, \"authorizing_official\", None)\n if contact is not None and not contact.has_more_than_one_join(\"authorizing_official\"):\n # if contact exists in the database and is not joined to other entities\n super().to_database(contact)\n else:\n # no contact exists OR contact exists which is joined also to other entities;\n # in either case, create a new contact and update it\n contact = Contact()\n super().to_database(contact)\n obj.authorizing_official = contact\n obj.save()\n\n @classmethod\n def from_database(cls, obj):\n contact = getattr(obj, \"authorizing_official\", None)\n return super().from_database(contact)\n\n first_name = forms.CharField(\n label=\"First name / given name\",\n error_messages={\"required\": (\"Enter the first name / given name of your authorizing official.\")},\n )\n last_name = forms.CharField(\n label=\"Last name / family name\",\n error_messages={\"required\": (\"Enter the last name / family name of your authorizing official.\")},\n )\n title = forms.CharField(\n label=\"Title or role in your organization\",\n error_messages={\n \"required\": (\n \"Enter the title or role your authorizing official has in your\"\n \" organization (e.g., Chief Information Officer).\"\n )\n },\n )\n email = forms.EmailField(\n label=\"Email\",\n error_messages={\"invalid\": (\"Enter an email address in the required format, like [email protected].\")},\n )\n\n\nclass CurrentSitesForm(RegistrarForm):\n website = forms.URLField(\n required=False,\n label=\"Public website\",\n error_messages={\n \"invalid\": (\"Enter your organization's current website in the required format, like example.com.\")\n },\n )\n\n\nclass BaseCurrentSitesFormSet(RegistrarFormSet):\n JOIN = \"current_websites\"\n\n def should_delete(self, cleaned):\n website = cleaned.get(\"website\", \"\")\n return website.strip() == \"\"\n\n def to_database(self, obj: DomainApplication):\n # If we want to test against multiple joins for a website object, replace the empty array\n # and change the JOIN in the models to allow for reverse references\n self._to_database(obj, self.JOIN, self.should_delete, self.pre_update, self.pre_create)\n\n @classmethod\n def from_database(cls, obj):\n return super().from_database(obj, cls.JOIN, cls.on_fetch)\n\n\nCurrentSitesFormSet = forms.formset_factory(\n CurrentSitesForm,\n extra=1,\n absolute_max=1500, # django default; use `max_num` to limit entries\n formset=BaseCurrentSitesFormSet,\n)\n\n\nclass AlternativeDomainForm(RegistrarForm):\n def clean_alternative_domain(self):\n \"\"\"Validation code for domain names.\"\"\"\n requested = self.cleaned_data.get(\"alternative_domain\", None)\n validated, _ = DraftDomain.validate_and_handle_errors(\n domain=requested,\n 
return_type=ValidationReturnType.FORM_VALIDATION_ERROR,\n blank_ok=True,\n )\n return validated\n\n alternative_domain = forms.CharField(\n required=False,\n label=\"Alternative domain\",\n )\n\n\nclass BaseAlternativeDomainFormSet(RegistrarFormSet):\n JOIN = \"alternative_domains\"\n\n def should_delete(self, cleaned):\n domain = cleaned.get(\"alternative_domain\", \"\")\n return domain.strip() == \"\"\n\n def pre_update(self, db_obj, cleaned):\n domain = cleaned.get(\"alternative_domain\", None)\n if domain is not None:\n db_obj.website = f\"{domain}.gov\"\n\n def pre_create(self, db_obj, cleaned):\n domain = cleaned.get(\"alternative_domain\", None)\n if domain is not None:\n return {\"website\": f\"{domain}.gov\"}\n else:\n return {}\n\n def to_database(self, obj: DomainApplication):\n # If we want to test against multiple joins for a website object, replace the empty array and\n # change the JOIN in the models to allow for reverse references\n self._to_database(obj, self.JOIN, self.should_delete, self.pre_update, self.pre_create)\n\n @classmethod\n def on_fetch(cls, query):\n return [{\"alternative_domain\": Domain.sld(domain.website)} for domain in query]\n\n @classmethod\n def from_database(cls, obj):\n return super().from_database(obj, cls.JOIN, cls.on_fetch)\n\n\nAlternativeDomainFormSet = forms.formset_factory(\n AlternativeDomainForm,\n extra=1,\n absolute_max=1500, # django default; use `max_num` to limit entries\n formset=BaseAlternativeDomainFormSet,\n)\n\n\nclass DotGovDomainForm(RegistrarForm):\n def to_database(self, obj):\n if not self.is_valid():\n return\n domain = self.cleaned_data.get(\"requested_domain\", None)\n if domain:\n requested_domain = getattr(obj, \"requested_domain\", None)\n if requested_domain is not None:\n requested_domain.name = f\"{domain}.gov\"\n requested_domain.save()\n else:\n requested_domain = DraftDomain.objects.create(name=f\"{domain}.gov\")\n obj.requested_domain = requested_domain\n obj.save()\n\n obj.save()\n\n @classmethod\n def from_database(cls, obj):\n values = {}\n requested_domain = getattr(obj, \"requested_domain\", None)\n if requested_domain is not None:\n domain_name = requested_domain.name\n values[\"requested_domain\"] = Domain.sld(domain_name)\n return values\n\n def clean_requested_domain(self):\n \"\"\"Validation code for domain names.\"\"\"\n requested = self.cleaned_data.get(\"requested_domain\", None)\n validated, _ = DraftDomain.validate_and_handle_errors(\n domain=requested,\n return_type=ValidationReturnType.FORM_VALIDATION_ERROR,\n )\n return validated\n\n requested_domain = forms.CharField(\n label=\"What .gov domain do you want?\",\n error_messages={\n \"required\": DOMAIN_API_MESSAGES[\"required\"],\n },\n )\n\n\nclass PurposeForm(RegistrarForm):\n purpose = forms.CharField(\n label=\"Purpose\",\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n error_messages={\"required\": \"Describe how you’ll use the .gov domain you’re requesting.\"},\n )\n\n\nclass YourContactForm(RegistrarForm):\n JOIN = \"submitter\"\n\n def to_database(self, obj):\n if not self.is_valid():\n return\n contact = getattr(obj, \"submitter\", None)\n if contact is not None and not contact.has_more_than_one_join(\"submitted_applications\"):\n # if contact exists in the database and is not joined to other entities\n super().to_database(contact)\n else:\n # no contact exists OR contact exists which is joined also to other entities;\n # in either case, create 
a new contact and update it\n contact = Contact()\n super().to_database(contact)\n obj.submitter = contact\n obj.save()\n\n @classmethod\n def from_database(cls, obj):\n contact = getattr(obj, \"submitter\", None)\n return super().from_database(contact)\n\n first_name = forms.CharField(\n label=\"First name / given name\",\n error_messages={\"required\": \"Enter your first name / given name.\"},\n )\n middle_name = forms.CharField(\n required=False,\n label=\"Middle name (optional)\",\n )\n last_name = forms.CharField(\n label=\"Last name / family name\",\n error_messages={\"required\": \"Enter your last name / family name.\"},\n )\n title = forms.CharField(\n label=\"Title or role in your organization\",\n error_messages={\n \"required\": (\"Enter your title or role in your organization (e.g., Chief Information Officer).\")\n },\n )\n email = forms.EmailField(\n label=\"Email\",\n error_messages={\"invalid\": (\"Enter your email address in the required format, like [email protected].\")},\n )\n phone = PhoneNumberField(\n label=\"Phone\",\n error_messages={\"invalid\": \"Enter a valid 10-digit phone number.\", \"required\": \"Enter your phone number.\"},\n )\n\n\nclass OtherContactsYesNoForm(RegistrarForm):\n def __init__(self, *args, **kwargs):\n \"\"\"Extend the initialization of the form from RegistrarForm __init__\"\"\"\n super().__init__(*args, **kwargs)\n # set the initial value based on attributes of application\n if self.application and self.application.has_other_contacts():\n initial_value = True\n elif self.application and self.application.has_rationale():\n initial_value = False\n else:\n # No pre-selection for new applications\n initial_value = None\n\n self.fields[\"has_other_contacts\"] = forms.TypedChoiceField(\n coerce=lambda x: x.lower() == \"true\" if x is not None else None, # coerce strings to bool, excepting None\n choices=((True, \"Yes, I can name other employees.\"), (False, \"No. 
(We’ll ask you to explain why.)\")),\n initial=initial_value,\n widget=forms.RadioSelect,\n error_messages={\n \"required\": \"This question is required.\",\n },\n )\n\n\nclass OtherContactsForm(RegistrarForm):\n first_name = forms.CharField(\n label=\"First name / given name\",\n error_messages={\"required\": \"Enter the first name / given name of this contact.\"},\n )\n middle_name = forms.CharField(\n required=False,\n label=\"Middle name (optional)\",\n )\n last_name = forms.CharField(\n label=\"Last name / family name\",\n error_messages={\"required\": \"Enter the last name / family name of this contact.\"},\n )\n title = forms.CharField(\n label=\"Title or role in your organization\",\n error_messages={\n \"required\": (\n \"Enter the title or role in your organization of this contact (e.g., Chief Information Officer).\"\n )\n },\n )\n email = forms.EmailField(\n label=\"Email\",\n error_messages={\n \"required\": (\"Enter an email address in the required format, like [email protected].\"),\n \"invalid\": (\"Enter an email address in the required format, like [email protected].\"),\n },\n )\n phone = PhoneNumberField(\n label=\"Phone\",\n error_messages={\n \"invalid\": \"Enter a valid 10-digit phone number.\",\n \"required\": \"Enter a phone number for this contact.\",\n },\n )\n\n def __init__(self, *args, **kwargs):\n \"\"\"\n Override the __init__ method for RegistrarForm.\n Set form_data_marked_for_deletion to false.\n Empty_permitted set to False, as this is overridden in certain circumstances by\n Django's BaseFormSet, and results in empty forms being allowed and field level\n errors not appropriately raised. This works with code in the view which appropriately\n displays required attributes on fields.\n \"\"\"\n self.form_data_marked_for_deletion = False\n super().__init__(*args, **kwargs)\n self.empty_permitted = False\n\n def mark_form_for_deletion(self):\n self.form_data_marked_for_deletion = True\n\n def clean(self):\n \"\"\"\n This method overrides the default behavior for forms.\n This cleans the form after field validation has already taken place.\n In this override, allow for a form which is deleted by user or marked for\n deletion by formset to be considered valid even though certain required fields have\n not passed field validation\n \"\"\"\n if self.form_data_marked_for_deletion or self.cleaned_data.get(\"DELETE\"):\n # clear any errors raised by the form fields\n # (before this clean() method is run, each field\n # performs its own clean, which could result in\n # errors that we wish to ignore at this point)\n #\n # NOTE: we cannot just clear() the errors list.\n # That causes problems.\n for field in self.fields:\n if field in self.errors:\n del self.errors[field]\n # return empty object with only 'delete' attribute defined.\n # this will prevent _to_database from creating an empty\n # database object\n return {\"DELETE\": True}\n\n return self.cleaned_data\n\n\nclass BaseOtherContactsFormSet(RegistrarFormSet):\n \"\"\"\n FormSet for Other Contacts\n\n There are two conditions by which a form in the formset can be marked for deletion.\n One is if the user clicks 'DELETE' button, and this is submitted in the form. The\n other is if the YesNo form, which is submitted with this formset, is set to No; in\n this case, all forms in formset are marked for deletion. Both of these conditions\n must co-exist.\n Also, other_contacts have db relationships to multiple db objects. 
When attempting\n to delete an other_contact from an application, those db relationships must be\n tested and handled.\n \"\"\"\n\n JOIN = \"other_contacts\"\n\n def get_deletion_widget(self):\n return forms.HiddenInput(attrs={\"class\": \"deletion\"})\n\n def __init__(self, *args, **kwargs):\n \"\"\"\n Override __init__ for RegistrarFormSet.\n \"\"\"\n self.formset_data_marked_for_deletion = False\n self.application = kwargs.pop(\"application\", None)\n super(RegistrarFormSet, self).__init__(*args, **kwargs)\n # quick workaround to ensure that the HTML `required`\n # attribute shows up on required fields for the first form\n # in the formset plus those that have data already.\n for index in range(max(self.initial_form_count(), 1)):\n self.forms[index].use_required_attribute = True\n\n def should_delete(self, cleaned):\n \"\"\"\n Implements should_delete method from BaseFormSet.\n \"\"\"\n return self.formset_data_marked_for_deletion or cleaned.get(\"DELETE\", False)\n\n def pre_create(self, db_obj, cleaned):\n \"\"\"Code to run before an item in the formset is created in the database.\"\"\"\n # remove DELETE from cleaned\n if \"DELETE\" in cleaned:\n cleaned.pop(\"DELETE\")\n return cleaned\n\n def to_database(self, obj: DomainApplication):\n self._to_database(obj, self.JOIN, self.should_delete, self.pre_update, self.pre_create)\n\n @classmethod\n def from_database(cls, obj):\n return super().from_database(obj, cls.JOIN, cls.on_fetch)\n\n def mark_formset_for_deletion(self):\n \"\"\"Mark other contacts formset for deletion.\n Updates forms in formset as well to mark them for deletion.\n This has an effect on validity checks and to_database methods.\n \"\"\"\n self.formset_data_marked_for_deletion = True\n for form in self.forms:\n form.mark_form_for_deletion()\n\n def is_valid(self):\n \"\"\"Extend is_valid from RegistrarFormSet. 
When marking this formset for deletion, set\n validate_min to false so that validation does not attempt to enforce a minimum\n number of other contacts when contacts marked for deletion\"\"\"\n if self.formset_data_marked_for_deletion:\n self.validate_min = False\n return super().is_valid()\n\n\nOtherContactsFormSet = forms.formset_factory(\n OtherContactsForm,\n extra=0,\n absolute_max=1500, # django default; use `max_num` to limit entries\n min_num=1,\n can_delete=True,\n validate_min=True,\n formset=BaseOtherContactsFormSet,\n)\n\n\nclass NoOtherContactsForm(RegistrarForm):\n no_other_contacts_rationale = forms.CharField(\n required=True,\n # label has to end in a space to get the label_suffix to show\n label=(\"No other employees rationale\"),\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n error_messages={\"required\": (\"Rationale for no other employees is required.\")},\n )\n\n def __init__(self, *args, **kwargs):\n self.form_data_marked_for_deletion = False\n super().__init__(*args, **kwargs)\n\n def mark_form_for_deletion(self):\n \"\"\"Marks no_other_contacts form for deletion.\n This changes behavior of validity checks and to_database\n methods.\"\"\"\n self.form_data_marked_for_deletion = True\n\n def clean(self):\n \"\"\"\n This method overrides the default behavior for forms.\n This cleans the form after field validation has already taken place.\n In this override, remove errors associated with the form if form data\n is marked for deletion.\n \"\"\"\n\n if self.form_data_marked_for_deletion:\n # clear any errors raised by the form fields\n # (before this clean() method is run, each field\n # performs its own clean, which could result in\n # errors that we wish to ignore at this point)\n #\n # NOTE: we cannot just clear() the errors list.\n # That causes problems.\n for field in self.fields:\n if field in self.errors:\n del self.errors[field]\n\n return self.cleaned_data\n\n def to_database(self, obj):\n \"\"\"\n This method overrides the behavior of RegistrarForm.\n If form data is marked for deletion, set relevant fields\n to None before saving.\n Do nothing if form is not valid.\n \"\"\"\n if not self.is_valid():\n return\n if self.form_data_marked_for_deletion:\n for field_name, _ in self.fields.items():\n setattr(obj, field_name, None)\n else:\n for name, value in self.cleaned_data.items():\n setattr(obj, name, value)\n obj.save()\n\n\nclass AnythingElseForm(RegistrarForm):\n anything_else = forms.CharField(\n required=False,\n label=\"Anything else?\",\n widget=forms.Textarea(),\n validators=[\n MaxLengthValidator(\n 1000,\n message=\"Response must be less than 1000 characters.\",\n )\n ],\n )\n\n\nclass RequirementsForm(RegistrarForm):\n is_policy_acknowledged = forms.BooleanField(\n label=\"I read and agree to the requirements for operating a .gov domain.\",\n error_messages={\n \"required\": (\"Check the box if you read and agree to the requirements for operating a .gov domain.\")\n },\n )\n",
"path": "src/registrar/forms/application_wizard.py"
}
] | diff --git a/src/registrar/assets/js/get-gov.js b/src/registrar/assets/js/get-gov.js
index de7ef6172..52f88bb1d 100644
--- a/src/registrar/assets/js/get-gov.js
+++ b/src/registrar/assets/js/get-gov.js
@@ -130,7 +130,7 @@ function inlineToast(el, id, style, msg) {
}
}
-function _checkDomainAvailability(el) {
+function checkDomainAvailability(el) {
const callback = (response) => {
toggleInputValidity(el, (response && response.available), msg=response.message);
announce(el.id, response.message);
@@ -154,9 +154,6 @@ function _checkDomainAvailability(el) {
fetchJSON(`available/?domain=${el.value}`, callback);
}
-/** Call the API to see if the domain is good. */
-const checkDomainAvailability = debounce(_checkDomainAvailability);
-
/** Hides the toast message and clears the aira live region. */
function clearDomainAvailability(el) {
el.classList.remove('usa-input--success');
@@ -206,13 +203,33 @@ function handleInputValidation(e) {
}
/** On button click, handles running any associated validators. */
-function handleValidationClick(e) {
+function validateFieldInput(e) {
const attribute = e.target.getAttribute("validate-for") || "";
if (!attribute.length) return;
const input = document.getElementById(attribute);
+ removeFormErrors(input, true);
runValidators(input);
}
+
+function validateFormsetInputs(e, availabilityButton) {
+
+ // Collect input IDs from the repeatable forms
+ let inputs = Array.from(document.querySelectorAll('.repeatable-form input'))
+
+ // Run validators for each input
+ inputs.forEach(input => {
+ runValidators(input);
+ removeFormErrors(input, true);
+ });
+
+ // Set the validate-for attribute on the button with the collected input IDs
+ // Not needed for functionality but nice for accessibility
+ inputs = inputs.map(input => input.id).join(', ');
+ availabilityButton.setAttribute('validate-for', inputs);
+
+}
+
// <<>><<>><<>><<>><<>><<>><<>><<>><<>><<>><<>><<>><<>><<>><<>>
// Initialization code.
@@ -232,14 +249,64 @@ function handleValidationClick(e) {
for(const input of needsValidation) {
input.addEventListener('input', handleInputValidation);
}
+ const alternativeDomainsAvailability = document.getElementById('validate-alt-domains-availability');
const activatesValidation = document.querySelectorAll('[validate-for]');
+
for(const button of activatesValidation) {
- button.addEventListener('click', handleValidationClick);
+ // Adds multi-field validation for alternative domains
+ if (button === alternativeDomainsAvailability) {
+ button.addEventListener('click', (e) => {
+ validateFormsetInputs(e, alternativeDomainsAvailability)
+ });
+ } else {
+ button.addEventListener('click', validateFieldInput);
+ }
}
})();
/**
- * Delete method for formsets that diff in the view and delete in the model (Nameservers, DS Data)
+ * Removes form errors surrounding a form input
+ */
+function removeFormErrors(input, removeStaleAlerts=false){
+ // Remove error message
+ let errorMessage = document.getElementById(`${input.id}__error-message`);
+ if (errorMessage) {
+ errorMessage.remove();
+ }else{
+ return
+ }
+
+ // Remove error classes
+ if (input.classList.contains('usa-input--error')) {
+ input.classList.remove('usa-input--error');
+ }
+
+ // Get the form label
+ let label = document.querySelector(`label[for="${input.id}"]`);
+ if (label) {
+ label.classList.remove('usa-label--error');
+
+ // Remove error classes from parent div
+ let parentDiv = label.parentElement;
+ if (parentDiv) {
+ parentDiv.classList.remove('usa-form-group--error');
+ }
+ }
+
+ if (removeStaleAlerts){
+ let staleAlerts = document.querySelectorAll(".usa-alert--error")
+ for (let alert of staleAlerts){
+ // Don't remove the error associated with the input
+ if (alert.id !== `${input.id}--toast`) {
+ alert.remove()
+ }
+ }
+ }
+}
+
+/**
+ * Prepare the namerservers and DS data forms delete buttons
+ * We will call this on the forms init, and also every time we add a form
*
*/
function removeForm(e, formLabel, isNameserversForm, addButton, formIdentifier){
@@ -460,6 +527,7 @@ function hideDeletedForms() {
let isNameserversForm = document.querySelector(".nameservers-form");
let isOtherContactsForm = document.querySelector(".other-contacts-form");
let isDsDataForm = document.querySelector(".ds-data-form");
+ let isDotgovDomain = document.querySelector(".dotgov-domain-form");
// The Nameservers formset features 2 required and 11 optionals
if (isNameserversForm) {
cloneIndex = 2;
@@ -472,6 +540,8 @@ function hideDeletedForms() {
formLabel = "Organization contact";
container = document.querySelector("#other-employees");
formIdentifier = "other_contacts"
+ } else if (isDotgovDomain) {
+ formIdentifier = "dotgov_domain"
}
let totalForms = document.querySelector(`#id_${formIdentifier}-TOTAL_FORMS`);
@@ -554,6 +624,7 @@ function hideDeletedForms() {
// Reset the values of each input to blank
inputs.forEach((input) => {
input.classList.remove("usa-input--error");
+ input.classList.remove("usa-input--success");
if (input.type === "text" || input.type === "number" || input.type === "password" || input.type === "email" || input.type === "tel") {
input.value = ""; // Set the value to an empty string
@@ -566,22 +637,25 @@ function hideDeletedForms() {
let selects = newForm.querySelectorAll("select");
selects.forEach((select) => {
select.classList.remove("usa-input--error");
+ select.classList.remove("usa-input--success");
select.selectedIndex = 0; // Set the value to an empty string
});
let labels = newForm.querySelectorAll("label");
labels.forEach((label) => {
label.classList.remove("usa-label--error");
+ label.classList.remove("usa-label--success");
});
let usaFormGroups = newForm.querySelectorAll(".usa-form-group");
usaFormGroups.forEach((usaFormGroup) => {
usaFormGroup.classList.remove("usa-form-group--error");
+ usaFormGroup.classList.remove("usa-form-group--success");
});
- // Remove any existing error messages
- let usaErrorMessages = newForm.querySelectorAll(".usa-error-message");
- usaErrorMessages.forEach((usaErrorMessage) => {
+ // Remove any existing error and success messages
+ let usaMessages = newForm.querySelectorAll(".usa-error-message, .usa-alert");
+ usaMessages.forEach((usaErrorMessage) => {
let parentDiv = usaErrorMessage.closest('div');
if (parentDiv) {
parentDiv.remove(); // Remove the parent div if it exists
@@ -592,7 +666,8 @@ function hideDeletedForms() {
// Attach click event listener on the delete buttons of the new form
let newDeleteButton = newForm.querySelector(".delete-record");
- prepareNewDeleteButton(newDeleteButton, formLabel);
+ if (newDeleteButton)
+ prepareNewDeleteButton(newDeleteButton, formLabel);
// Disable the add more button if we have 13 forms
if (isNameserversForm && formNum == 13) {
diff --git a/src/registrar/forms/application_wizard.py b/src/registrar/forms/application_wizard.py
index 85ce28bb6..1ee7e0036 100644
--- a/src/registrar/forms/application_wizard.py
+++ b/src/registrar/forms/application_wizard.py
@@ -420,7 +420,7 @@ def clean_alternative_domain(self):
alternative_domain = forms.CharField(
required=False,
- label="",
+ label="Alternative domain",
)
diff --git a/src/registrar/templates/application_dotgov_domain.html b/src/registrar/templates/application_dotgov_domain.html
index 1838f33f4..f5b31fb15 100644
--- a/src/registrar/templates/application_dotgov_domain.html
+++ b/src/registrar/templates/application_dotgov_domain.html
@@ -50,14 +50,14 @@ <h2>What .gov domain do you want?</h2>
<button
id="check-availability-button"
type="button"
- class="usa-button"
+ class="usa-button usa-button--outline"
validate-for="{{ forms.0.requested_domain.auto_id }}"
>Check availability</button>
</fieldset>
{{ forms.1.management_form }}
- <fieldset class="usa-fieldset margin-top-1">
+ <fieldset class="usa-fieldset margin-top-1 dotgov-domain-form" id="form-container">
<legend>
<h2>Alternative domains (optional)</h2>
</legend>
@@ -66,23 +66,34 @@ <h2>Alternative domains (optional)</h2>
you your first choice?</p>
{% with attr_aria_describedby="alt_domain_instructions" %}
- {# attr_validate / validate="domain" invokes code in get-gov.js #}
- {# attr_auto_validate likewise triggers behavior in get-gov.js #}
- {% with append_gov=True attr_validate="domain" attr_auto_validate=True %}
- {% with add_class="blank-ok alternate-domain-input" %}
- {% for form in forms.1 %}
+ {# Will probably want to remove blank-ok and do related cleanup when we implement delete #}
+ {% with attr_validate="domain" append_gov=True add_label_class="usa-sr-only" add_class="blank-ok alternate-domain-input" %}
+ {% for form in forms.1 %}
+ <div class="repeatable-form">
{% input_with_errors form.alternative_domain %}
- {% endfor %}
- {% endwith %}
+ </div>
+ {% endfor %}
{% endwith %}
{% endwith %}
- <button type="submit" name="submit_button" value="save" class="usa-button usa-button--unstyled">
+ <button type="button" value="save" class="usa-button usa-button--unstyled" id="add-form">
<svg class="usa-icon" aria-hidden="true" focusable="false" role="img" width="24" height="24">
<use xlink:href="{%static 'img/sprite.svg'%}#add_circle"></use>
</svg><span class="margin-left-05">Add another alternative</span>
</button>
- </fieldset>
+ <div class="margin-bottom-3">
+ <button
+ id="validate-alt-domains-availability"
+ type="button"
+ class="usa-button usa-button--outline"
+ validate-for="{{ forms.1.requested_domain.auto_id }}"
+ >Check availability</button>
+ </div>
+
+
+ <p class="margin-top-05">If you’re not sure this is the domain you want, that’s ok. You can change the domain later. </p>
+
+</fieldset>
{% endblock %}
|
pyca__cryptography-3010 | rsa.rsa_recover_prime_factors() should return p > q
The documentation for `rsa_recover_prime_factors()` warns that it returns `p` and `q` such that `p < q`. However, libraries such as OpenSSL and BoringSSL appear to require that `p > q`. Given this, would it be feasible to change the ordering in cryptography so that it lines up with OpenSSL?
See also: http://crypto.stackexchange.com/questions/18084/in-rsa-why-does-p-have-to-be-bigger-than-q-where-n-p-times-q. @briansmith can provide more commentary if needed.
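A minimal caller-side sketch of how the ordering can be normalized today, before any library change (the throwaway key generation below is only there to produce a valid `(n, e, d)` triple for illustration; it is not part of the proposed fix):

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

# Throwaway key, used only to obtain a valid (n, e, d) triple for the demo.
key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())
numbers = key.private_numbers()
n, e, d = numbers.public_numbers.n, numbers.public_numbers.e, numbers.d

p, q = rsa.rsa_recover_prime_factors(n, e, d)
if p < q:
    # The library currently documents p < q; OpenSSL/BoringSSL expect p > q,
    # so swap before deriving the CRT parameters.
    p, q = q, p

iqmp = rsa.rsa_crt_iqmp(p, q)
dmp1 = rsa.rsa_crt_dmp1(d, p)
dmq1 = rsa.rsa_crt_dmq1(d, q)
```

Swapping `p` and `q` (and recomputing `iqmp`, `dmp1`, and `dmq1` accordingly) describes the same private key, which is why reordering inside the library is a compatible change for callers that derive the CRT values themselves.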
| [
{
"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport abc\nfrom fractions import gcd\n\nimport six\n\nfrom cryptography import utils\nfrom cryptography.exceptions import UnsupportedAlgorithm, _Reasons\nfrom cryptography.hazmat.backends.interfaces import RSABackend\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass RSAPrivateKey(object):\n @abc.abstractmethod\n def signer(self, padding, algorithm):\n \"\"\"\n Returns an AsymmetricSignatureContext used for signing data.\n \"\"\"\n\n @abc.abstractmethod\n def decrypt(self, ciphertext, padding):\n \"\"\"\n Decrypts the provided ciphertext.\n \"\"\"\n\n @abc.abstractproperty\n def key_size(self):\n \"\"\"\n The bit length of the public modulus.\n \"\"\"\n\n @abc.abstractmethod\n def public_key(self):\n \"\"\"\n The RSAPublicKey associated with this private key.\n \"\"\"\n\n @abc.abstractmethod\n def sign(self, data, padding, algorithm):\n \"\"\"\n Signs the data.\n \"\"\"\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass RSAPrivateKeyWithSerialization(RSAPrivateKey):\n @abc.abstractmethod\n def private_numbers(self):\n \"\"\"\n Returns an RSAPrivateNumbers.\n \"\"\"\n\n @abc.abstractmethod\n def private_bytes(self, encoding, format, encryption_algorithm):\n \"\"\"\n Returns the key serialized as bytes.\n \"\"\"\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass RSAPublicKey(object):\n @abc.abstractmethod\n def verifier(self, signature, padding, algorithm):\n \"\"\"\n Returns an AsymmetricVerificationContext used for verifying signatures.\n \"\"\"\n\n @abc.abstractmethod\n def encrypt(self, plaintext, padding):\n \"\"\"\n Encrypts the given plaintext.\n \"\"\"\n\n @abc.abstractproperty\n def key_size(self):\n \"\"\"\n The bit length of the public modulus.\n \"\"\"\n\n @abc.abstractmethod\n def public_numbers(self):\n \"\"\"\n Returns an RSAPublicNumbers\n \"\"\"\n\n @abc.abstractmethod\n def public_bytes(self, encoding, format):\n \"\"\"\n Returns the key serialized as bytes.\n \"\"\"\n\n @abc.abstractmethod\n def verify(self, signature, data, padding, algorithm):\n \"\"\"\n Verifies the signature of the data.\n \"\"\"\n\n\nRSAPublicKeyWithSerialization = RSAPublicKey\n\n\ndef generate_private_key(public_exponent, key_size, backend):\n if not isinstance(backend, RSABackend):\n raise UnsupportedAlgorithm(\n \"Backend object does not implement RSABackend.\",\n _Reasons.BACKEND_MISSING_INTERFACE\n )\n\n _verify_rsa_parameters(public_exponent, key_size)\n return backend.generate_rsa_private_key(public_exponent, key_size)\n\n\ndef _verify_rsa_parameters(public_exponent, key_size):\n if public_exponent < 3:\n raise ValueError(\"public_exponent must be >= 3.\")\n\n if public_exponent & 1 == 0:\n raise ValueError(\"public_exponent must be odd.\")\n\n if key_size < 512:\n raise ValueError(\"key_size must be at least 512-bits.\")\n\n\ndef _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,\n public_exponent, modulus):\n if modulus < 3:\n raise ValueError(\"modulus must be >= 3.\")\n\n if p >= modulus:\n raise ValueError(\"p must be < modulus.\")\n\n if q >= modulus:\n raise ValueError(\"q must be < modulus.\")\n\n if dmp1 >= modulus:\n raise ValueError(\"dmp1 must be < modulus.\")\n\n if dmq1 >= modulus:\n raise ValueError(\"dmq1 must be < modulus.\")\n\n if iqmp >= modulus:\n raise ValueError(\"iqmp must be < 
modulus.\")\n\n if private_exponent >= modulus:\n raise ValueError(\"private_exponent must be < modulus.\")\n\n if public_exponent < 3 or public_exponent >= modulus:\n raise ValueError(\"public_exponent must be >= 3 and < modulus.\")\n\n if public_exponent & 1 == 0:\n raise ValueError(\"public_exponent must be odd.\")\n\n if dmp1 & 1 == 0:\n raise ValueError(\"dmp1 must be odd.\")\n\n if dmq1 & 1 == 0:\n raise ValueError(\"dmq1 must be odd.\")\n\n if p * q != modulus:\n raise ValueError(\"p*q must equal modulus.\")\n\n\ndef _check_public_key_components(e, n):\n if n < 3:\n raise ValueError(\"n must be >= 3.\")\n\n if e < 3 or e >= n:\n raise ValueError(\"e must be >= 3 and < n.\")\n\n if e & 1 == 0:\n raise ValueError(\"e must be odd.\")\n\n\ndef _modinv(e, m):\n \"\"\"\n Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1\n \"\"\"\n x1, y1, x2, y2 = 1, 0, 0, 1\n a, b = e, m\n while b > 0:\n q, r = divmod(a, b)\n xn, yn = x1 - q * x2, y1 - q * y2\n a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn\n return x1 % m\n\n\ndef rsa_crt_iqmp(p, q):\n \"\"\"\n Compute the CRT (q ** -1) % p value from RSA primes p and q.\n \"\"\"\n return _modinv(q, p)\n\n\ndef rsa_crt_dmp1(private_exponent, p):\n \"\"\"\n Compute the CRT private_exponent % (p - 1) value from the RSA\n private_exponent (d) and p.\n \"\"\"\n return private_exponent % (p - 1)\n\n\ndef rsa_crt_dmq1(private_exponent, q):\n \"\"\"\n Compute the CRT private_exponent % (q - 1) value from the RSA\n private_exponent (d) and q.\n \"\"\"\n return private_exponent % (q - 1)\n\n\n# Controls the number of iterations rsa_recover_prime_factors will perform\n# to obtain the prime factors. Each iteration increments by 2 so the actual\n# maximum attempts is half this number.\n_MAX_RECOVERY_ATTEMPTS = 1000\n\n\ndef rsa_recover_prime_factors(n, e, d):\n \"\"\"\n Compute factors p and q from the private exponent d. We assume that n has\n no more than two factors. This function is adapted from code in PyCrypto.\n \"\"\"\n # See 8.2.2(i) in Handbook of Applied Cryptography.\n ktot = d * e - 1\n # The quantity d*e-1 is a multiple of phi(n), even,\n # and can be represented as t*2^s.\n t = ktot\n while t % 2 == 0:\n t = t // 2\n # Cycle through all multiplicative inverses in Zn.\n # The algorithm is non-deterministic, but there is a 50% chance\n # any candidate a leads to successful factoring.\n # See \"Digitalized Signatures and Public Key Functions as Intractable\n # as Factorization\", M. Rabin, 1979\n spotted = False\n a = 2\n while not spotted and a < _MAX_RECOVERY_ATTEMPTS:\n k = t\n # Cycle through all values a^{t*2^i}=a^k\n while k < ktot:\n cand = pow(a, k, n)\n # Check if a^k is a non-trivial root of unity (mod n)\n if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:\n # We have found a number such that (cand-1)(cand+1)=0 (mod n).\n # Either of the terms divides n.\n p = gcd(cand + 1, n)\n spotted = True\n break\n k *= 2\n # This value was not any good... 
let's try another!\n a += 2\n if not spotted:\n raise ValueError(\"Unable to compute factors p and q from exponent d.\")\n # Found !\n q, r = divmod(n, p)\n assert r == 0\n\n return (p, q)\n\n\nclass RSAPrivateNumbers(object):\n def __init__(self, p, q, d, dmp1, dmq1, iqmp,\n public_numbers):\n if (\n not isinstance(p, six.integer_types) or\n not isinstance(q, six.integer_types) or\n not isinstance(d, six.integer_types) or\n not isinstance(dmp1, six.integer_types) or\n not isinstance(dmq1, six.integer_types) or\n not isinstance(iqmp, six.integer_types)\n ):\n raise TypeError(\n \"RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must\"\n \" all be an integers.\"\n )\n\n if not isinstance(public_numbers, RSAPublicNumbers):\n raise TypeError(\n \"RSAPrivateNumbers public_numbers must be an RSAPublicNumbers\"\n \" instance.\"\n )\n\n self._p = p\n self._q = q\n self._d = d\n self._dmp1 = dmp1\n self._dmq1 = dmq1\n self._iqmp = iqmp\n self._public_numbers = public_numbers\n\n p = utils.read_only_property(\"_p\")\n q = utils.read_only_property(\"_q\")\n d = utils.read_only_property(\"_d\")\n dmp1 = utils.read_only_property(\"_dmp1\")\n dmq1 = utils.read_only_property(\"_dmq1\")\n iqmp = utils.read_only_property(\"_iqmp\")\n public_numbers = utils.read_only_property(\"_public_numbers\")\n\n def private_key(self, backend):\n return backend.load_rsa_private_numbers(self)\n\n def __eq__(self, other):\n if not isinstance(other, RSAPrivateNumbers):\n return NotImplemented\n\n return (\n self.p == other.p and\n self.q == other.q and\n self.d == other.d and\n self.dmp1 == other.dmp1 and\n self.dmq1 == other.dmq1 and\n self.iqmp == other.iqmp and\n self.public_numbers == other.public_numbers\n )\n\n def __ne__(self, other):\n return not self == other\n\n def __hash__(self):\n return hash((\n self.p,\n self.q,\n self.d,\n self.dmp1,\n self.dmq1,\n self.iqmp,\n self.public_numbers,\n ))\n\n\nclass RSAPublicNumbers(object):\n def __init__(self, e, n):\n if (\n not isinstance(e, six.integer_types) or\n not isinstance(n, six.integer_types)\n ):\n raise TypeError(\"RSAPublicNumbers arguments must be integers.\")\n\n self._e = e\n self._n = n\n\n e = utils.read_only_property(\"_e\")\n n = utils.read_only_property(\"_n\")\n\n def public_key(self, backend):\n return backend.load_rsa_public_numbers(self)\n\n def __repr__(self):\n return \"<RSAPublicNumbers(e={0.e}, n={0.n})>\".format(self)\n\n def __eq__(self, other):\n if not isinstance(other, RSAPublicNumbers):\n return NotImplemented\n\n return self.e == other.e and self.n == other.n\n\n def __ne__(self, other):\n return not self == other\n\n def __hash__(self):\n return hash((self.e, self.n))\n",
"path": "src/cryptography/hazmat/primitives/asymmetric/rsa.py"
}
] | [
{
"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport abc\nfrom fractions import gcd\n\nimport six\n\nfrom cryptography import utils\nfrom cryptography.exceptions import UnsupportedAlgorithm, _Reasons\nfrom cryptography.hazmat.backends.interfaces import RSABackend\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass RSAPrivateKey(object):\n @abc.abstractmethod\n def signer(self, padding, algorithm):\n \"\"\"\n Returns an AsymmetricSignatureContext used for signing data.\n \"\"\"\n\n @abc.abstractmethod\n def decrypt(self, ciphertext, padding):\n \"\"\"\n Decrypts the provided ciphertext.\n \"\"\"\n\n @abc.abstractproperty\n def key_size(self):\n \"\"\"\n The bit length of the public modulus.\n \"\"\"\n\n @abc.abstractmethod\n def public_key(self):\n \"\"\"\n The RSAPublicKey associated with this private key.\n \"\"\"\n\n @abc.abstractmethod\n def sign(self, data, padding, algorithm):\n \"\"\"\n Signs the data.\n \"\"\"\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass RSAPrivateKeyWithSerialization(RSAPrivateKey):\n @abc.abstractmethod\n def private_numbers(self):\n \"\"\"\n Returns an RSAPrivateNumbers.\n \"\"\"\n\n @abc.abstractmethod\n def private_bytes(self, encoding, format, encryption_algorithm):\n \"\"\"\n Returns the key serialized as bytes.\n \"\"\"\n\n\[email protected]_metaclass(abc.ABCMeta)\nclass RSAPublicKey(object):\n @abc.abstractmethod\n def verifier(self, signature, padding, algorithm):\n \"\"\"\n Returns an AsymmetricVerificationContext used for verifying signatures.\n \"\"\"\n\n @abc.abstractmethod\n def encrypt(self, plaintext, padding):\n \"\"\"\n Encrypts the given plaintext.\n \"\"\"\n\n @abc.abstractproperty\n def key_size(self):\n \"\"\"\n The bit length of the public modulus.\n \"\"\"\n\n @abc.abstractmethod\n def public_numbers(self):\n \"\"\"\n Returns an RSAPublicNumbers\n \"\"\"\n\n @abc.abstractmethod\n def public_bytes(self, encoding, format):\n \"\"\"\n Returns the key serialized as bytes.\n \"\"\"\n\n @abc.abstractmethod\n def verify(self, signature, data, padding, algorithm):\n \"\"\"\n Verifies the signature of the data.\n \"\"\"\n\n\nRSAPublicKeyWithSerialization = RSAPublicKey\n\n\ndef generate_private_key(public_exponent, key_size, backend):\n if not isinstance(backend, RSABackend):\n raise UnsupportedAlgorithm(\n \"Backend object does not implement RSABackend.\",\n _Reasons.BACKEND_MISSING_INTERFACE\n )\n\n _verify_rsa_parameters(public_exponent, key_size)\n return backend.generate_rsa_private_key(public_exponent, key_size)\n\n\ndef _verify_rsa_parameters(public_exponent, key_size):\n if public_exponent < 3:\n raise ValueError(\"public_exponent must be >= 3.\")\n\n if public_exponent & 1 == 0:\n raise ValueError(\"public_exponent must be odd.\")\n\n if key_size < 512:\n raise ValueError(\"key_size must be at least 512-bits.\")\n\n\ndef _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,\n public_exponent, modulus):\n if modulus < 3:\n raise ValueError(\"modulus must be >= 3.\")\n\n if p >= modulus:\n raise ValueError(\"p must be < modulus.\")\n\n if q >= modulus:\n raise ValueError(\"q must be < modulus.\")\n\n if dmp1 >= modulus:\n raise ValueError(\"dmp1 must be < modulus.\")\n\n if dmq1 >= modulus:\n raise ValueError(\"dmq1 must be < modulus.\")\n\n if iqmp >= modulus:\n raise ValueError(\"iqmp must be < 
modulus.\")\n\n if private_exponent >= modulus:\n raise ValueError(\"private_exponent must be < modulus.\")\n\n if public_exponent < 3 or public_exponent >= modulus:\n raise ValueError(\"public_exponent must be >= 3 and < modulus.\")\n\n if public_exponent & 1 == 0:\n raise ValueError(\"public_exponent must be odd.\")\n\n if dmp1 & 1 == 0:\n raise ValueError(\"dmp1 must be odd.\")\n\n if dmq1 & 1 == 0:\n raise ValueError(\"dmq1 must be odd.\")\n\n if p * q != modulus:\n raise ValueError(\"p*q must equal modulus.\")\n\n\ndef _check_public_key_components(e, n):\n if n < 3:\n raise ValueError(\"n must be >= 3.\")\n\n if e < 3 or e >= n:\n raise ValueError(\"e must be >= 3 and < n.\")\n\n if e & 1 == 0:\n raise ValueError(\"e must be odd.\")\n\n\ndef _modinv(e, m):\n \"\"\"\n Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1\n \"\"\"\n x1, y1, x2, y2 = 1, 0, 0, 1\n a, b = e, m\n while b > 0:\n q, r = divmod(a, b)\n xn, yn = x1 - q * x2, y1 - q * y2\n a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn\n return x1 % m\n\n\ndef rsa_crt_iqmp(p, q):\n \"\"\"\n Compute the CRT (q ** -1) % p value from RSA primes p and q.\n \"\"\"\n return _modinv(q, p)\n\n\ndef rsa_crt_dmp1(private_exponent, p):\n \"\"\"\n Compute the CRT private_exponent % (p - 1) value from the RSA\n private_exponent (d) and p.\n \"\"\"\n return private_exponent % (p - 1)\n\n\ndef rsa_crt_dmq1(private_exponent, q):\n \"\"\"\n Compute the CRT private_exponent % (q - 1) value from the RSA\n private_exponent (d) and q.\n \"\"\"\n return private_exponent % (q - 1)\n\n\n# Controls the number of iterations rsa_recover_prime_factors will perform\n# to obtain the prime factors. Each iteration increments by 2 so the actual\n# maximum attempts is half this number.\n_MAX_RECOVERY_ATTEMPTS = 1000\n\n\ndef rsa_recover_prime_factors(n, e, d):\n \"\"\"\n Compute factors p and q from the private exponent d. We assume that n has\n no more than two factors. This function is adapted from code in PyCrypto.\n \"\"\"\n # See 8.2.2(i) in Handbook of Applied Cryptography.\n ktot = d * e - 1\n # The quantity d*e-1 is a multiple of phi(n), even,\n # and can be represented as t*2^s.\n t = ktot\n while t % 2 == 0:\n t = t // 2\n # Cycle through all multiplicative inverses in Zn.\n # The algorithm is non-deterministic, but there is a 50% chance\n # any candidate a leads to successful factoring.\n # See \"Digitalized Signatures and Public Key Functions as Intractable\n # as Factorization\", M. Rabin, 1979\n spotted = False\n a = 2\n while not spotted and a < _MAX_RECOVERY_ATTEMPTS:\n k = t\n # Cycle through all values a^{t*2^i}=a^k\n while k < ktot:\n cand = pow(a, k, n)\n # Check if a^k is a non-trivial root of unity (mod n)\n if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:\n # We have found a number such that (cand-1)(cand+1)=0 (mod n).\n # Either of the terms divides n.\n p = gcd(cand + 1, n)\n spotted = True\n break\n k *= 2\n # This value was not any good... 
let's try another!\n a += 2\n if not spotted:\n raise ValueError(\"Unable to compute factors p and q from exponent d.\")\n # Found !\n q, r = divmod(n, p)\n assert r == 0\n p, q = sorted((p, q), reverse=True)\n return (p, q)\n\n\nclass RSAPrivateNumbers(object):\n def __init__(self, p, q, d, dmp1, dmq1, iqmp,\n public_numbers):\n if (\n not isinstance(p, six.integer_types) or\n not isinstance(q, six.integer_types) or\n not isinstance(d, six.integer_types) or\n not isinstance(dmp1, six.integer_types) or\n not isinstance(dmq1, six.integer_types) or\n not isinstance(iqmp, six.integer_types)\n ):\n raise TypeError(\n \"RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must\"\n \" all be an integers.\"\n )\n\n if not isinstance(public_numbers, RSAPublicNumbers):\n raise TypeError(\n \"RSAPrivateNumbers public_numbers must be an RSAPublicNumbers\"\n \" instance.\"\n )\n\n self._p = p\n self._q = q\n self._d = d\n self._dmp1 = dmp1\n self._dmq1 = dmq1\n self._iqmp = iqmp\n self._public_numbers = public_numbers\n\n p = utils.read_only_property(\"_p\")\n q = utils.read_only_property(\"_q\")\n d = utils.read_only_property(\"_d\")\n dmp1 = utils.read_only_property(\"_dmp1\")\n dmq1 = utils.read_only_property(\"_dmq1\")\n iqmp = utils.read_only_property(\"_iqmp\")\n public_numbers = utils.read_only_property(\"_public_numbers\")\n\n def private_key(self, backend):\n return backend.load_rsa_private_numbers(self)\n\n def __eq__(self, other):\n if not isinstance(other, RSAPrivateNumbers):\n return NotImplemented\n\n return (\n self.p == other.p and\n self.q == other.q and\n self.d == other.d and\n self.dmp1 == other.dmp1 and\n self.dmq1 == other.dmq1 and\n self.iqmp == other.iqmp and\n self.public_numbers == other.public_numbers\n )\n\n def __ne__(self, other):\n return not self == other\n\n def __hash__(self):\n return hash((\n self.p,\n self.q,\n self.d,\n self.dmp1,\n self.dmq1,\n self.iqmp,\n self.public_numbers,\n ))\n\n\nclass RSAPublicNumbers(object):\n def __init__(self, e, n):\n if (\n not isinstance(e, six.integer_types) or\n not isinstance(n, six.integer_types)\n ):\n raise TypeError(\"RSAPublicNumbers arguments must be integers.\")\n\n self._e = e\n self._n = n\n\n e = utils.read_only_property(\"_e\")\n n = utils.read_only_property(\"_n\")\n\n def public_key(self, backend):\n return backend.load_rsa_public_numbers(self)\n\n def __repr__(self):\n return \"<RSAPublicNumbers(e={0.e}, n={0.n})>\".format(self)\n\n def __eq__(self, other):\n if not isinstance(other, RSAPublicNumbers):\n return NotImplemented\n\n return self.e == other.e and self.n == other.n\n\n def __ne__(self, other):\n return not self == other\n\n def __hash__(self):\n return hash((self.e, self.n))\n",
"path": "src/cryptography/hazmat/primitives/asymmetric/rsa.py"
}
] | diff --git a/docs/hazmat/primitives/asymmetric/rsa.rst b/docs/hazmat/primitives/asymmetric/rsa.rst
index 4cf9fa78673e..10e48b4a7078 100644
--- a/docs/hazmat/primitives/asymmetric/rsa.rst
+++ b/docs/hazmat/primitives/asymmetric/rsa.rst
@@ -509,7 +509,9 @@ this without having to do the math themselves.
.. note::
When recovering prime factors this algorithm will always return ``p``
- and ``q`` such that ``p < q``.
+ and ``q`` such that ``p > q``. Note: before 1.5, this function always
+ returned ``p`` and ``q`` such that ``p < q``. It was changed because
+ libraries commonly require ``p > q``.
:return: A tuple ``(p, q)``
diff --git a/src/cryptography/hazmat/primitives/asymmetric/rsa.py b/src/cryptography/hazmat/primitives/asymmetric/rsa.py
index d78b1b410f33..3157aed4d71f 100644
--- a/src/cryptography/hazmat/primitives/asymmetric/rsa.py
+++ b/src/cryptography/hazmat/primitives/asymmetric/rsa.py
@@ -257,7 +257,7 @@ def rsa_recover_prime_factors(n, e, d):
# Found !
q, r = divmod(n, p)
assert r == 0
-
+ p, q = sorted((p, q), reverse=True)
return (p, q)
diff --git a/tests/hazmat/primitives/test_rsa.py b/tests/hazmat/primitives/test_rsa.py
index e4e437804e0d..81e3f946046c 100644
--- a/tests/hazmat/primitives/test_rsa.py
+++ b/tests/hazmat/primitives/test_rsa.py
@@ -1985,10 +1985,11 @@ def test_recover_prime_factors(self, vector):
private["private_exponent"]
)
# Unfortunately there is no convention on which prime should be p
- # and which one q. The function we use always makes p < q, but the
+ # and which one q. The function we use always makes p > q, but the
# NIST vectors are not so consistent. Accordingly, we verify we've
# recovered the proper (p, q) by sorting them and asserting on that.
assert sorted([p, q]) == sorted([private["p"], private["q"]])
+ assert p > q
def test_invalid_recover_prime_factors(self):
with pytest.raises(ValueError):
|
ietf-tools__datatracker-5809 | Dev mode PDFization broken
### Describe the issue
The `STATIC_IETF_ORG_INTERNAL` setting used in https://github.com/ietf-tools/datatracker/blob/2bf7e8250c3fc2fcaf9a6223c331a52d1f6d89a4/ietf/doc/models.py#L630 causes a Python error in the dev environment.
CC @NGPixel
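For reference, the accompanying patch (in the diff below) changes the dev-only value so it points at the docker compose `static` service; a minimal sketch of the corrected setting in `docker/configs/settings_local.py`:

```python
# Dev-environment settings (docker/configs/settings_local.py).
STATIC_IETF_ORG = "/_static"
# Point at the compose "static" service; the previous value was
# "http://localhost:80", which does not reach the static assets here.
STATIC_IETF_ORG_INTERNAL = "http://static"
```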
### Code of Conduct
- [X] I agree to follow the [IETF's Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md)
| [
{
"content": "# Copyright The IETF Trust 2007-2019, All Rights Reserved\n# -*- coding: utf-8 -*-\n\nfrom ietf.settings import * # pyflakes:ignore\n\nALLOWED_HOSTS = ['*']\n\nfrom ietf.settings_postgresqldb import DATABASES # pyflakes:ignore\n\nIDSUBMIT_IDNITS_BINARY = \"/usr/local/bin/idnits\"\nIDSUBMIT_REPOSITORY_PATH = \"test/id/\"\nIDSUBMIT_STAGING_PATH = \"test/staging/\"\n\nAGENDA_PATH = '/assets/www6s/proceedings/'\nMEETINGHOST_LOGO_PATH = AGENDA_PATH\n\nUSING_DEBUG_EMAIL_SERVER=True\nEMAIL_HOST='localhost'\nEMAIL_PORT=2025\n\nMEDIA_BASE_DIR = '/assets'\nMEDIA_ROOT = MEDIA_BASE_DIR + '/media/'\nMEDIA_URL = '/media/'\n\nPHOTOS_DIRNAME = 'photo'\nPHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME\n\nSUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/'\nSUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/'\nSUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/'\nSUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/'\nSUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/'\n\n# Set INTERNAL_IPS for use within Docker. See https://knasmueller.net/fix-djangos-debug-toolbar-not-showing-inside-docker\nimport socket\nhostname, _, ips = socket.gethostbyname_ex(socket.gethostname())\nINTERNAL_IPS = [\".\".join(ip.split(\".\")[:-1] + [\"1\"]) for ip in ips] + ['127.0.0.1']\n\n# DEV_TEMPLATE_CONTEXT_PROCESSORS = [\n# 'ietf.context_processors.sql_debug',\n# ]\n\nDOCUMENT_PATH_PATTERN = '/assets/ietf-ftp/{doc.type_id}/'\nINTERNET_DRAFT_PATH = '/assets/ietf-ftp/internet-drafts/'\nRFC_PATH = '/assets/ietf-ftp/rfc/'\nCHARTER_PATH = '/assets/ietf-ftp/charter/'\nBOFREQ_PATH = '/assets/ietf-ftp/bofreq/'\nCONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'\nSTATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'\nINTERNET_DRAFT_ARCHIVE_DIR = '/assets/archive/id'\nINTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id'\nBIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'\n\nNOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'\nSLIDE_STAGING_PATH = 'test/staging/'\n\nDE_GFM_BINARY = '/usr/local/bin/de-gfm'\n\nSTATIC_IETF_ORG = \"/_static\"\nSTATIC_IETF_ORG_INTERNAL = \"http://localhost:80\"\n",
"path": "docker/configs/settings_local.py"
}
] | [
{
"content": "# Copyright The IETF Trust 2007-2019, All Rights Reserved\n# -*- coding: utf-8 -*-\n\nfrom ietf.settings import * # pyflakes:ignore\n\nALLOWED_HOSTS = ['*']\n\nfrom ietf.settings_postgresqldb import DATABASES # pyflakes:ignore\n\nIDSUBMIT_IDNITS_BINARY = \"/usr/local/bin/idnits\"\nIDSUBMIT_REPOSITORY_PATH = \"test/id/\"\nIDSUBMIT_STAGING_PATH = \"test/staging/\"\n\nAGENDA_PATH = '/assets/www6s/proceedings/'\nMEETINGHOST_LOGO_PATH = AGENDA_PATH\n\nUSING_DEBUG_EMAIL_SERVER=True\nEMAIL_HOST='localhost'\nEMAIL_PORT=2025\n\nMEDIA_BASE_DIR = '/assets'\nMEDIA_ROOT = MEDIA_BASE_DIR + '/media/'\nMEDIA_URL = '/media/'\n\nPHOTOS_DIRNAME = 'photo'\nPHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME\n\nSUBMIT_YANG_CATALOG_MODEL_DIR = '/assets/ietf-ftp/yang/catalogmod/'\nSUBMIT_YANG_DRAFT_MODEL_DIR = '/assets/ietf-ftp/yang/draftmod/'\nSUBMIT_YANG_INVAL_MODEL_DIR = '/assets/ietf-ftp/yang/invalmod/'\nSUBMIT_YANG_IANA_MODEL_DIR = '/assets/ietf-ftp/yang/ianamod/'\nSUBMIT_YANG_RFC_MODEL_DIR = '/assets/ietf-ftp/yang/rfcmod/'\n\n# Set INTERNAL_IPS for use within Docker. See https://knasmueller.net/fix-djangos-debug-toolbar-not-showing-inside-docker\nimport socket\nhostname, _, ips = socket.gethostbyname_ex(socket.gethostname())\nINTERNAL_IPS = [\".\".join(ip.split(\".\")[:-1] + [\"1\"]) for ip in ips] + ['127.0.0.1']\n\n# DEV_TEMPLATE_CONTEXT_PROCESSORS = [\n# 'ietf.context_processors.sql_debug',\n# ]\n\nDOCUMENT_PATH_PATTERN = '/assets/ietf-ftp/{doc.type_id}/'\nINTERNET_DRAFT_PATH = '/assets/ietf-ftp/internet-drafts/'\nRFC_PATH = '/assets/ietf-ftp/rfc/'\nCHARTER_PATH = '/assets/ietf-ftp/charter/'\nBOFREQ_PATH = '/assets/ietf-ftp/bofreq/'\nCONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'\nSTATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'\nINTERNET_DRAFT_ARCHIVE_DIR = '/assets/archive/id'\nINTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id'\nBIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'\n\nNOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'\nSLIDE_STAGING_PATH = 'test/staging/'\n\nDE_GFM_BINARY = '/usr/local/bin/de-gfm'\n\nSTATIC_IETF_ORG = \"/_static\"\nSTATIC_IETF_ORG_INTERNAL = \"http://static\"\n",
"path": "docker/configs/settings_local.py"
}
] | diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 422e77cf54..14a0d5ea90 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -67,7 +67,7 @@
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
- "forwardPorts": [3000, 5432, 5433, 8000],
+ "forwardPorts": [3000, 5432, 8000],
"portsAttributes": {
"3000": {
@@ -78,10 +78,6 @@
"label": "PostgreSQL",
"onAutoForward": "silent"
},
- "5433": {
- "label": "pgAdmin",
- "onAutoForward": "silent"
- },
"8000": {
"label": "NGINX",
"onAutoForward": "notify"
diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml
index 1673e4e618..fa9a412cf2 100644
--- a/.devcontainer/docker-compose.extend.yml
+++ b/.devcontainer/docker-compose.extend.yml
@@ -15,11 +15,5 @@ services:
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:db
- pgadmin:
- network_mode: service:db
-
- static:
- network_mode: service:db
-
volumes:
datatracker-vscode-ext:
diff --git a/docker-compose.yml b/docker-compose.yml
index f8f933527c..2889bce9b0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -53,9 +53,9 @@ services:
- [email protected]
- PGADMIN_DEFAULT_PASSWORD=dev
- PGADMIN_CONFIG_LOGIN_BANNER="Login with [email protected] / dev"
- - PGADMIN_LISTEN_PORT=5433
- PGADMIN_DISABLE_POSTFIX=True
- PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED=False
+ - SCRIPT_NAME=/pgadmin
volumes:
- ./docker/configs/pgadmin-servers.json:/pgadmin4/servers.json
diff --git a/docker/configs/nginx-502.html b/docker/configs/nginx-502.html
index 9d85600ecb..9e4374f3c0 100644
--- a/docker/configs/nginx-502.html
+++ b/docker/configs/nginx-502.html
@@ -23,7 +23,6 @@
background-color: #222;
border-radius: 10px;
padding: 10px 50px;
- display: inline-block;
}
i {
font-size: 64px;
@@ -54,6 +53,9 @@ <h2>Could not connect to dev server.</h2>
<p class="mt">Using <strong>VS Code</strong>, open the <strong>Run and Debug</strong> tab on the left and click the <i>‣</i> symbol (Run Server) to start the server.</p>
<p>Otherwise, run the command <code>ietf/manage.py runserver 0.0.0.0:8001</code> from the terminal.</p>
</div>
+ <div class="mt">
+ <p>You can manage the database at <a href="/pgadmin">/pgadmin</a>.</p>
+ </div>
<p class="mt">For more information, check out the <a href="https://github.com/ietf-tools/datatracker/blob/main/docker/README.md" target="_blank">Datatracker Development in Docker</a> guide.</p>
</body>
</html>
diff --git a/docker/configs/nginx-proxy.conf b/docker/configs/nginx-proxy.conf
index 02f5208caa..d5681fb239 100644
--- a/docker/configs/nginx-proxy.conf
+++ b/docker/configs/nginx-proxy.conf
@@ -8,7 +8,14 @@ server {
server_name _;
location /_static/ {
- proxy_pass http://localhost:80/;
+ proxy_pass http://static/;
+ }
+
+ location /pgadmin/ {
+ proxy_set_header X-Script-Name /pgadmin;
+ proxy_set_header Host $host;
+ proxy_pass http://pgadmin;
+ proxy_redirect off;
}
location / {
diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py
index fc3052ff98..647fcd5b22 100644
--- a/docker/configs/settings_local.py
+++ b/docker/configs/settings_local.py
@@ -57,4 +57,4 @@
DE_GFM_BINARY = '/usr/local/bin/de-gfm'
STATIC_IETF_ORG = "/_static"
-STATIC_IETF_ORG_INTERNAL = "http://localhost:80"
+STATIC_IETF_ORG_INTERNAL = "http://static"
|
svthalia__concrexit-3475 | Switch to cached_db session backend
### What?
Once we have Redis set up (mainly for Celery; see #3357 and #3361), we can use it to cache sessions.
See https://docs.djangoproject.com/en/4.2/topics/http/sessions/#using-cached-sessions
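A minimal sketch of the settings change this implies, assuming the existing Redis-backed `default` cache is reused for sessions (the exact alias and where it lands in `settings.py` are still to be decided, not the final concrexit implementation):

```python
# Sketch only: the cached_db backend reads sessions through the configured
# cache (Redis when REDIS_CACHE_HOST is set) while still persisting writes
# to the database, so no session data is lost if the cache is flushed.
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"

# Sessions use the "default" cache alias unless overridden; shown explicitly here.
SESSION_CACHE_ALIAS = "default"
```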
### Why?
A little performance boost for virtually no effort.
| [
{
"content": "\"\"\"Django settings for concrexit.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/dev/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/dev/ref/settings/\n\"\"\"\n\nimport base64\nimport json\nimport logging\nimport os\nfrom typing import Optional\n\nfrom django.core.management.commands import makemessages\nfrom django.utils import timezone\nfrom django.utils.translation import gettext_lazy as _\n\nfrom celery.schedules import crontab\n\nlogger = logging.getLogger(__name__)\n\n# Sentinel objects that are distinct from None\n_NOT_SET = object()\n\n\nclass Misconfiguration(Exception):\n \"\"\"Exception that is raised when something is misconfigured in this file.\"\"\"\n\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.abspath(\n os.path.join(os.path.dirname(os.path.abspath(__file__)), \"\", \"..\")\n)\n\nSOURCE_COMMIT = os.environ.get(\"SOURCE_COMMIT\", \"unknown\")\n\n# Many of the settings are dependent on the environment we're running in.\n# The default environment is development, so the programmer doesn't have to set anything\nDJANGO_ENV = os.environ.get(\"DJANGO_ENV\", \"development\")\n_environments = [\"production\", \"staging\", \"testing\", \"development\"]\nif DJANGO_ENV not in _environments:\n raise Misconfiguration(f\"Set DJANGO_ENV to one of: {', '.join(_environments)}\")\n\n\ndef _set_django_env(env):\n \"\"\"Set the DJANGO_ENV variable.\n\n This is a helper function for the doctests below because doctests cannot set global variables.\n \"\"\"\n global DJANGO_ENV # noqa: PLW0603\n DJANGO_ENV = env\n\n\ndef setting(*, development, production, staging=_NOT_SET, testing=_NOT_SET):\n \"\"\"Generate a setting depending on the DJANGO_ENV and the arguments.\n\n This function is meant for static settings that depend on the DJANGO_ENV. If the\n staging or testing arguments are left to their defaults, they will fall back to\n the production and development settings respectively.\n\n Example:\n >>> _set_django_env(\"production\")\n >>> SEND_MESSAGES_WITH = setting(development=\"console\", production=\"mail\", staging=\"DM\")\n >>> SEND_MESSAGES_WITH\n 'mail'\n >>> _set_django_env(\"testing\")\n >>> setting(development=\"console\", production=\"mail\", staging=\"DM\")\n 'console'\n \"\"\"\n if DJANGO_ENV == \"development\" or (DJANGO_ENV == \"testing\" and testing is _NOT_SET):\n return development\n if DJANGO_ENV == \"testing\":\n return testing\n if DJANGO_ENV == \"production\" or (DJANGO_ENV == \"staging\" and staging is _NOT_SET):\n return production\n if DJANGO_ENV == \"staging\":\n return staging\n raise Misconfiguration(f\"Set DJANGO_ENV to one of: {', '.join(_environments)}\")\n\n\ndef from_env(\n name, *, production=_NOT_SET, staging=_NOT_SET, testing=_NOT_SET, development=None\n):\n \"\"\"Generate a setting that's overridable by the process environment.\n\n This will raise an exception if a default is not set for production. Because we use\n the sentinel value _NOT_SET, you can still set a default of None for production if wanted.\n\n As with :func:`setting` the staging and testing values will fall back to production\n and development. 
So if an environment variable is required in production, and no default\n is set for staging, staging will also raise the exception.\n\n Example:\n >>> _set_django_env(\"production\")\n >>> # A secret key should always be set in production via the environment\n >>> from_env(\"MEDIA_ROOT\", development=\"/media/root\")\n Traceback (most recent call last):\n ...\n thaliawebsite.settings.Misconfiguration: Environment variable `MEDIA_ROOT` must be supplied in production\n >>> _set_django_env(\"development\")\n >>> from_env(\"MEDIA_ROOT\", development=\"/media/root\")\n '/media/root'\n \"\"\"\n try:\n return os.environ[name]\n except KeyError:\n if DJANGO_ENV == \"production\" or (\n DJANGO_ENV == \"staging\" and staging is _NOT_SET\n ):\n if production is _NOT_SET and os.environ.get(\"MANAGE_PY\", \"0\") == \"0\":\n raise Misconfiguration(\n f\"Environment variable `{name}` must be supplied in production\"\n )\n if production is _NOT_SET and os.environ.get(\"MANAGE_PY\", \"0\") == \"1\":\n logger.warning(\n \"Ignoring unset %s because we're running a management command\", name\n )\n return development\n return production\n if DJANGO_ENV == \"staging\":\n return staging\n if DJANGO_ENV == \"development\" or (\n DJANGO_ENV == \"testing\" and testing is _NOT_SET\n ):\n return development\n if DJANGO_ENV == \"testing\":\n return testing\n raise Misconfiguration(f\"DJANGO_ENV set to unsupported value: {DJANGO_ENV}\")\n\n\n###############################################################################\n# Site settings\n\n# We use this setting to generate the email addresses, and for BASE_URL below.\nSITE_DOMAIN = from_env(\"SITE_DOMAIN\", development=\"localhost\", production=\"thalia.nu\")\n\n# Used to generate some absolute urls when we don't have access to a request.\nBASE_URL = from_env(\n \"BASE_URL\",\n development=f\"http://{SITE_DOMAIN}:8000\",\n production=f\"https://{SITE_DOMAIN}\",\n)\n\n# Default FROM email\nDEFAULT_FROM_EMAIL = f\"{os.environ.get('ADDRESS_NOREPLY', 'noreply')}@{SITE_DOMAIN}\"\n# https://docs.djangoproject.com/en/dev/ref/settings/#server-email\nSERVER_EMAIL = DEFAULT_FROM_EMAIL\nNEWSLETTER_FROM_ADDRESS = (\n f\"{os.environ.get('ADDRESS_NEWSLETTER', 'newsletter')}@{SITE_DOMAIN}\"\n)\nBOARD_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_CONTACT', 'info')}@{SITE_DOMAIN}\"\n)\nPARTNER_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_COLLABORATION', 'samenwerking')}@{SITE_DOMAIN}\"\n)\nEDUCATION_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_EDUCATION', 'educacie')}@{SITE_DOMAIN}\"\n)\nPROMO_REQUEST_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_PROMOREQUESTS', 'promocie')}@{SITE_DOMAIN}\"\n)\nTREASURER_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_TREASURER', 'treasurer')}@{SITE_DOMAIN}\"\n)\n\nPROMO_PUBLISH_DATE_TIMEDELTA = timezone.timedelta(weeks=1)\n\n# How many days to keep reference faces after a user marks them for deletion\nFACEDETECTION_REFERENCE_FACE_STORAGE_PERIOD_AFTER_DELETE_DAYS = 180\n\n# How many reference faces a user can have at the same time\nFACEDETECTION_MAX_NUM_REFERENCE_FACES = 5\n\n# ARN of the concrexit-facedetection-lambda function.\n# See https://github.com/svthalia/concrexit-facedetection-lambda.\nFACEDETECTION_LAMBDA_ARN = from_env(\"FACEDETECTION_LAMBDA_ARN\")\n\nFACEDETECTION_LAMBDA_BATCH_SIZE = int(\n os.environ.get(\"FACEDETECTION_LAMBDA_BATCH_SIZE\", 20)\n)\n\n# The scheme the app uses for oauth redirection\nAPP_OAUTH_SCHEME = os.environ.get(\"APP_OAUTH_SCHEME\", \"nu.thalia\")\n\n# Membership 
prices\nMEMBERSHIP_PRICES = {\n \"year\": int(os.environ.get(\"MEMBERSHIP_PRICE_YEAR_CENTS\", \"750\")) / 100,\n \"study\": int(os.environ.get(\"MEMBERSHIP_PRICE_STUDY_CENTS\", \"3000\")) / 100,\n}\n\n# Window during which a payment can be deleted again\nPAYMENT_CHANGE_WINDOW = int(os.environ.get(\"PAYMENTS_CHANGE_WINDOW\", 10 * 60))\n\n# Payments creditor identifier\nSEPA_CREDITOR_ID = os.environ.get(\"SEPA_CREDITOR_ID\", \"<unknown>\")\n\n# Payment batch withdrawal date default offset after creation date\nPAYMENT_BATCH_DEFAULT_WITHDRAWAL_DATE_OFFSET = timezone.timedelta(days=14)\n\nTHALIA_PAY_ENABLED_PAYMENT_METHOD = (\n from_env(\"THALIA_PAY_ENABLED\", development=\"1\", staging=\"1\", production=\"0\") == \"1\"\n)\nTHALIA_PAY_FOR_NEW_MEMBERS = os.environ.get(\"THALIA_PAY_FOR_NEW_MEMBERS\", \"1\") == \"1\"\n\n###############################################################################\n# Django settings\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key\nSECRET_KEY = from_env(\n \"SECRET_KEY\", development=\"#o-0d1q5&^&06tn@8pr1f(n3$crafd++^%sacao7hj*ea@c)^t\"\n)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts\nALLOWED_HOSTS = [\n SITE_DOMAIN,\n *from_env(\"ALLOWED_HOSTS\", development=\"*\", production=\"\").split(\",\"),\n]\n# https://docs.djangoproject.com/en/dev/ref/settings/#internal-ips\nINTERNAL_IPS = setting(development=[\"127.0.0.1\", \"172.17.0.1\"], production=[])\n\nDJANGO_DRF_FILEPOND_UPLOAD_TMP = from_env(\n \"DJANGO_DRF_FILEPOND_UPLOAD_TMP\",\n development=os.path.join(BASE_DIR, \"filepond-temp-uploads\"),\n)\nDJANGO_DRF_FILEPOND_FILE_STORE_PATH = from_env(\n \"DJANGO_DRF_FILEPOND_FILE_STORE_PATH\",\n development=os.path.join(BASE_DIR, \"filepond-uploaded\"),\n)\nDJANGO_DRF_FILEPOND_ALLOW_EXTERNAL_UPLOAD_DIR = True\nDJANGO_DRF_FILEPOND_PERMISSION_CLASSES = {\n \"GET_FETCH\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"GET_LOAD\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"POST_PROCESS\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"GET_RESTORE\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"DELETE_REVERT\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"PATCH_PATCH\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n}\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#static-root\nSTATIC_ROOT = from_env(\"STATIC_ROOT\", development=os.path.join(BASE_DIR, \"static\"))\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#media-root\nMEDIA_ROOT = from_env(\"MEDIA_ROOT\", development=os.path.join(BASE_DIR, \"media\"))\n\n# https://github.com/johnsensible/django-sendfile#nginx-backend\nSENDFILE_URL = \"/media/sendfile/\"\nSENDFILE_ROOT = MEDIA_ROOT\nSENDFILE_BACKEND = setting(\n development=\"django_sendfile.backends.development\",\n production=\"django_sendfile.backends.nginx\",\n)\n\nPRIVATE_MEDIA_LOCATION = \"\"\nPUBLIC_MEDIA_LOCATION = \"public\"\nSTATICFILES_LOCATION = \"static\"\n\nMEDIA_URL = \"/media/private/\"\n\nAWS_ACCESS_KEY_ID = from_env(\"AWS_ACCESS_KEY_ID\", production=None)\nAWS_SECRET_ACCESS_KEY = from_env(\"AWS_SECRET_ACCESS_KEY\", production=None)\nAWS_STORAGE_BUCKET_NAME = from_env(\"AWS_STORAGE_BUCKET_NAME\", production=None)\nAWS_DEFAULT_ACL = \"private\"\nAWS_S3_OBJECT_PARAMETERS = {\"CacheControl\": \"max-age=86400\"}\nAWS_S3_SIGNATURE_VERSION = 
\"s3v4\"\n\nif AWS_STORAGE_BUCKET_NAME is not None:\n AWS_CLOUDFRONT_KEY = base64.urlsafe_b64decode(\n os.environ.get(\"AWS_CLOUDFRONT_KEY\", None)\n ).decode(\"utf-8\")\n AWS_CLOUDFRONT_KEY_ID = os.environ.get(\"AWS_CLOUDFRONT_KEY_ID\", None)\n AWS_S3_CUSTOM_DOMAIN = os.environ.get(\"AWS_CLOUDFRONT_DOMAIN\", None)\n\n _STATICFILES_STORAGE = \"thaliawebsite.storage.backend.StaticS3Storage\"\n STATIC_URL = f\"https://{AWS_S3_CUSTOM_DOMAIN}/static/\"\n\n _DEFAULT_FILE_STORAGE = \"thaliawebsite.storage.backend.PrivateS3Storage\"\n\n _PUBLIC_FILE_STORAGE = \"thaliawebsite.storage.backend.PublicS3Storage\"\n PUBLIC_MEDIA_URL = f\"https://{AWS_S3_CUSTOM_DOMAIN}/\"\nelse:\n _STATICFILES_STORAGE = setting(\n development=\"django.contrib.staticfiles.storage.StaticFilesStorage\",\n production=\"django.contrib.staticfiles.storage.ManifestStaticFilesStorage\",\n )\n STATIC_URL = \"/static/\"\n\n _DEFAULT_FILE_STORAGE = \"thaliawebsite.storage.backend.PrivateFileSystemStorage\"\n\n _PUBLIC_FILE_STORAGE = \"thaliawebsite.storage.backend.PublicFileSystemStorage\"\n PUBLIC_MEDIA_URL = \"/media/public/\"\n\nSTORAGES = {\n \"default\": {\"BACKEND\": _DEFAULT_FILE_STORAGE},\n \"public\": {\"BACKEND\": _PUBLIC_FILE_STORAGE},\n \"staticfiles\": {\"BACKEND\": _STATICFILES_STORAGE},\n}\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#conn-max-age\nCONN_MAX_AGE = int(from_env(\"CONN_MAX_AGE\", development=\"0\", production=\"60\"))\n\n# Useful for managing members\n# https://docs.djangoproject.com/en/dev/ref/settings/#data-upload-max-number-fields\nDATA_UPLOAD_MAX_NUMBER_FIELDS = os.environ.get(\"DATA_UPLOAD_MAX_NUMBER_FIELDS\", 10000)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#debug\nDEBUG = setting(development=True, production=False, testing=False)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure\nSESSION_COOKIE_SECURE = setting(development=False, production=True)\n# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure\nCSRF_COOKIE_SECURE = setting(development=False, production=True)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#std-setting-SECURE_PROXY_SSL_HEADER\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#default-auto-field\nDEFAULT_AUTO_FIELD = \"django.db.models.AutoField\"\n\n\n###############################################################################\n# Celery settings\n# https://docs.celeryq.dev/en/stable/userguide/configuration.html#configuration\n\n# Set CELERY_BROKER_URL=\"redis://127.0.0.1:6379\" to use a local redis server in development.\nCELERY_BROKER_URL = from_env(\"CELERY_BROKER_URL\")\n\n# Always execute tasks synchronously when no broker is configured in development and testing.\n# See https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-task_always_eager\nCELERY_TASK_ALWAYS_EAGER = CELERY_BROKER_URL is None\n\n\n# See https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#caveats\nCELERY_BROKER_TRANSPORT_OPTIONS = {\"visibility_timeout\": 18000}\n\n# https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html\nCELERY_BEAT_SCHEDULE = {\n \"synchronize_mailinglists\": {\n \"task\": \"mailinglists.tasks.sync_mail\",\n \"schedule\": crontab(minute=30),\n },\n \"synchronize_moneybird\": {\n \"task\": \"moneybirdsynchronization.tasks.synchronize_moneybird\",\n \"schedule\": crontab(minute=30, hour=1),\n },\n \"sendpromooverviewweekly\": {\n \"task\": 
\"promotion.tasks.promo_update_weekly\",\n \"schedule\": crontab(minute=0, hour=8, day_of_week=1),\n },\n \"sendpromoooverviewdaily\": {\n \"task\": \"promotion.tasks.promo_update_daily\",\n \"schedule\": crontab(minute=0, hour=8),\n },\n \"facedetectlambda\": {\n \"task\": \"facedetection.tasks.trigger_facedetect_lambda\",\n \"schedule\": crontab(minute=0, hour=1),\n },\n \"revokeoldmandates\": {\n \"task\": \"payments.tasks.revoke_mandates\",\n \"schedule\": crontab(minute=0, hour=1),\n },\n \"membershipannouncement\": {\n \"task\": \"members.tasks.membership_announcement\",\n \"schedule\": crontab(minute=0, hour=6, day_of_month=31, month_of_year=8),\n },\n \"inforequest\": {\n \"task\": \"members.tasks.info_request\",\n \"schedule\": crontab(minute=0, hour=6, day_of_month=15, month_of_year=10),\n },\n \"expirationannouncement\": {\n \"task\": \"members.tasks.expiration_announcement\",\n \"schedule\": crontab(minute=0, hour=6, day_of_month=8, month_of_year=8),\n },\n \"minimiseregistration\": {\n \"task\": \"registrations.tasks.minimise_registrations\",\n \"schedule\": crontab(minute=0, hour=3, day_of_month=1),\n },\n \"sendscheduledmessages\": {\n \"task\": \"pushnotifications.tasks.send_scheduled_messages\",\n \"schedule\": crontab(minute=\"*/2\"),\n \"args\": (120,),\n },\n \"revokestaff\": {\n \"task\": \"activemembers.tasks.revoke_staff\",\n \"schedule\": crontab(minute=30, hour=3),\n },\n \"deletegsuiteusers\": {\n \"task\": \"activemembers.tasks.delete_gsuite_users\",\n \"schedule\": crontab(minute=30, hour=3, day_of_week=1),\n },\n \"sendplannednewsletters\": {\n \"task\": \"newsletters.tasks.send_planned_newsletters\",\n \"schedule\": crontab(minute=\"*/5\"),\n },\n \"dataminimisation\": {\n \"task\": \"thaliawebsite.tasks.data_minimisation\",\n \"schedule\": crontab(minute=0, hour=3),\n },\n \"cleanup\": {\n \"task\": \"thaliawebsite.tasks.clean_up\",\n \"schedule\": crontab(minute=0, hour=23),\n },\n \"cleartokens\": {\n \"task\": \"thaliawebsite.tasks.clear_tokens\",\n \"schedule\": crontab(minute=30, hour=3),\n },\n}\n\n###############################################################################\n# Email settings\n# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend\n_EMAIL_BACKEND = from_env(\"EMAIL_BACKEND\", development=\"console\", production=\"smtp\")\nif _EMAIL_BACKEND == \"console\":\n EMAIL_BACKEND = \"django.core.mail.backends.console.EmailBackend\"\n\nif _EMAIL_BACKEND == \"smtp\":\n EMAIL_BACKEND = \"django.core.mail.backends.smtp.EmailBackend\"\n EMAIL_HOST = os.environ.get(\"DJANGO_EMAIL_HOST\")\n EMAIL_PORT = os.environ.get(\"DJANGO_EMAIL_PORT\", 25)\n EMAIL_HOST_USER = os.environ.get(\"DJANGO_EMAIL_HOST_USER\", \"\")\n EMAIL_HOST_PASSWORD = os.environ.get(\"DJANGO_EMAIL_HOST_PASSWORD\", \"\")\n EMAIL_USE_TLS = os.environ.get(\"DJANGO_EMAIL_USE_TLS\", \"1\") == \"1\"\n EMAIL_TIMEOUT = int(os.environ.get(\"EMAIL_TIMEOUT\", \"10\"))\n if EMAIL_HOST is None:\n logger.warning(\n \"The email host is set to the default of localhost, are you sure you don't want to set EMAIL_HOST?\"\n )\n EMAIL_HOST = \"localhost\"\n\n###############################################################################\n# Database settings\n# https://docs.djangoproject.com/en/dev/ref/settings/#databases\nDATABASE_ENGINE = from_env(\n \"DATABASE_ENGINE\", development=\"sqlite\", production=\"postgresql\", testing=None\n)\nif DATABASE_ENGINE == \"sqlite\":\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": os.path.join(BASE_DIR, 
\"db.sqlite3\"),\n }\n }\n\nif DATABASE_ENGINE == \"postgresql\":\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"USER\": os.environ.get(\"POSTGRES_USER\", \"concrexit\"),\n \"PASSWORD\": os.environ.get(\"POSTGRES_PASSWORD\", None),\n \"NAME\": os.environ.get(\"POSTGRES_DB\", \"\"),\n \"HOST\": os.environ.get(\"POSTGRES_HOST\", \"\"),\n \"PORT\": os.environ.get(\"POSTGRES_PORT\", \"5432\"),\n }\n }\n\nif DJANGO_ENV == \"testing\":\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"NAME\": \"thalia\",\n \"USER\": \"postgres\",\n \"PASSWORD\": \"postgres\",\n \"HOST\": \"127.0.0.1\",\n \"PORT\": 5432,\n },\n }\n\n###############################################################################\n# Firebase config\nFIREBASE_CREDENTIALS = os.environ.get(\"FIREBASE_CREDENTIALS\", \"{}\")\nif FIREBASE_CREDENTIALS != \"{}\":\n FIREBASE_CREDENTIALS = base64.urlsafe_b64decode(FIREBASE_CREDENTIALS)\nFIREBASE_CREDENTIALS = json.loads(FIREBASE_CREDENTIALS)\n\nif FIREBASE_CREDENTIALS != {}:\n from firebase_admin import credentials, initialize_app\n\n try:\n initialize_app(credential=credentials.Certificate(FIREBASE_CREDENTIALS))\n except ValueError:\n logger.error(\"Firebase application failed to initialise\")\n\n###############################################################################\n# GSuite config\nGSUITE_ADMIN_SCOPES = [\n \"https://www.googleapis.com/auth/admin.directory.group\",\n \"https://www.googleapis.com/auth/admin.directory.user\",\n \"https://www.googleapis.com/auth/apps.groups.settings\",\n]\n\nGSUITE_ADMIN_CREDENTIALS = os.environ.get(\"GSUITE_ADMIN_CREDENTIALS\", \"{}\")\nif GSUITE_ADMIN_CREDENTIALS != \"{}\":\n GSUITE_ADMIN_CREDENTIALS = base64.urlsafe_b64decode(GSUITE_ADMIN_CREDENTIALS)\nGSUITE_ADMIN_CREDENTIALS = json.loads(GSUITE_ADMIN_CREDENTIALS)\nGSUITE_ADMIN_USER = os.environ.get(\"GSUITE_ADMIN_USER\", \"[email protected]\")\nGSUITE_DOMAIN = from_env(\n \"GSUITE_DOMAIN\", development=\"thalia.localhost\", production=\"thalia.nu\"\n)\nGSUITE_MEMBERS_DOMAIN = from_env(\n \"GSUITE_MEMBERS_DOMAIN\",\n development=\"members.thalia.localhost\",\n production=\"members.thalia.nu\",\n)\nGSUITE_MEMBERS_AUTOSYNC = os.environ.get(\"GSUITE_MEMBERS_AUTOSYNC\", \"0\") == \"1\"\n\nif GSUITE_ADMIN_CREDENTIALS != {}:\n from google.oauth2 import service_account\n\n GSUITE_ADMIN_CREDENTIALS = service_account.Credentials.from_service_account_info(\n GSUITE_ADMIN_CREDENTIALS, scopes=GSUITE_ADMIN_SCOPES\n ).with_subject(GSUITE_ADMIN_USER)\n\nEMAIL_DOMAIN_BLACKLIST = [GSUITE_MEMBERS_DOMAIN]\n\n###############################################################################\n# Google maps API key and secrets\nGOOGLE_MAPS_API_KEY = os.environ.get(\"GOOGLE_MAPS_API_KEY\", \"\")\nGOOGLE_MAPS_API_SECRET = os.environ.get(\"GOOGLE_MAPS_API_SECRET\", \"\")\nGOOGLE_PLACES_API_KEY = os.environ.get(\"GOOGLE_PLACES_API_KEY\", \"\")\n\n###############################################################################\n# Sentry setup\nif \"SENTRY_DSN\" in os.environ:\n import sentry_sdk\n from sentry_sdk.integrations.celery import CeleryIntegration\n from sentry_sdk.integrations.django import DjangoIntegration\n\n sentry_sdk.init(\n dsn=os.environ.get(\"SENTRY_DSN\"),\n integrations=[\n DjangoIntegration(),\n CeleryIntegration(\n monitor_beat_tasks=True,\n ),\n ],\n release=SOURCE_COMMIT,\n send_default_pii=True,\n environment=DJANGO_ENV,\n traces_sample_rate=float(os.environ.get(\"SENTRY_TRACES_SAMPLE_RATE\", 0.2)),\n 
profiles_sample_rate=float(os.environ.get(\"SENTRY_PROFILES_SAMPLE_RATE\", 0.0)),\n )\n\n\n###############################################################################\n# (Mostly) static settings\nINSTALLED_APPS = [\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"django.contrib.sitemaps\",\n # Dependencies\n \"oauth2_provider\",\n \"corsheaders\",\n \"django_bootstrap5\",\n \"tinymce\",\n \"rest_framework\",\n \"rest_framework.authtoken\",\n \"debug_toolbar\",\n \"sass_processor\",\n \"admin_auto_filters\",\n \"django_drf_filepond\",\n \"django_filepond_widget\",\n \"thumbnails\",\n # Our apps\n # Directly link to the app config when applicable as recommended\n # by the docs: https://docs.djangoproject.com/en/2.0/ref/applications/\n \"thaliawebsite.apps.ThaliaWebsiteConfig\", # include for admin settings\n # Load django.contrib.admin after thaliawebsite so the admin page gets modified\n \"django.contrib.admin\",\n # Our apps ordered such that templates in the first\n # apps can override those used by the later apps.\n \"pushnotifications.apps.PushNotificationsConfig\",\n \"facedetection.apps.FaceDetectionConfig\",\n \"announcements.apps.AnnouncementsConfig\",\n \"promotion.apps.PromotionConfig\",\n \"members.apps.MembersConfig\",\n \"documents.apps.DocumentsConfig\",\n \"activemembers.apps.ActiveMembersConfig\",\n \"photos.apps.PhotosConfig\",\n \"utils\",\n \"mailinglists.apps.MailinglistsConfig\",\n \"merchandise.apps.MerchandiseConfig\",\n \"thabloid.apps.ThabloidConfig\",\n \"partners.apps.PartnersConfig\",\n \"events.apps.EventsConfig\",\n \"pizzas.apps.PizzasConfig\",\n \"newsletters.apps.NewslettersConfig\",\n \"education.apps.EducationConfig\",\n \"registrations.apps.RegistrationsConfig\",\n \"payments.apps.PaymentsConfig\",\n \"singlepages.apps.SinglepagesConfig\",\n \"shortlinks.apps.ShortLinkConfig\",\n \"sales.apps.SalesConfig\",\n \"moneybirdsynchronization.apps.MoneybirdsynchronizationConfig\",\n]\n\nMIDDLEWARE = [\n \"debug_toolbar.middleware.DebugToolbarMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.http.ConditionalGetMiddleware\",\n \"corsheaders.middleware.CorsMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.locale.LocaleMiddleware\",\n \"thaliawebsite.middleware.RealIPMiddleware\",\n \"django_ratelimit.middleware.RatelimitMiddleware\",\n \"members.middleware.MemberMiddleware\",\n \"announcements.middleware.AnnouncementMiddleware\",\n]\n\nif DJANGO_ENV in (\"development\", \"testing\"):\n INSTALLED_APPS += [\n \"django_template_check\",\n \"django_extensions\",\n ]\n\nif DJANGO_ENV == \"testing\":\n for x in (\n \"debug_toolbar.middleware.DebugToolbarMiddleware\",\n \"django.middleware.http.ConditionalGetMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n ):\n MIDDLEWARE.remove(x)\n for x in (\"debug_toolbar\",):\n INSTALLED_APPS.remove(x)\n\nROOT_URLCONF = \"thaliawebsite.urls\"\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [os.path.join(BASE_DIR, \"templates\")],\n \"APP_DIRS\": setting(development=True, production=False),\n \"OPTIONS\": {\n \"context_processors\": [\n 
\"thaliawebsite.context_processors.source_commit\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.template.context_processors.media\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"announcements.context_processors.announcements\",\n \"thaliawebsite.context_processors.aprilfools\",\n \"thaliawebsite.context_processors.lustrum_styling\",\n ],\n },\n },\n]\n\nif DJANGO_ENV in [\"production\", \"staging\"]:\n # Use caching template loader\n TEMPLATES[0][\"OPTIONS\"][\"loaders\"] = [\n (\n \"django.template.loaders.cached.Loader\",\n [\n \"django.template.loaders.filesystem.Loader\",\n \"django.template.loaders.app_directories.Loader\",\n ],\n )\n ]\n\n# Default logging: https://github.com/django/django/blob/master/django/utils/log.py\n# We disable mailing the admin.\n# Server errors will be sent to Sentry via the config below this.\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"filters\": {\n \"require_debug_false\": {\n \"()\": \"django.utils.log.RequireDebugFalse\",\n },\n \"require_debug_true\": {\n \"()\": \"django.utils.log.RequireDebugTrue\",\n },\n },\n \"formatters\": {\n \"django.server\": {\n \"()\": \"django.utils.log.ServerFormatter\",\n \"format\": \"[{server_time}] {message}\",\n \"style\": \"{\",\n }\n },\n \"handlers\": {\n \"console\": {\n \"level\": \"INFO\",\n \"filters\": [\"require_debug_true\"],\n \"class\": \"logging.StreamHandler\",\n },\n \"django.server\": {\n \"level\": \"INFO\",\n \"class\": \"logging.StreamHandler\",\n \"formatter\": \"django.server\",\n },\n },\n \"loggers\": {\n \"django\": {\n \"handlers\": [\"console\"],\n \"level\": \"INFO\",\n },\n \"django.server\": {\n \"handlers\": [\"django.server\"],\n \"level\": \"INFO\",\n \"propagate\": False,\n },\n },\n}\n\nREDIS_CACHE_PORT = int(\n from_env(\"REDIS_CACHE_PORT\", development=\"6379\", production=\"6379\")\n)\nREDIS_CACHE_HOST = from_env(\"REDIS_CACHE_HOST\")\nREDIS_CACHE_URL = (\n f\"redis://{REDIS_CACHE_HOST}:{REDIS_CACHE_PORT}\" if REDIS_CACHE_HOST else None\n)\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django.core.cache.backends.redis.RedisCache\",\n \"LOCATION\": REDIS_CACHE_URL,\n }\n if REDIS_CACHE_URL is not None\n else {\n \"BACKEND\": \"django.core.cache.backends.db.DatabaseCache\",\n \"LOCATION\": \"django_default_db_cache\",\n },\n}\n\nWSGI_APPLICATION = \"thaliawebsite.wsgi.application\"\n\n# Login pages\nLOGIN_URL = \"/user/login/\"\nLOGIN_REDIRECT_URL = \"/\"\n\n# Cors configuration\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_URLS_REGEX = r\"^/(?:api/v1|api/v2|user/oauth)/.*\"\n\n# OAuth configuration\nOIDC_RSA_PRIVATE_KEY = from_env(\"OIDC_RSA_PRIVATE_KEY\", testing=None)\nif OIDC_RSA_PRIVATE_KEY is not None:\n OIDC_RSA_PRIVATE_KEY = base64.urlsafe_b64decode(OIDC_RSA_PRIVATE_KEY).decode()\n\nOAUTH2_PROVIDER = {\n \"OIDC_ENABLED\": True,\n \"OIDC_RSA_PRIVATE_KEY\": OIDC_RSA_PRIVATE_KEY,\n \"ALLOWED_REDIRECT_URI_SCHEMES\": setting(\n production=[\"https\", APP_OAUTH_SCHEME],\n staging=[\"http\", \"https\", APP_OAUTH_SCHEME],\n development=[\"http\", \"https\", APP_OAUTH_SCHEME],\n ),\n \"SCOPES\": {\n \"openid\": \"OpenID Connect\",\n \"read\": \"Authenticated read access to the website\",\n \"write\": \"Authenticated write access to the website\",\n \"activemembers:read\": \"Read access to committee, society and board groups\",\n \"announcements:read\": \"Read access to announcements\",\n \"events:read\": \"Read access to events and 
your event registrations\",\n \"events:register\": \"Write access to the state of your event registrations\",\n \"events:admin\": \"Admin access to the events\",\n \"food:read\": \"Read access to food events\",\n \"food:order\": \"Order access to food events\",\n \"food:admin\": \"Admin access to food events\",\n \"members:read\": \"Read access to the members directory\",\n \"photos:read\": \"Read access to photos\",\n \"profile:read\": \"Read access to your member profile\",\n \"profile:write\": \"Write access to your member profile\",\n \"pushnotifications:read\": \"Read access to push notifications\",\n \"pushnotifications:write\": \"Write access to push notifications\",\n \"partners:read\": \"Read access to partners\",\n \"payments:read\": \"Read access to payments\",\n \"payments:write\": \"Write access to payments\",\n \"payments:admin\": \"Admin access to payments\",\n \"sales:read\": \"Read access to your Point of Sale orders\",\n \"sales:order\": \"Place Point of Sale orders on your behalf\",\n \"sales:admin\": \"Admin access to Point of Sale orders\",\n },\n}\n\n# Password validation\n# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators\nAUTH_PASSWORD_VALIDATORS = [\n {\n \"NAME\": (\n \"django.contrib.auth.\"\n \"password_validation.UserAttributeSimilarityValidator\"\n ),\n },\n {\n \"NAME\": (\"django.contrib.auth.password_validation.MinimumLengthValidator\"),\n },\n {\n \"NAME\": (\"django.contrib.auth.password_validation.CommonPasswordValidator\"),\n },\n {\n \"NAME\": (\"django.contrib.auth.password_validation.NumericPasswordValidator\"),\n },\n]\n\nPASSWORD_HASHERS = setting(\n development=(\n \"django.contrib.auth.hashers.PBKDF2PasswordHasher\",\n \"django.contrib.auth.hashers.MD5PasswordHasher\",\n ),\n production=(\n \"django.contrib.auth.hashers.Argon2PasswordHasher\",\n \"django.contrib.auth.hashers.PBKDF2PasswordHasher\",\n \"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher\",\n \"django.contrib.auth.hashers.BCryptSHA256PasswordHasher\",\n \"django.contrib.auth.hashers.BCryptPasswordHasher\",\n ),\n testing=(\"django.contrib.auth.hashers.MD5PasswordHasher\",),\n)\n\nAUTHENTICATION_BACKENDS = [\n \"django.contrib.auth.backends.ModelBackend\",\n \"activemembers.backends.MemberGroupBackend\",\n]\n\nREST_FRAMEWORK = {\n \"DEFAULT_AUTHENTICATION_CLASSES\": (\n \"rest_framework.authentication.SessionAuthentication\",\n \"thaliawebsite.api.authentication.APIv1TokenAuthentication\",\n \"oauth2_provider.contrib.rest_framework.OAuth2Authentication\",\n ),\n \"DEFAULT_PAGINATION_CLASS\": \"thaliawebsite.api.pagination.APIv2LimitOffsetPagination\",\n \"PAGE_SIZE\": 50, # Only for API v2\n \"ALLOWED_VERSIONS\": [\"v1\", \"v2\", \"calendarjs\", \"facedetection\"],\n \"DEFAULT_VERSIONING_CLASS\": \"rest_framework.versioning.NamespaceVersioning\",\n \"DEFAULT_SCHEMA_CLASS\": \"thaliawebsite.api.openapi.OAuthAutoSchema\",\n \"DEFAULT_THROTTLE_CLASSES\": [\n \"thaliawebsite.api.throttling.AnonRateThrottle\",\n \"thaliawebsite.api.throttling.UserRateThrottle\",\n ],\n \"DEFAULT_THROTTLE_RATES\": setting(\n production={\"anon\": \"30/min\", \"user\": \"90/min\"},\n staging={\"anon\": \"30/min\", \"user\": \"90/min\"},\n development={\"anon\": None, \"user\": None},\n ),\n}\n\n# Rate limiting\nRATELIMIT_VIEW = \"thaliawebsite.views.rate_limited_view\"\n\n# Internationalization\n# https://docs.djangoproject.com/en/dev/topics/i18n/\nDATETIME_FORMAT = \"j M, Y, H:i\"\nSHORT_DATETIME_FORMAT = \"d-m-Y, H:i\"\n\nLANGUAGE_CODE = \"en\"\nTIME_ZONE = 
\"Europe/Amsterdam\"\nUSE_I18N = True\nUSE_L10N = False\nUSE_TZ = True\nLANGUAGES = [(\"en\", _(\"English\"))]\nLOCALE_PATHS = (\"locale\",)\n\n# Static files\nSTATICFILES_FINDERS = (\n \"django.contrib.staticfiles.finders.FileSystemFinder\",\n \"django.contrib.staticfiles.finders.AppDirectoriesFinder\",\n \"sass_processor.finders.CssFinder\",\n)\n\n# Allow importing .scss files that don't start with an underscore.\n# See https://github.com/jrief/django-sass-processor\nSASS_PROCESSOR_INCLUDE_FILE_PATTERN = r\"^.+\\.scss$\"\n\n# django-sass-processor does not use the Django 4.2 `storages` API yet,\n# but we can simply give it the path as we would with the new API.\nSASS_PROCESSOR_STORAGE = _STATICFILES_STORAGE\n\n# See utils/model/signals.py for explanation\nSUSPEND_SIGNALS = False\n\nTHUMBNAILS_METADATA = (\n {\n \"BACKEND\": \"thumbnails.backends.metadata.RedisBackend\",\n \"host\": REDIS_CACHE_HOST,\n \"port\": REDIS_CACHE_PORT,\n }\n if REDIS_CACHE_HOST\n else {\n \"BACKEND\": \"thumbnails.backends.metadata.DatabaseBackend\",\n }\n)\n\nTHUMBNAILS = {\n \"METADATA\": THUMBNAILS_METADATA,\n \"STORAGE\": {\n # django-thumbs does not use the Django 4.2 `storages` API yet,\n # but we can simply give it the path as we would with the new API.\n \"BACKEND\": _DEFAULT_FILE_STORAGE,\n },\n \"SIZES\": {\n \"small\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (300, 300),\n \"cover\": True,\n },\n ],\n },\n \"medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (600, 600),\n \"cover\": True,\n },\n ],\n },\n \"large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1200, 900),\n \"cover\": True,\n },\n ],\n },\n \"photo_medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1200, 900),\n },\n ],\n },\n \"photo_large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1920, 1920),\n },\n ],\n },\n \"avatar_large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (900, 900),\n \"cover\": True,\n },\n ],\n },\n \"slide_small\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (500, 108),\n \"cover\": True,\n },\n ],\n },\n \"slide_medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1000, 215),\n \"cover\": True,\n },\n ],\n },\n \"slide\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (2000, 430),\n \"cover\": True,\n },\n ],\n },\n \"fit_small\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (300, 300),\n },\n ],\n },\n \"fit_medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (600, 600),\n },\n ],\n },\n \"fit_large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1200, 900),\n },\n ],\n },\n \"source\": {\n \"FORMAT\": \"jpg\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.process_upload\",\n \"size\": (8_000, 8_000),\n \"format\": \"jpg\",\n }\n ],\n },\n \"source_png\": {\n \"FORMAT\": 
\"png\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.process_upload\",\n \"size\": (8_000, 8_000),\n \"format\": \"png\",\n }\n ],\n },\n },\n}\n\nTHUMBNAIL_SIZES = set(THUMBNAILS[\"SIZES\"].keys())\n\n# TinyMCE config\nTINYMCE_DEFAULT_CONFIG = {\n \"max_height\": 500,\n \"menubar\": False,\n \"plugins\": \"autolink autoresize link image code media paste lists\",\n \"toolbar\": \"h2 h3 | bold italic underline strikethrough | image media | link unlink \"\n \"| bullist numlist | undo redo | code\",\n \"contextmenu\": \"bold italic underline strikethrough | link\",\n \"paste_as_text\": True,\n \"relative_urls\": False,\n \"remove_script_host\": False,\n \"autoresize_bottom_margin\": 50,\n}\nTINYMCE_EXTRA_MEDIA = {\n \"css\": {\n \"all\": [\n \"css/tinymce.css\",\n ],\n },\n}\n\n\nBOOTSTRAP5 = {\"required_css_class\": \"required-field\"}\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#default-exception-reporter-filter\nDEFAULT_EXCEPTION_REPORTER_FILTER = (\n \"utils.exception_filter.ThaliaSafeExceptionReporterFilter\"\n)\n\n# Make sure the locations in django.po files don't include line nrs.\nmakemessages.Command.xgettext_options.append(\"--add-location=file\")\n\nGRAPH_MODELS = {\n \"all_applications\": False,\n \"group_models\": True,\n \"app_labels\": [\n \"events\",\n \"photos\",\n \"merchandise\",\n \"thabloid\",\n \"partners\",\n \"newsletters\",\n \"shortlinks\",\n \"promotion\",\n \"documents\",\n \"pizzas\",\n \"announcements\",\n \"sales\",\n \"registrations\",\n \"mailinglists\",\n \"payments\",\n \"members\",\n \"admin\",\n \"pushnotifications\",\n \"activemembers\",\n \"education\",\n \"auth\",\n ],\n}\n\nMONEYBIRD_START_DATE = os.environ.get(\"MONEYBIRD_START_DATE\", \"2023-09-01\")\n\nMONEYBIRD_ADMINISTRATION_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_ADMINISTRATION_ID\"))\n if os.environ.get(\"MONEYBIRD_ADMINISTRATION_ID\")\n else None\n)\n\nMONEYBIRD_API_KEY = os.environ.get(\"MONEYBIRD_API_KEY\")\n\nMONEYBIRD_SYNC_ENABLED = MONEYBIRD_ADMINISTRATION_ID and MONEYBIRD_API_KEY\n\nMONEYBIRD_MEMBER_PK_CUSTOM_FIELD_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_MEMBER_PK_CUSTOM_FIELD_ID\"))\n if os.environ.get(\"MONEYBIRD_MEMBER_PK_CUSTOM_FIELD_ID\")\n else None\n)\nMONEYBIRD_UNKNOWN_PAYER_CONTACT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_UNKNOWN_PAYER_CONTACT_ID\"))\n if os.environ.get(\"MONEYBIRD_UNKNOWN_PAYER_CONTACT_ID\")\n else None\n)\nMONEYBIRD_CONTRIBUTION_LEDGER_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_CONTRIBUTION_LEDGER_ID\"))\n if os.environ.get(\"MONEYBIRD_CONTRIBUTION_LEDGER_ID\")\n else None\n)\n\nMONEYBIRD_TPAY_FINANCIAL_ACCOUNT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_TPAY_FINANCIAL_ACCOUNT_ID\"))\n if os.environ.get(\"MONEYBIRD_TPAY_FINANCIAL_ACCOUNT_ID\")\n else None\n)\nMONEYBIRD_CASH_FINANCIAL_ACCOUNT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_CASH_FINANCIAL_ACCOUNT_ID\"))\n if os.environ.get(\"MONEYBIRD_CASH_FINANCIAL_ACCOUNT_ID\")\n else None\n)\nMONEYBIRD_CARD_FINANCIAL_ACCOUNT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_CARD_FINANCIAL_ACCOUNT_ID\"))\n if os.environ.get(\"MONEYBIRD_CARD_FINANCIAL_ACCOUNT_ID\")\n else None\n)\n\nMONEYBIRD_ZERO_TAX_RATE_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_ZERO_TAX_RATE_ID\"))\n if os.environ.get(\"MONEYBIRD_ZERO_TAX_RATE_ID\")\n else None\n)\n",
"path": "website/thaliawebsite/settings.py"
}
] | [
{
"content": "\"\"\"Django settings for concrexit.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/dev/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/dev/ref/settings/\n\"\"\"\n\nimport base64\nimport json\nimport logging\nimport os\nfrom typing import Optional\n\nfrom django.core.management.commands import makemessages\nfrom django.utils import timezone\nfrom django.utils.translation import gettext_lazy as _\n\nfrom celery.schedules import crontab\n\nlogger = logging.getLogger(__name__)\n\n# Sentinel objects that are distinct from None\n_NOT_SET = object()\n\n\nclass Misconfiguration(Exception):\n \"\"\"Exception that is raised when something is misconfigured in this file.\"\"\"\n\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.abspath(\n os.path.join(os.path.dirname(os.path.abspath(__file__)), \"\", \"..\")\n)\n\nSOURCE_COMMIT = os.environ.get(\"SOURCE_COMMIT\", \"unknown\")\n\n# Many of the settings are dependent on the environment we're running in.\n# The default environment is development, so the programmer doesn't have to set anything\nDJANGO_ENV = os.environ.get(\"DJANGO_ENV\", \"development\")\n_environments = [\"production\", \"staging\", \"testing\", \"development\"]\nif DJANGO_ENV not in _environments:\n raise Misconfiguration(f\"Set DJANGO_ENV to one of: {', '.join(_environments)}\")\n\n\ndef _set_django_env(env):\n \"\"\"Set the DJANGO_ENV variable.\n\n This is a helper function for the doctests below because doctests cannot set global variables.\n \"\"\"\n global DJANGO_ENV # noqa: PLW0603\n DJANGO_ENV = env\n\n\ndef setting(*, development, production, staging=_NOT_SET, testing=_NOT_SET):\n \"\"\"Generate a setting depending on the DJANGO_ENV and the arguments.\n\n This function is meant for static settings that depend on the DJANGO_ENV. If the\n staging or testing arguments are left to their defaults, they will fall back to\n the production and development settings respectively.\n\n Example:\n >>> _set_django_env(\"production\")\n >>> SEND_MESSAGES_WITH = setting(development=\"console\", production=\"mail\", staging=\"DM\")\n >>> SEND_MESSAGES_WITH\n 'mail'\n >>> _set_django_env(\"testing\")\n >>> setting(development=\"console\", production=\"mail\", staging=\"DM\")\n 'console'\n \"\"\"\n if DJANGO_ENV == \"development\" or (DJANGO_ENV == \"testing\" and testing is _NOT_SET):\n return development\n if DJANGO_ENV == \"testing\":\n return testing\n if DJANGO_ENV == \"production\" or (DJANGO_ENV == \"staging\" and staging is _NOT_SET):\n return production\n if DJANGO_ENV == \"staging\":\n return staging\n raise Misconfiguration(f\"Set DJANGO_ENV to one of: {', '.join(_environments)}\")\n\n\ndef from_env(\n name, *, production=_NOT_SET, staging=_NOT_SET, testing=_NOT_SET, development=None\n):\n \"\"\"Generate a setting that's overridable by the process environment.\n\n This will raise an exception if a default is not set for production. Because we use\n the sentinel value _NOT_SET, you can still set a default of None for production if wanted.\n\n As with :func:`setting` the staging and testing values will fall back to production\n and development. 
So if an environment variable is required in production, and no default\n is set for staging, staging will also raise the exception.\n\n Example:\n >>> _set_django_env(\"production\")\n >>> # A secret key should always be set in production via the environment\n >>> from_env(\"MEDIA_ROOT\", development=\"/media/root\")\n Traceback (most recent call last):\n ...\n thaliawebsite.settings.Misconfiguration: Environment variable `MEDIA_ROOT` must be supplied in production\n >>> _set_django_env(\"development\")\n >>> from_env(\"MEDIA_ROOT\", development=\"/media/root\")\n '/media/root'\n \"\"\"\n try:\n return os.environ[name]\n except KeyError:\n if DJANGO_ENV == \"production\" or (\n DJANGO_ENV == \"staging\" and staging is _NOT_SET\n ):\n if production is _NOT_SET and os.environ.get(\"MANAGE_PY\", \"0\") == \"0\":\n raise Misconfiguration(\n f\"Environment variable `{name}` must be supplied in production\"\n )\n if production is _NOT_SET and os.environ.get(\"MANAGE_PY\", \"0\") == \"1\":\n logger.warning(\n \"Ignoring unset %s because we're running a management command\", name\n )\n return development\n return production\n if DJANGO_ENV == \"staging\":\n return staging\n if DJANGO_ENV == \"development\" or (\n DJANGO_ENV == \"testing\" and testing is _NOT_SET\n ):\n return development\n if DJANGO_ENV == \"testing\":\n return testing\n raise Misconfiguration(f\"DJANGO_ENV set to unsupported value: {DJANGO_ENV}\")\n\n\n###############################################################################\n# Site settings\n\n# We use this setting to generate the email addresses, and for BASE_URL below.\nSITE_DOMAIN = from_env(\"SITE_DOMAIN\", development=\"localhost\", production=\"thalia.nu\")\n\n# Used to generate some absolute urls when we don't have access to a request.\nBASE_URL = from_env(\n \"BASE_URL\",\n development=f\"http://{SITE_DOMAIN}:8000\",\n production=f\"https://{SITE_DOMAIN}\",\n)\n\n# Default FROM email\nDEFAULT_FROM_EMAIL = f\"{os.environ.get('ADDRESS_NOREPLY', 'noreply')}@{SITE_DOMAIN}\"\n# https://docs.djangoproject.com/en/dev/ref/settings/#server-email\nSERVER_EMAIL = DEFAULT_FROM_EMAIL\nNEWSLETTER_FROM_ADDRESS = (\n f\"{os.environ.get('ADDRESS_NEWSLETTER', 'newsletter')}@{SITE_DOMAIN}\"\n)\nBOARD_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_CONTACT', 'info')}@{SITE_DOMAIN}\"\n)\nPARTNER_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_COLLABORATION', 'samenwerking')}@{SITE_DOMAIN}\"\n)\nEDUCATION_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_EDUCATION', 'educacie')}@{SITE_DOMAIN}\"\n)\nPROMO_REQUEST_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_PROMOREQUESTS', 'promocie')}@{SITE_DOMAIN}\"\n)\nTREASURER_NOTIFICATION_ADDRESS = (\n f\"{os.environ.get('ADDRESS_TREASURER', 'treasurer')}@{SITE_DOMAIN}\"\n)\n\nPROMO_PUBLISH_DATE_TIMEDELTA = timezone.timedelta(weeks=1)\n\n# How many days to keep reference faces after a user marks them for deletion\nFACEDETECTION_REFERENCE_FACE_STORAGE_PERIOD_AFTER_DELETE_DAYS = 180\n\n# How many reference faces a user can have at the same time\nFACEDETECTION_MAX_NUM_REFERENCE_FACES = 5\n\n# ARN of the concrexit-facedetection-lambda function.\n# See https://github.com/svthalia/concrexit-facedetection-lambda.\nFACEDETECTION_LAMBDA_ARN = from_env(\"FACEDETECTION_LAMBDA_ARN\")\n\nFACEDETECTION_LAMBDA_BATCH_SIZE = int(\n os.environ.get(\"FACEDETECTION_LAMBDA_BATCH_SIZE\", 20)\n)\n\n# The scheme the app uses for oauth redirection\nAPP_OAUTH_SCHEME = os.environ.get(\"APP_OAUTH_SCHEME\", \"nu.thalia\")\n\n# Membership 
prices\nMEMBERSHIP_PRICES = {\n \"year\": int(os.environ.get(\"MEMBERSHIP_PRICE_YEAR_CENTS\", \"750\")) / 100,\n \"study\": int(os.environ.get(\"MEMBERSHIP_PRICE_STUDY_CENTS\", \"3000\")) / 100,\n}\n\n# Window during which a payment can be deleted again\nPAYMENT_CHANGE_WINDOW = int(os.environ.get(\"PAYMENTS_CHANGE_WINDOW\", 10 * 60))\n\n# Payments creditor identifier\nSEPA_CREDITOR_ID = os.environ.get(\"SEPA_CREDITOR_ID\", \"<unknown>\")\n\n# Payment batch withdrawal date default offset after creation date\nPAYMENT_BATCH_DEFAULT_WITHDRAWAL_DATE_OFFSET = timezone.timedelta(days=14)\n\nTHALIA_PAY_ENABLED_PAYMENT_METHOD = (\n from_env(\"THALIA_PAY_ENABLED\", development=\"1\", staging=\"1\", production=\"0\") == \"1\"\n)\nTHALIA_PAY_FOR_NEW_MEMBERS = os.environ.get(\"THALIA_PAY_FOR_NEW_MEMBERS\", \"1\") == \"1\"\n\n###############################################################################\n# Django settings\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key\nSECRET_KEY = from_env(\n \"SECRET_KEY\", development=\"#o-0d1q5&^&06tn@8pr1f(n3$crafd++^%sacao7hj*ea@c)^t\"\n)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts\nALLOWED_HOSTS = [\n SITE_DOMAIN,\n *from_env(\"ALLOWED_HOSTS\", development=\"*\", production=\"\").split(\",\"),\n]\n# https://docs.djangoproject.com/en/dev/ref/settings/#internal-ips\nINTERNAL_IPS = setting(development=[\"127.0.0.1\", \"172.17.0.1\"], production=[])\n\nDJANGO_DRF_FILEPOND_UPLOAD_TMP = from_env(\n \"DJANGO_DRF_FILEPOND_UPLOAD_TMP\",\n development=os.path.join(BASE_DIR, \"filepond-temp-uploads\"),\n)\nDJANGO_DRF_FILEPOND_FILE_STORE_PATH = from_env(\n \"DJANGO_DRF_FILEPOND_FILE_STORE_PATH\",\n development=os.path.join(BASE_DIR, \"filepond-uploaded\"),\n)\nDJANGO_DRF_FILEPOND_ALLOW_EXTERNAL_UPLOAD_DIR = True\nDJANGO_DRF_FILEPOND_PERMISSION_CLASSES = {\n \"GET_FETCH\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"GET_LOAD\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"POST_PROCESS\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"GET_RESTORE\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"DELETE_REVERT\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n \"PATCH_PATCH\": [\n \"oauth2_provider.contrib.rest_framework.IsAuthenticatedOrTokenHasScope\",\n ],\n}\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#static-root\nSTATIC_ROOT = from_env(\"STATIC_ROOT\", development=os.path.join(BASE_DIR, \"static\"))\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#media-root\nMEDIA_ROOT = from_env(\"MEDIA_ROOT\", development=os.path.join(BASE_DIR, \"media\"))\n\n# https://github.com/johnsensible/django-sendfile#nginx-backend\nSENDFILE_URL = \"/media/sendfile/\"\nSENDFILE_ROOT = MEDIA_ROOT\nSENDFILE_BACKEND = setting(\n development=\"django_sendfile.backends.development\",\n production=\"django_sendfile.backends.nginx\",\n)\n\nPRIVATE_MEDIA_LOCATION = \"\"\nPUBLIC_MEDIA_LOCATION = \"public\"\nSTATICFILES_LOCATION = \"static\"\n\nMEDIA_URL = \"/media/private/\"\n\nAWS_ACCESS_KEY_ID = from_env(\"AWS_ACCESS_KEY_ID\", production=None)\nAWS_SECRET_ACCESS_KEY = from_env(\"AWS_SECRET_ACCESS_KEY\", production=None)\nAWS_STORAGE_BUCKET_NAME = from_env(\"AWS_STORAGE_BUCKET_NAME\", production=None)\nAWS_DEFAULT_ACL = \"private\"\nAWS_S3_OBJECT_PARAMETERS = {\"CacheControl\": \"max-age=86400\"}\nAWS_S3_SIGNATURE_VERSION = 
\"s3v4\"\n\nif AWS_STORAGE_BUCKET_NAME is not None:\n AWS_CLOUDFRONT_KEY = base64.urlsafe_b64decode(\n os.environ.get(\"AWS_CLOUDFRONT_KEY\", None)\n ).decode(\"utf-8\")\n AWS_CLOUDFRONT_KEY_ID = os.environ.get(\"AWS_CLOUDFRONT_KEY_ID\", None)\n AWS_S3_CUSTOM_DOMAIN = os.environ.get(\"AWS_CLOUDFRONT_DOMAIN\", None)\n\n _STATICFILES_STORAGE = \"thaliawebsite.storage.backend.StaticS3Storage\"\n STATIC_URL = f\"https://{AWS_S3_CUSTOM_DOMAIN}/static/\"\n\n _DEFAULT_FILE_STORAGE = \"thaliawebsite.storage.backend.PrivateS3Storage\"\n\n _PUBLIC_FILE_STORAGE = \"thaliawebsite.storage.backend.PublicS3Storage\"\n PUBLIC_MEDIA_URL = f\"https://{AWS_S3_CUSTOM_DOMAIN}/\"\nelse:\n _STATICFILES_STORAGE = setting(\n development=\"django.contrib.staticfiles.storage.StaticFilesStorage\",\n production=\"django.contrib.staticfiles.storage.ManifestStaticFilesStorage\",\n )\n STATIC_URL = \"/static/\"\n\n _DEFAULT_FILE_STORAGE = \"thaliawebsite.storage.backend.PrivateFileSystemStorage\"\n\n _PUBLIC_FILE_STORAGE = \"thaliawebsite.storage.backend.PublicFileSystemStorage\"\n PUBLIC_MEDIA_URL = \"/media/public/\"\n\nSTORAGES = {\n \"default\": {\"BACKEND\": _DEFAULT_FILE_STORAGE},\n \"public\": {\"BACKEND\": _PUBLIC_FILE_STORAGE},\n \"staticfiles\": {\"BACKEND\": _STATICFILES_STORAGE},\n}\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#conn-max-age\nCONN_MAX_AGE = int(from_env(\"CONN_MAX_AGE\", development=\"0\", production=\"60\"))\n\n# Useful for managing members\n# https://docs.djangoproject.com/en/dev/ref/settings/#data-upload-max-number-fields\nDATA_UPLOAD_MAX_NUMBER_FIELDS = os.environ.get(\"DATA_UPLOAD_MAX_NUMBER_FIELDS\", 10000)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#debug\nDEBUG = setting(development=True, production=False, testing=False)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure\nSESSION_COOKIE_SECURE = setting(development=False, production=True)\n# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure\nCSRF_COOKIE_SECURE = setting(development=False, production=True)\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#std-setting-SECURE_PROXY_SSL_HEADER\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#default-auto-field\nDEFAULT_AUTO_FIELD = \"django.db.models.AutoField\"\n\n\n###############################################################################\n# Celery settings\n# https://docs.celeryq.dev/en/stable/userguide/configuration.html#configuration\n\n# Set CELERY_BROKER_URL=\"redis://127.0.0.1:6379\" to use a local redis server in development.\nCELERY_BROKER_URL = from_env(\"CELERY_BROKER_URL\")\n\n# Always execute tasks synchronously when no broker is configured in development and testing.\n# See https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-task_always_eager\nCELERY_TASK_ALWAYS_EAGER = CELERY_BROKER_URL is None\n\n\n# See https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#caveats\nCELERY_BROKER_TRANSPORT_OPTIONS = {\"visibility_timeout\": 18000}\n\n# https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html\nCELERY_BEAT_SCHEDULE = {\n \"synchronize_mailinglists\": {\n \"task\": \"mailinglists.tasks.sync_mail\",\n \"schedule\": crontab(minute=30),\n },\n \"synchronize_moneybird\": {\n \"task\": \"moneybirdsynchronization.tasks.synchronize_moneybird\",\n \"schedule\": crontab(minute=30, hour=1),\n },\n \"sendpromooverviewweekly\": {\n \"task\": 
\"promotion.tasks.promo_update_weekly\",\n \"schedule\": crontab(minute=0, hour=8, day_of_week=1),\n },\n \"sendpromoooverviewdaily\": {\n \"task\": \"promotion.tasks.promo_update_daily\",\n \"schedule\": crontab(minute=0, hour=8),\n },\n \"facedetectlambda\": {\n \"task\": \"facedetection.tasks.trigger_facedetect_lambda\",\n \"schedule\": crontab(minute=0, hour=1),\n },\n \"revokeoldmandates\": {\n \"task\": \"payments.tasks.revoke_mandates\",\n \"schedule\": crontab(minute=0, hour=1),\n },\n \"membershipannouncement\": {\n \"task\": \"members.tasks.membership_announcement\",\n \"schedule\": crontab(minute=0, hour=6, day_of_month=31, month_of_year=8),\n },\n \"inforequest\": {\n \"task\": \"members.tasks.info_request\",\n \"schedule\": crontab(minute=0, hour=6, day_of_month=15, month_of_year=10),\n },\n \"expirationannouncement\": {\n \"task\": \"members.tasks.expiration_announcement\",\n \"schedule\": crontab(minute=0, hour=6, day_of_month=8, month_of_year=8),\n },\n \"minimiseregistration\": {\n \"task\": \"registrations.tasks.minimise_registrations\",\n \"schedule\": crontab(minute=0, hour=3, day_of_month=1),\n },\n \"sendscheduledmessages\": {\n \"task\": \"pushnotifications.tasks.send_scheduled_messages\",\n \"schedule\": crontab(minute=\"*/2\"),\n \"args\": (120,),\n },\n \"revokestaff\": {\n \"task\": \"activemembers.tasks.revoke_staff\",\n \"schedule\": crontab(minute=30, hour=3),\n },\n \"deletegsuiteusers\": {\n \"task\": \"activemembers.tasks.delete_gsuite_users\",\n \"schedule\": crontab(minute=30, hour=3, day_of_week=1),\n },\n \"sendplannednewsletters\": {\n \"task\": \"newsletters.tasks.send_planned_newsletters\",\n \"schedule\": crontab(minute=\"*/5\"),\n },\n \"dataminimisation\": {\n \"task\": \"thaliawebsite.tasks.data_minimisation\",\n \"schedule\": crontab(minute=0, hour=3),\n },\n \"cleanup\": {\n \"task\": \"thaliawebsite.tasks.clean_up\",\n \"schedule\": crontab(minute=0, hour=23),\n },\n \"cleartokens\": {\n \"task\": \"thaliawebsite.tasks.clear_tokens\",\n \"schedule\": crontab(minute=30, hour=3),\n },\n}\n\n###############################################################################\n# Email settings\n# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend\n_EMAIL_BACKEND = from_env(\"EMAIL_BACKEND\", development=\"console\", production=\"smtp\")\nif _EMAIL_BACKEND == \"console\":\n EMAIL_BACKEND = \"django.core.mail.backends.console.EmailBackend\"\n\nif _EMAIL_BACKEND == \"smtp\":\n EMAIL_BACKEND = \"django.core.mail.backends.smtp.EmailBackend\"\n EMAIL_HOST = os.environ.get(\"DJANGO_EMAIL_HOST\")\n EMAIL_PORT = os.environ.get(\"DJANGO_EMAIL_PORT\", 25)\n EMAIL_HOST_USER = os.environ.get(\"DJANGO_EMAIL_HOST_USER\", \"\")\n EMAIL_HOST_PASSWORD = os.environ.get(\"DJANGO_EMAIL_HOST_PASSWORD\", \"\")\n EMAIL_USE_TLS = os.environ.get(\"DJANGO_EMAIL_USE_TLS\", \"1\") == \"1\"\n EMAIL_TIMEOUT = int(os.environ.get(\"EMAIL_TIMEOUT\", \"10\"))\n if EMAIL_HOST is None:\n logger.warning(\n \"The email host is set to the default of localhost, are you sure you don't want to set EMAIL_HOST?\"\n )\n EMAIL_HOST = \"localhost\"\n\n###############################################################################\n# Database settings\n# https://docs.djangoproject.com/en/dev/ref/settings/#databases\nDATABASE_ENGINE = from_env(\n \"DATABASE_ENGINE\", development=\"sqlite\", production=\"postgresql\", testing=None\n)\nif DATABASE_ENGINE == \"sqlite\":\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": os.path.join(BASE_DIR, 
\"db.sqlite3\"),\n }\n }\n\nif DATABASE_ENGINE == \"postgresql\":\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"USER\": os.environ.get(\"POSTGRES_USER\", \"concrexit\"),\n \"PASSWORD\": os.environ.get(\"POSTGRES_PASSWORD\", None),\n \"NAME\": os.environ.get(\"POSTGRES_DB\", \"\"),\n \"HOST\": os.environ.get(\"POSTGRES_HOST\", \"\"),\n \"PORT\": os.environ.get(\"POSTGRES_PORT\", \"5432\"),\n }\n }\n\nif DJANGO_ENV == \"testing\":\n DATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"NAME\": \"thalia\",\n \"USER\": \"postgres\",\n \"PASSWORD\": \"postgres\",\n \"HOST\": \"127.0.0.1\",\n \"PORT\": 5432,\n },\n }\n\n###############################################################################\n# Firebase config\nFIREBASE_CREDENTIALS = os.environ.get(\"FIREBASE_CREDENTIALS\", \"{}\")\nif FIREBASE_CREDENTIALS != \"{}\":\n FIREBASE_CREDENTIALS = base64.urlsafe_b64decode(FIREBASE_CREDENTIALS)\nFIREBASE_CREDENTIALS = json.loads(FIREBASE_CREDENTIALS)\n\nif FIREBASE_CREDENTIALS != {}:\n from firebase_admin import credentials, initialize_app\n\n try:\n initialize_app(credential=credentials.Certificate(FIREBASE_CREDENTIALS))\n except ValueError:\n logger.error(\"Firebase application failed to initialise\")\n\n###############################################################################\n# GSuite config\nGSUITE_ADMIN_SCOPES = [\n \"https://www.googleapis.com/auth/admin.directory.group\",\n \"https://www.googleapis.com/auth/admin.directory.user\",\n \"https://www.googleapis.com/auth/apps.groups.settings\",\n]\n\nGSUITE_ADMIN_CREDENTIALS = os.environ.get(\"GSUITE_ADMIN_CREDENTIALS\", \"{}\")\nif GSUITE_ADMIN_CREDENTIALS != \"{}\":\n GSUITE_ADMIN_CREDENTIALS = base64.urlsafe_b64decode(GSUITE_ADMIN_CREDENTIALS)\nGSUITE_ADMIN_CREDENTIALS = json.loads(GSUITE_ADMIN_CREDENTIALS)\nGSUITE_ADMIN_USER = os.environ.get(\"GSUITE_ADMIN_USER\", \"[email protected]\")\nGSUITE_DOMAIN = from_env(\n \"GSUITE_DOMAIN\", development=\"thalia.localhost\", production=\"thalia.nu\"\n)\nGSUITE_MEMBERS_DOMAIN = from_env(\n \"GSUITE_MEMBERS_DOMAIN\",\n development=\"members.thalia.localhost\",\n production=\"members.thalia.nu\",\n)\nGSUITE_MEMBERS_AUTOSYNC = os.environ.get(\"GSUITE_MEMBERS_AUTOSYNC\", \"0\") == \"1\"\n\nif GSUITE_ADMIN_CREDENTIALS != {}:\n from google.oauth2 import service_account\n\n GSUITE_ADMIN_CREDENTIALS = service_account.Credentials.from_service_account_info(\n GSUITE_ADMIN_CREDENTIALS, scopes=GSUITE_ADMIN_SCOPES\n ).with_subject(GSUITE_ADMIN_USER)\n\nEMAIL_DOMAIN_BLACKLIST = [GSUITE_MEMBERS_DOMAIN]\n\n###############################################################################\n# Google maps API key and secrets\nGOOGLE_MAPS_API_KEY = os.environ.get(\"GOOGLE_MAPS_API_KEY\", \"\")\nGOOGLE_MAPS_API_SECRET = os.environ.get(\"GOOGLE_MAPS_API_SECRET\", \"\")\nGOOGLE_PLACES_API_KEY = os.environ.get(\"GOOGLE_PLACES_API_KEY\", \"\")\n\n###############################################################################\n# Sentry setup\nif \"SENTRY_DSN\" in os.environ:\n import sentry_sdk\n from sentry_sdk.integrations.celery import CeleryIntegration\n from sentry_sdk.integrations.django import DjangoIntegration\n\n sentry_sdk.init(\n dsn=os.environ.get(\"SENTRY_DSN\"),\n integrations=[\n DjangoIntegration(),\n CeleryIntegration(\n monitor_beat_tasks=True,\n ),\n ],\n release=SOURCE_COMMIT,\n send_default_pii=True,\n environment=DJANGO_ENV,\n traces_sample_rate=float(os.environ.get(\"SENTRY_TRACES_SAMPLE_RATE\", 0.2)),\n 
profiles_sample_rate=float(os.environ.get(\"SENTRY_PROFILES_SAMPLE_RATE\", 0.0)),\n )\n\n\n###############################################################################\n# (Mostly) static settings\nINSTALLED_APPS = [\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.staticfiles\",\n \"django.contrib.sitemaps\",\n # Dependencies\n \"oauth2_provider\",\n \"corsheaders\",\n \"django_bootstrap5\",\n \"tinymce\",\n \"rest_framework\",\n \"rest_framework.authtoken\",\n \"debug_toolbar\",\n \"sass_processor\",\n \"admin_auto_filters\",\n \"django_drf_filepond\",\n \"django_filepond_widget\",\n \"thumbnails\",\n # Our apps\n # Directly link to the app config when applicable as recommended\n # by the docs: https://docs.djangoproject.com/en/2.0/ref/applications/\n \"thaliawebsite.apps.ThaliaWebsiteConfig\", # include for admin settings\n # Load django.contrib.admin after thaliawebsite so the admin page gets modified\n \"django.contrib.admin\",\n # Our apps ordered such that templates in the first\n # apps can override those used by the later apps.\n \"pushnotifications.apps.PushNotificationsConfig\",\n \"facedetection.apps.FaceDetectionConfig\",\n \"announcements.apps.AnnouncementsConfig\",\n \"promotion.apps.PromotionConfig\",\n \"members.apps.MembersConfig\",\n \"documents.apps.DocumentsConfig\",\n \"activemembers.apps.ActiveMembersConfig\",\n \"photos.apps.PhotosConfig\",\n \"utils\",\n \"mailinglists.apps.MailinglistsConfig\",\n \"merchandise.apps.MerchandiseConfig\",\n \"thabloid.apps.ThabloidConfig\",\n \"partners.apps.PartnersConfig\",\n \"events.apps.EventsConfig\",\n \"pizzas.apps.PizzasConfig\",\n \"newsletters.apps.NewslettersConfig\",\n \"education.apps.EducationConfig\",\n \"registrations.apps.RegistrationsConfig\",\n \"payments.apps.PaymentsConfig\",\n \"singlepages.apps.SinglepagesConfig\",\n \"shortlinks.apps.ShortLinkConfig\",\n \"sales.apps.SalesConfig\",\n \"moneybirdsynchronization.apps.MoneybirdsynchronizationConfig\",\n]\n\nMIDDLEWARE = [\n \"debug_toolbar.middleware.DebugToolbarMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.http.ConditionalGetMiddleware\",\n \"corsheaders.middleware.CorsMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.locale.LocaleMiddleware\",\n \"thaliawebsite.middleware.RealIPMiddleware\",\n \"django_ratelimit.middleware.RatelimitMiddleware\",\n \"members.middleware.MemberMiddleware\",\n \"announcements.middleware.AnnouncementMiddleware\",\n]\n\nif DJANGO_ENV in (\"development\", \"testing\"):\n INSTALLED_APPS += [\n \"django_template_check\",\n \"django_extensions\",\n ]\n\nif DJANGO_ENV == \"testing\":\n for x in (\n \"debug_toolbar.middleware.DebugToolbarMiddleware\",\n \"django.middleware.http.ConditionalGetMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n ):\n MIDDLEWARE.remove(x)\n for x in (\"debug_toolbar\",):\n INSTALLED_APPS.remove(x)\n\nROOT_URLCONF = \"thaliawebsite.urls\"\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [os.path.join(BASE_DIR, \"templates\")],\n \"APP_DIRS\": setting(development=True, production=False),\n \"OPTIONS\": {\n \"context_processors\": [\n 
\"thaliawebsite.context_processors.source_commit\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.template.context_processors.media\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"announcements.context_processors.announcements\",\n \"thaliawebsite.context_processors.aprilfools\",\n \"thaliawebsite.context_processors.lustrum_styling\",\n ],\n },\n },\n]\n\nif DJANGO_ENV in [\"production\", \"staging\"]:\n # Use caching template loader\n TEMPLATES[0][\"OPTIONS\"][\"loaders\"] = [\n (\n \"django.template.loaders.cached.Loader\",\n [\n \"django.template.loaders.filesystem.Loader\",\n \"django.template.loaders.app_directories.Loader\",\n ],\n )\n ]\n\n# Default logging: https://github.com/django/django/blob/master/django/utils/log.py\n# We disable mailing the admin.\n# Server errors will be sent to Sentry via the config below this.\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"filters\": {\n \"require_debug_false\": {\n \"()\": \"django.utils.log.RequireDebugFalse\",\n },\n \"require_debug_true\": {\n \"()\": \"django.utils.log.RequireDebugTrue\",\n },\n },\n \"formatters\": {\n \"django.server\": {\n \"()\": \"django.utils.log.ServerFormatter\",\n \"format\": \"[{server_time}] {message}\",\n \"style\": \"{\",\n }\n },\n \"handlers\": {\n \"console\": {\n \"level\": \"INFO\",\n \"filters\": [\"require_debug_true\"],\n \"class\": \"logging.StreamHandler\",\n },\n \"django.server\": {\n \"level\": \"INFO\",\n \"class\": \"logging.StreamHandler\",\n \"formatter\": \"django.server\",\n },\n },\n \"loggers\": {\n \"django\": {\n \"handlers\": [\"console\"],\n \"level\": \"INFO\",\n },\n \"django.server\": {\n \"handlers\": [\"django.server\"],\n \"level\": \"INFO\",\n \"propagate\": False,\n },\n },\n}\n\nREDIS_CACHE_PORT = int(\n from_env(\"REDIS_CACHE_PORT\", development=\"6379\", production=\"6379\")\n)\nREDIS_CACHE_HOST = from_env(\"REDIS_CACHE_HOST\")\nREDIS_CACHE_URL = (\n f\"redis://{REDIS_CACHE_HOST}:{REDIS_CACHE_PORT}\" if REDIS_CACHE_HOST else None\n)\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django.core.cache.backends.redis.RedisCache\",\n \"LOCATION\": REDIS_CACHE_URL,\n }\n if REDIS_CACHE_URL is not None\n else {\n \"BACKEND\": \"django.core.cache.backends.db.DatabaseCache\",\n \"LOCATION\": \"django_default_db_cache\",\n },\n}\n\nSESSION_ENGINE = \"django.contrib.sessions.backends.cached_db\"\n\nWSGI_APPLICATION = \"thaliawebsite.wsgi.application\"\n\n# Login pages\nLOGIN_URL = \"/user/login/\"\nLOGIN_REDIRECT_URL = \"/\"\n\n# Cors configuration\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_URLS_REGEX = r\"^/(?:api/v1|api/v2|user/oauth)/.*\"\n\n# OAuth configuration\nOIDC_RSA_PRIVATE_KEY = from_env(\"OIDC_RSA_PRIVATE_KEY\", testing=None)\nif OIDC_RSA_PRIVATE_KEY is not None:\n OIDC_RSA_PRIVATE_KEY = base64.urlsafe_b64decode(OIDC_RSA_PRIVATE_KEY).decode()\n\nOAUTH2_PROVIDER = {\n \"OIDC_ENABLED\": True,\n \"OIDC_RSA_PRIVATE_KEY\": OIDC_RSA_PRIVATE_KEY,\n \"ALLOWED_REDIRECT_URI_SCHEMES\": setting(\n production=[\"https\", APP_OAUTH_SCHEME],\n staging=[\"http\", \"https\", APP_OAUTH_SCHEME],\n development=[\"http\", \"https\", APP_OAUTH_SCHEME],\n ),\n \"SCOPES\": {\n \"openid\": \"OpenID Connect\",\n \"read\": \"Authenticated read access to the website\",\n \"write\": \"Authenticated write access to the website\",\n \"activemembers:read\": \"Read access to committee, society and board groups\",\n \"announcements:read\": \"Read access 
to announcements\",\n \"events:read\": \"Read access to events and your event registrations\",\n \"events:register\": \"Write access to the state of your event registrations\",\n \"events:admin\": \"Admin access to the events\",\n \"food:read\": \"Read access to food events\",\n \"food:order\": \"Order access to food events\",\n \"food:admin\": \"Admin access to food events\",\n \"members:read\": \"Read access to the members directory\",\n \"photos:read\": \"Read access to photos\",\n \"profile:read\": \"Read access to your member profile\",\n \"profile:write\": \"Write access to your member profile\",\n \"pushnotifications:read\": \"Read access to push notifications\",\n \"pushnotifications:write\": \"Write access to push notifications\",\n \"partners:read\": \"Read access to partners\",\n \"payments:read\": \"Read access to payments\",\n \"payments:write\": \"Write access to payments\",\n \"payments:admin\": \"Admin access to payments\",\n \"sales:read\": \"Read access to your Point of Sale orders\",\n \"sales:order\": \"Place Point of Sale orders on your behalf\",\n \"sales:admin\": \"Admin access to Point of Sale orders\",\n },\n}\n\n# Password validation\n# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators\nAUTH_PASSWORD_VALIDATORS = [\n {\n \"NAME\": (\n \"django.contrib.auth.\"\n \"password_validation.UserAttributeSimilarityValidator\"\n ),\n },\n {\n \"NAME\": (\"django.contrib.auth.password_validation.MinimumLengthValidator\"),\n },\n {\n \"NAME\": (\"django.contrib.auth.password_validation.CommonPasswordValidator\"),\n },\n {\n \"NAME\": (\"django.contrib.auth.password_validation.NumericPasswordValidator\"),\n },\n]\n\nPASSWORD_HASHERS = setting(\n development=(\n \"django.contrib.auth.hashers.PBKDF2PasswordHasher\",\n \"django.contrib.auth.hashers.MD5PasswordHasher\",\n ),\n production=(\n \"django.contrib.auth.hashers.Argon2PasswordHasher\",\n \"django.contrib.auth.hashers.PBKDF2PasswordHasher\",\n \"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher\",\n \"django.contrib.auth.hashers.BCryptSHA256PasswordHasher\",\n \"django.contrib.auth.hashers.BCryptPasswordHasher\",\n ),\n testing=(\"django.contrib.auth.hashers.MD5PasswordHasher\",),\n)\n\nAUTHENTICATION_BACKENDS = [\n \"django.contrib.auth.backends.ModelBackend\",\n \"activemembers.backends.MemberGroupBackend\",\n]\n\nREST_FRAMEWORK = {\n \"DEFAULT_AUTHENTICATION_CLASSES\": (\n \"rest_framework.authentication.SessionAuthentication\",\n \"thaliawebsite.api.authentication.APIv1TokenAuthentication\",\n \"oauth2_provider.contrib.rest_framework.OAuth2Authentication\",\n ),\n \"DEFAULT_PAGINATION_CLASS\": \"thaliawebsite.api.pagination.APIv2LimitOffsetPagination\",\n \"PAGE_SIZE\": 50, # Only for API v2\n \"ALLOWED_VERSIONS\": [\"v1\", \"v2\", \"calendarjs\", \"facedetection\"],\n \"DEFAULT_VERSIONING_CLASS\": \"rest_framework.versioning.NamespaceVersioning\",\n \"DEFAULT_SCHEMA_CLASS\": \"thaliawebsite.api.openapi.OAuthAutoSchema\",\n \"DEFAULT_THROTTLE_CLASSES\": [\n \"thaliawebsite.api.throttling.AnonRateThrottle\",\n \"thaliawebsite.api.throttling.UserRateThrottle\",\n ],\n \"DEFAULT_THROTTLE_RATES\": setting(\n production={\"anon\": \"30/min\", \"user\": \"90/min\"},\n staging={\"anon\": \"30/min\", \"user\": \"90/min\"},\n development={\"anon\": None, \"user\": None},\n ),\n}\n\n# Rate limiting\nRATELIMIT_VIEW = \"thaliawebsite.views.rate_limited_view\"\n\n# Internationalization\n# https://docs.djangoproject.com/en/dev/topics/i18n/\nDATETIME_FORMAT = \"j M, Y, H:i\"\nSHORT_DATETIME_FORMAT = 
\"d-m-Y, H:i\"\n\nLANGUAGE_CODE = \"en\"\nTIME_ZONE = \"Europe/Amsterdam\"\nUSE_I18N = True\nUSE_L10N = False\nUSE_TZ = True\nLANGUAGES = [(\"en\", _(\"English\"))]\nLOCALE_PATHS = (\"locale\",)\n\n# Static files\nSTATICFILES_FINDERS = (\n \"django.contrib.staticfiles.finders.FileSystemFinder\",\n \"django.contrib.staticfiles.finders.AppDirectoriesFinder\",\n \"sass_processor.finders.CssFinder\",\n)\n\n# Allow importing .scss files that don't start with an underscore.\n# See https://github.com/jrief/django-sass-processor\nSASS_PROCESSOR_INCLUDE_FILE_PATTERN = r\"^.+\\.scss$\"\n\n# django-sass-processor does not use the Django 4.2 `storages` API yet,\n# but we can simply give it the path as we would with the new API.\nSASS_PROCESSOR_STORAGE = _STATICFILES_STORAGE\n\n# See utils/model/signals.py for explanation\nSUSPEND_SIGNALS = False\n\nTHUMBNAILS_METADATA = (\n {\n \"BACKEND\": \"thumbnails.backends.metadata.RedisBackend\",\n \"host\": REDIS_CACHE_HOST,\n \"port\": REDIS_CACHE_PORT,\n }\n if REDIS_CACHE_HOST\n else {\n \"BACKEND\": \"thumbnails.backends.metadata.DatabaseBackend\",\n }\n)\n\nTHUMBNAILS = {\n \"METADATA\": THUMBNAILS_METADATA,\n \"STORAGE\": {\n # django-thumbs does not use the Django 4.2 `storages` API yet,\n # but we can simply give it the path as we would with the new API.\n \"BACKEND\": _DEFAULT_FILE_STORAGE,\n },\n \"SIZES\": {\n \"small\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (300, 300),\n \"cover\": True,\n },\n ],\n },\n \"medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (600, 600),\n \"cover\": True,\n },\n ],\n },\n \"large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1200, 900),\n \"cover\": True,\n },\n ],\n },\n \"photo_medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1200, 900),\n },\n ],\n },\n \"photo_large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1920, 1920),\n },\n ],\n },\n \"avatar_large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (900, 900),\n \"cover\": True,\n },\n ],\n },\n \"slide_small\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (500, 108),\n \"cover\": True,\n },\n ],\n },\n \"slide_medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1000, 215),\n \"cover\": True,\n },\n ],\n },\n \"slide\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (2000, 430),\n \"cover\": True,\n },\n ],\n },\n \"fit_small\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (300, 300),\n },\n ],\n },\n \"fit_medium\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (600, 600),\n },\n ],\n },\n \"fit_large\": {\n \"FORMAT\": \"webp\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.thumbnail\",\n \"size\": (1200, 900),\n },\n ],\n },\n \"source\": {\n \"FORMAT\": \"jpg\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.process_upload\",\n \"size\": (8_000, 8_000),\n \"format\": \"jpg\",\n 
}\n ],\n },\n \"source_png\": {\n \"FORMAT\": \"png\",\n \"PROCESSORS\": [\n {\n \"PATH\": \"utils.media.processors.process_upload\",\n \"size\": (8_000, 8_000),\n \"format\": \"png\",\n }\n ],\n },\n },\n}\n\nTHUMBNAIL_SIZES = set(THUMBNAILS[\"SIZES\"].keys())\n\n# TinyMCE config\nTINYMCE_DEFAULT_CONFIG = {\n \"max_height\": 500,\n \"menubar\": False,\n \"plugins\": \"autolink autoresize link image code media paste lists\",\n \"toolbar\": \"h2 h3 | bold italic underline strikethrough | image media | link unlink \"\n \"| bullist numlist | undo redo | code\",\n \"contextmenu\": \"bold italic underline strikethrough | link\",\n \"paste_as_text\": True,\n \"relative_urls\": False,\n \"remove_script_host\": False,\n \"autoresize_bottom_margin\": 50,\n}\nTINYMCE_EXTRA_MEDIA = {\n \"css\": {\n \"all\": [\n \"css/tinymce.css\",\n ],\n },\n}\n\n\nBOOTSTRAP5 = {\"required_css_class\": \"required-field\"}\n\n# https://docs.djangoproject.com/en/dev/ref/settings/#default-exception-reporter-filter\nDEFAULT_EXCEPTION_REPORTER_FILTER = (\n \"utils.exception_filter.ThaliaSafeExceptionReporterFilter\"\n)\n\n# Make sure the locations in django.po files don't include line nrs.\nmakemessages.Command.xgettext_options.append(\"--add-location=file\")\n\nGRAPH_MODELS = {\n \"all_applications\": False,\n \"group_models\": True,\n \"app_labels\": [\n \"events\",\n \"photos\",\n \"merchandise\",\n \"thabloid\",\n \"partners\",\n \"newsletters\",\n \"shortlinks\",\n \"promotion\",\n \"documents\",\n \"pizzas\",\n \"announcements\",\n \"sales\",\n \"registrations\",\n \"mailinglists\",\n \"payments\",\n \"members\",\n \"admin\",\n \"pushnotifications\",\n \"activemembers\",\n \"education\",\n \"auth\",\n ],\n}\n\nMONEYBIRD_START_DATE = os.environ.get(\"MONEYBIRD_START_DATE\", \"2023-09-01\")\n\nMONEYBIRD_ADMINISTRATION_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_ADMINISTRATION_ID\"))\n if os.environ.get(\"MONEYBIRD_ADMINISTRATION_ID\")\n else None\n)\n\nMONEYBIRD_API_KEY = os.environ.get(\"MONEYBIRD_API_KEY\")\n\nMONEYBIRD_SYNC_ENABLED = MONEYBIRD_ADMINISTRATION_ID and MONEYBIRD_API_KEY\n\nMONEYBIRD_MEMBER_PK_CUSTOM_FIELD_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_MEMBER_PK_CUSTOM_FIELD_ID\"))\n if os.environ.get(\"MONEYBIRD_MEMBER_PK_CUSTOM_FIELD_ID\")\n else None\n)\nMONEYBIRD_UNKNOWN_PAYER_CONTACT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_UNKNOWN_PAYER_CONTACT_ID\"))\n if os.environ.get(\"MONEYBIRD_UNKNOWN_PAYER_CONTACT_ID\")\n else None\n)\nMONEYBIRD_CONTRIBUTION_LEDGER_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_CONTRIBUTION_LEDGER_ID\"))\n if os.environ.get(\"MONEYBIRD_CONTRIBUTION_LEDGER_ID\")\n else None\n)\n\nMONEYBIRD_TPAY_FINANCIAL_ACCOUNT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_TPAY_FINANCIAL_ACCOUNT_ID\"))\n if os.environ.get(\"MONEYBIRD_TPAY_FINANCIAL_ACCOUNT_ID\")\n else None\n)\nMONEYBIRD_CASH_FINANCIAL_ACCOUNT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_CASH_FINANCIAL_ACCOUNT_ID\"))\n if os.environ.get(\"MONEYBIRD_CASH_FINANCIAL_ACCOUNT_ID\")\n else None\n)\nMONEYBIRD_CARD_FINANCIAL_ACCOUNT_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_CARD_FINANCIAL_ACCOUNT_ID\"))\n if os.environ.get(\"MONEYBIRD_CARD_FINANCIAL_ACCOUNT_ID\")\n else None\n)\n\nMONEYBIRD_ZERO_TAX_RATE_ID: Optional[int] = (\n int(os.environ.get(\"MONEYBIRD_ZERO_TAX_RATE_ID\"))\n if os.environ.get(\"MONEYBIRD_ZERO_TAX_RATE_ID\")\n else None\n)\n",
"path": "website/thaliawebsite/settings.py"
}
] | diff --git a/website/thaliawebsite/settings.py b/website/thaliawebsite/settings.py
index 6be42eefe..d06e004be 100644
--- a/website/thaliawebsite/settings.py
+++ b/website/thaliawebsite/settings.py
@@ -744,6 +744,8 @@ def from_env(
},
}
+SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
+
WSGI_APPLICATION = "thaliawebsite.wsgi.application"
# Login pages
|
gratipay__gratipay.com-1802 | default country on cc form should be USA
Suggested by @toddbranch on [Twitter]().
Can we be less imperialist, somehow? [Maxmind](http://www.maxmind.com/) it?
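A rough sketch of the GeoIP idea floated above, for illustration only: it uses MaxMind's `geoip2` reader to guess a country code from the visitor's IP and falls back to `US`. The database path and the idea of wiring this into the card form are assumptions, not anything Gittip ships; the change that actually landed (see the diff below) simply promotes the `US` entry to the top of `COUNTRIES`.

```python
# Hypothetical helper, not part of Gittip: guess a default country code from an
# IP address with MaxMind's geoip2 reader, falling back to "US".
import geoip2.database
import geoip2.errors


def default_country(ip_address, db_path="GeoLite2-Country.mmdb"):  # db_path is an assumption
    try:
        with geoip2.database.Reader(db_path) as reader:
            return reader.country(ip_address).country.iso_code or "US"
    except (geoip2.errors.AddressNotFoundError, OSError):
        return "US"
```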
| [
{
"content": "import locale\nimport time\n\nimport gittip\nfrom aspen import log_dammit, Response\nfrom aspen.utils import typecheck\nfrom tornado.escape import linkify\nfrom postgres.cursors import SimpleCursorBase\n\n\nCOUNTRIES = (\n ('AF', u'Afghanistan'),\n ('AX', u'\\xc5land Islands'),\n ('AL', u'Albania'),\n ('DZ', u'Algeria'),\n ('AS', u'American Samoa'),\n ('AD', u'Andorra'),\n ('AO', u'Angola'),\n ('AI', u'Anguilla'),\n ('AQ', u'Antarctica'),\n ('AG', u'Antigua and Barbuda'),\n ('AR', u'Argentina'),\n ('AM', u'Armenia'),\n ('AW', u'Aruba'),\n ('AU', u'Australia'),\n ('AT', u'Austria'),\n ('AZ', u'Azerbaijan'),\n ('BS', u'Bahamas'),\n ('BH', u'Bahrain'),\n ('BD', u'Bangladesh'),\n ('BB', u'Barbados'),\n ('BY', u'Belarus'),\n ('BE', u'Belgium'),\n ('BZ', u'Belize'),\n ('BJ', u'Benin'),\n ('BM', u'Bermuda'),\n ('BT', u'Bhutan'),\n ('BO', u'Bolivia, Plurinational State of'),\n ('BQ', u'Bonaire, Sint Eustatius and Saba'),\n ('BA', u'Bosnia and Herzegovina'),\n ('BW', u'Botswana'),\n ('BV', u'Bouvet Island'),\n ('BR', u'Brazil'),\n ('IO', u'British Indian Ocean Territory'),\n ('BN', u'Brunei Darussalam'),\n ('BG', u'Bulgaria'),\n ('BF', u'Burkina Faso'),\n ('BI', u'Burundi'),\n ('KH', u'Cambodia'),\n ('CM', u'Cameroon'),\n ('CA', u'Canada'),\n ('CV', u'Cape Verde'),\n ('KY', u'Cayman Islands'),\n ('CF', u'Central African Republic'),\n ('TD', u'Chad'),\n ('CL', u'Chile'),\n ('CN', u'China'),\n ('CX', u'Christmas Island'),\n ('CC', u'Cocos (Keeling) Islands'),\n ('CO', u'Colombia'),\n ('KM', u'Comoros'),\n ('CG', u'Congo'),\n ('CD', u'Congo, The Democratic Republic of the'),\n ('CK', u'Cook Islands'),\n ('CR', u'Costa Rica'),\n ('CI', u\"C\\xf4te D'ivoire\"),\n ('HR', u'Croatia'),\n ('CU', u'Cuba'),\n ('CW', u'Cura\\xe7ao'),\n ('CY', u'Cyprus'),\n ('CZ', u'Czech Republic'),\n ('DK', u'Denmark'),\n ('DJ', u'Djibouti'),\n ('DM', u'Dominica'),\n ('DO', u'Dominican Republic'),\n ('EC', u'Ecuador'),\n ('EG', u'Egypt'),\n ('SV', u'El Salvador'),\n ('GQ', u'Equatorial Guinea'),\n ('ER', u'Eritrea'),\n ('EE', u'Estonia'),\n ('ET', u'Ethiopia'),\n ('FK', u'Falkland Islands (Malvinas)'),\n ('FO', u'Faroe Islands'),\n ('FJ', u'Fiji'),\n ('FI', u'Finland'),\n ('FR', u'France'),\n ('GF', u'French Guiana'),\n ('PF', u'French Polynesia'),\n ('TF', u'French Southern Territories'),\n ('GA', u'Gabon'),\n ('GM', u'Gambia'),\n ('GE', u'Georgia'),\n ('DE', u'Germany'),\n ('GH', u'Ghana'),\n ('GI', u'Gibraltar'),\n ('GR', u'Greece'),\n ('GL', u'Greenland'),\n ('GD', u'Grenada'),\n ('GP', u'Guadeloupe'),\n ('GU', u'Guam'),\n ('GT', u'Guatemala'),\n ('GG', u'Guernsey'),\n ('GN', u'Guinea'),\n ('GW', u'Guinea-bissau'),\n ('GY', u'Guyana'),\n ('HT', u'Haiti'),\n ('HM', u'Heard Island and McDonald Islands'),\n ('VA', u'Holy See (Vatican City State)'),\n ('HN', u'Honduras'),\n ('HK', u'Hong Kong'),\n ('HU', u'Hungary'),\n ('IS', u'Iceland'),\n ('IN', u'India'),\n ('ID', u'Indonesia'),\n ('IR', u'Iran, Islamic Republic of'),\n ('IQ', u'Iraq'),\n ('IE', u'Ireland'),\n ('IM', u'Isle of Man'),\n ('IL', u'Israel'),\n ('IT', u'Italy'),\n ('JM', u'Jamaica'),\n ('JP', u'Japan'),\n ('JE', u'Jersey'),\n ('JO', u'Jordan'),\n ('KZ', u'Kazakhstan'),\n ('KE', u'Kenya'),\n ('KI', u'Kiribati'),\n ('KP', u\"Korea, Democratic People's Republic of\"),\n ('KR', u'Korea, Republic of'),\n ('KW', u'Kuwait'),\n ('KG', u'Kyrgyzstan'),\n ('LA', u\"Lao People's Democratic Republic\"),\n ('LV', u'Latvia'),\n ('LB', u'Lebanon'),\n ('LS', u'Lesotho'),\n ('LR', u'Liberia'),\n ('LY', u'Libya'),\n ('LI', u'Liechtenstein'),\n ('LT', u'Lithuania'),\n 
('LU', u'Luxembourg'),\n ('MO', u'Macao'),\n ('MK', u'Macedonia, The Former Yugoslav Republic of'),\n ('MG', u'Madagascar'),\n ('MW', u'Malawi'),\n ('MY', u'Malaysia'),\n ('MV', u'Maldives'),\n ('ML', u'Mali'),\n ('MT', u'Malta'),\n ('MH', u'Marshall Islands'),\n ('MQ', u'Martinique'),\n ('MR', u'Mauritania'),\n ('MU', u'Mauritius'),\n ('YT', u'Mayotte'),\n ('MX', u'Mexico'),\n ('FM', u'Micronesia, Federated States of'),\n ('MD', u'Moldova, Republic of'),\n ('MC', u'Monaco'),\n ('MN', u'Mongolia'),\n ('ME', u'Montenegro'),\n ('MS', u'Montserrat'),\n ('MA', u'Morocco'),\n ('MZ', u'Mozambique'),\n ('MM', u'Myanmar'),\n ('NA', u'Namibia'),\n ('NR', u'Nauru'),\n ('NP', u'Nepal'),\n ('NL', u'Netherlands'),\n ('NC', u'New Caledonia'),\n ('NZ', u'New Zealand'),\n ('NI', u'Nicaragua'),\n ('NE', u'Niger'),\n ('NG', u'Nigeria'),\n ('NU', u'Niue'),\n ('NF', u'Norfolk Island'),\n ('MP', u'Northern Mariana Islands'),\n ('NO', u'Norway'),\n ('OM', u'Oman'),\n ('PK', u'Pakistan'),\n ('PW', u'Palau'),\n ('PS', u'Palestinian Territory, Occupied'),\n ('PA', u'Panama'),\n ('PG', u'Papua New Guinea'),\n ('PY', u'Paraguay'),\n ('PE', u'Peru'),\n ('PH', u'Philippines'),\n ('PN', u'Pitcairn'),\n ('PL', u'Poland'),\n ('PT', u'Portugal'),\n ('PR', u'Puerto Rico'),\n ('QA', u'Qatar'),\n ('RE', u'R\\xe9union'),\n ('RO', u'Romania'),\n ('RU', u'Russian Federation'),\n ('RW', u'Rwanda'),\n ('BL', u'Saint Barth\\xe9lemy'),\n ('SH', u'Saint Helena, Ascension and Tristan Da Cunha'),\n ('KN', u'Saint Kitts and Nevis'),\n ('LC', u'Saint Lucia'),\n ('MF', u'Saint Martin (French Part)'),\n ('PM', u'Saint Pierre and Miquelon'),\n ('VC', u'Saint Vincent and the Grenadines'),\n ('WS', u'Samoa'),\n ('SM', u'San Marino'),\n ('ST', u'Sao Tome and Principe'),\n ('SA', u'Saudi Arabia'),\n ('SN', u'Senegal'),\n ('RS', u'Serbia'),\n ('SC', u'Seychelles'),\n ('SL', u'Sierra Leone'),\n ('SG', u'Singapore'),\n ('SX', u'Sint Maarten (Dutch Part)'),\n ('SK', u'Slovakia'),\n ('SI', u'Slovenia'),\n ('SB', u'Solomon Islands'),\n ('SO', u'Somalia'),\n ('ZA', u'South Africa'),\n ('GS', u'South Georgia and the South Sandwich Islands'),\n ('SS', u'South Sudan'),\n ('ES', u'Spain'),\n ('LK', u'Sri Lanka'),\n ('SD', u'Sudan'),\n ('SR', u'Suriname'),\n ('SJ', u'Svalbard and Jan Mayen'),\n ('SZ', u'Swaziland'),\n ('SE', u'Sweden'),\n ('CH', u'Switzerland'),\n ('SY', u'Syrian Arab Republic'),\n ('TW', u'Taiwan, Province of China'),\n ('TJ', u'Tajikistan'),\n ('TZ', u'Tanzania, United Republic of'),\n ('TH', u'Thailand'),\n ('TL', u'Timor-leste'),\n ('TG', u'Togo'),\n ('TK', u'Tokelau'),\n ('TO', u'Tonga'),\n ('TT', u'Trinidad and Tobago'),\n ('TN', u'Tunisia'),\n ('TR', u'Turkey'),\n ('TM', u'Turkmenistan'),\n ('TC', u'Turks and Caicos Islands'),\n ('TV', u'Tuvalu'),\n ('UG', u'Uganda'),\n ('UA', u'Ukraine'),\n ('AE', u'United Arab Emirates'),\n ('GB', u'United Kingdom'),\n ('US', u'United States'),\n ('UM', u'United States Minor Outlying Islands'),\n ('UY', u'Uruguay'),\n ('UZ', u'Uzbekistan'),\n ('VU', u'Vanuatu'),\n ('VE', u'Venezuela, Bolivarian Republic of'),\n ('VN', u'Viet Nam'),\n ('VG', u'Virgin Islands, British'),\n ('VI', u'Virgin Islands, U.S.'),\n ('WF', u'Wallis and Futuna'),\n ('EH', u'Western Sahara'),\n ('YE', u'Yemen'),\n ('ZM', u'Zambia'),\n ('ZW', u'Zimbabwe'),\n)\nCOUNTRIES_MAP = dict(COUNTRIES)\n\n\ndef wrap(u):\n \"\"\"Given a unicode, return a unicode.\n \"\"\"\n typecheck(u, unicode)\n u = linkify(u) # Do this first, because it calls xthml_escape.\n u = u.replace(u'\\r\\n', u'<br />\\r\\n').replace(u'\\n', u'<br />\\n')\n 
return u if u else '...'\n\n\ndef dict_to_querystring(mapping):\n if not mapping:\n return u''\n\n arguments = []\n for key, values in mapping.iteritems():\n for val in values:\n arguments.append(u'='.join([key, val]))\n\n return u'?' + u'&'.join(arguments)\n\ndef canonicalize(path, base, canonical, given, arguments=None):\n if given != canonical:\n assert canonical.lower() == given.lower() # sanity check\n remainder = path[len(base + given):]\n\n if arguments is not None:\n arguments = dict_to_querystring(arguments)\n\n newpath = base + canonical + remainder + arguments or ''\n raise Response(302, headers={\"Location\": newpath})\n\n\ndef plural(i, singular=\"\", plural=\"s\"):\n return singular if i == 1 else plural\n\n\ndef get_participant(request, restrict=True):\n \"\"\"Given a Request, raise Response or return Participant.\n\n If user is not None then we'll restrict access to owners and admins.\n\n \"\"\"\n user = request.context['user']\n slug = request.line.uri.path['username']\n qs = request.line.uri.querystring\n\n if restrict:\n if user.ANON:\n request.redirect(u'/%s/' % slug)\n\n participant = request.website.db.one(\"\"\"\n SELECT participants.*::participants\n FROM participants\n WHERE username_lower=%s\n \"\"\", (slug.lower(),))\n\n if participant is None:\n raise Response(404)\n\n canonicalize(request.line.uri.path.raw, '/', participant.username, slug, qs)\n\n if participant.claimed_time is None:\n\n # This is a stub participant record for someone on another platform who\n # hasn't actually registered with Gittip yet. Let's bounce the viewer\n # over to the appropriate platform page.\n\n to = participant.resolve_unclaimed()\n if to is None:\n raise Response(404)\n request.redirect(to)\n\n if restrict:\n if participant != user.participant:\n if not user.ADMIN:\n raise Response(403)\n\n return participant\n\n\ndef update_global_stats(website):\n stats = website.db.one(\"\"\"\n SELECT nactive, transfer_volume FROM paydays\n ORDER BY ts_end DESC LIMIT 1\n \"\"\", default=(0, 0.0))\n website.gnactive = locale.format(\"%d\", round(stats[0], -2), grouping=True)\n website.gtransfer_volume = locale.format(\"%d\", round(stats[1], -2), grouping=True)\n\n\ndef update_homepage_queries_once(db):\n with db.get_cursor() as cursor:\n log_dammit(\"updating homepage queries\")\n start = time.time()\n cursor.execute(\"DELETE FROM homepage_top_givers\")\n cursor.execute(\"\"\"\n\n INSERT INTO homepage_top_givers\n SELECT tipper AS username, anonymous, sum(amount) AS amount\n FROM ( SELECT DISTINCT ON (tipper, tippee)\n amount\n , tipper\n FROM tips\n JOIN participants p ON p.username = tipper\n JOIN participants p2 ON p2.username = tippee\n JOIN elsewhere ON elsewhere.participant = tippee\n WHERE p.last_bill_result = ''\n AND p.is_suspicious IS NOT true\n AND p2.claimed_time IS NOT NULL\n AND elsewhere.is_locked = false\n ORDER BY tipper, tippee, mtime DESC\n ) AS foo\n JOIN participants p ON p.username = tipper\n WHERE is_suspicious IS NOT true\n GROUP BY tipper, anonymous\n ORDER BY amount DESC;\n\n \"\"\".strip())\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_givers\n SET gravatar_id = ( SELECT user_info->'gravatar_id'\n FROM elsewhere\n WHERE participant=username\n AND platform='github'\n )\n \"\"\")\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_givers\n SET twitter_pic = ( SELECT user_info->'profile_image_url_https'\n FROM elsewhere\n WHERE participant=username\n AND platform='twitter'\n )\n \"\"\")\n\n cursor.execute(\"DELETE FROM homepage_top_receivers\")\n cursor.execute(\"\"\"\n\n 
INSERT INTO homepage_top_receivers\n SELECT tippee AS username, claimed_time, sum(amount) AS amount\n FROM ( SELECT DISTINCT ON (tipper, tippee)\n amount\n , tippee\n FROM tips\n JOIN participants p ON p.username = tipper\n JOIN elsewhere ON elsewhere.participant = tippee\n WHERE last_bill_result = ''\n AND elsewhere.is_locked = false\n AND is_suspicious IS NOT true\n AND claimed_time IS NOT null\n ORDER BY tipper, tippee, mtime DESC\n ) AS foo\n JOIN participants p ON p.username = tippee\n WHERE is_suspicious IS NOT true\n GROUP BY tippee, claimed_time\n ORDER BY amount DESC;\n\n \"\"\".strip())\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_receivers\n SET gravatar_id = ( SELECT user_info->'gravatar_id'\n FROM elsewhere\n WHERE participant=username\n AND platform='github'\n )\n \"\"\")\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_receivers\n SET twitter_pic = ( SELECT user_info->'profile_image_url_https'\n FROM elsewhere\n WHERE participant=username\n AND platform='twitter'\n )\n \"\"\")\n end = time.time()\n elapsed = end - start\n log_dammit(\"updated homepage queries in %.2f seconds\" % elapsed)\n\n\ndef _execute(this, sql, params=[]):\n print(sql.strip(), params)\n super(SimpleCursorBase, this).execute(sql, params)\n\ndef log_cursor(f):\n \"Prints sql and params to stdout. Works globaly so watch for threaded use.\"\n def wrapper(*a, **kw):\n try:\n SimpleCursorBase.execute = _execute\n ret = f(*a, **kw)\n finally:\n del SimpleCursorBase.execute\n return ret\n return wrapper\n",
"path": "gittip/utils/__init__.py"
}
] | [
{
"content": "import locale\nimport time\n\nimport gittip\nfrom aspen import log_dammit, Response\nfrom aspen.utils import typecheck\nfrom tornado.escape import linkify\nfrom postgres.cursors import SimpleCursorBase\n\n\nCOUNTRIES = (\n ('US', u'United States'),\n ('AF', u'Afghanistan'),\n ('AX', u'\\xc5land Islands'),\n ('AL', u'Albania'),\n ('DZ', u'Algeria'),\n ('AS', u'American Samoa'),\n ('AD', u'Andorra'),\n ('AO', u'Angola'),\n ('AI', u'Anguilla'),\n ('AQ', u'Antarctica'),\n ('AG', u'Antigua and Barbuda'),\n ('AR', u'Argentina'),\n ('AM', u'Armenia'),\n ('AW', u'Aruba'),\n ('AU', u'Australia'),\n ('AT', u'Austria'),\n ('AZ', u'Azerbaijan'),\n ('BS', u'Bahamas'),\n ('BH', u'Bahrain'),\n ('BD', u'Bangladesh'),\n ('BB', u'Barbados'),\n ('BY', u'Belarus'),\n ('BE', u'Belgium'),\n ('BZ', u'Belize'),\n ('BJ', u'Benin'),\n ('BM', u'Bermuda'),\n ('BT', u'Bhutan'),\n ('BO', u'Bolivia, Plurinational State of'),\n ('BQ', u'Bonaire, Sint Eustatius and Saba'),\n ('BA', u'Bosnia and Herzegovina'),\n ('BW', u'Botswana'),\n ('BV', u'Bouvet Island'),\n ('BR', u'Brazil'),\n ('IO', u'British Indian Ocean Territory'),\n ('BN', u'Brunei Darussalam'),\n ('BG', u'Bulgaria'),\n ('BF', u'Burkina Faso'),\n ('BI', u'Burundi'),\n ('KH', u'Cambodia'),\n ('CM', u'Cameroon'),\n ('CA', u'Canada'),\n ('CV', u'Cape Verde'),\n ('KY', u'Cayman Islands'),\n ('CF', u'Central African Republic'),\n ('TD', u'Chad'),\n ('CL', u'Chile'),\n ('CN', u'China'),\n ('CX', u'Christmas Island'),\n ('CC', u'Cocos (Keeling) Islands'),\n ('CO', u'Colombia'),\n ('KM', u'Comoros'),\n ('CG', u'Congo'),\n ('CD', u'Congo, The Democratic Republic of the'),\n ('CK', u'Cook Islands'),\n ('CR', u'Costa Rica'),\n ('CI', u\"C\\xf4te D'ivoire\"),\n ('HR', u'Croatia'),\n ('CU', u'Cuba'),\n ('CW', u'Cura\\xe7ao'),\n ('CY', u'Cyprus'),\n ('CZ', u'Czech Republic'),\n ('DK', u'Denmark'),\n ('DJ', u'Djibouti'),\n ('DM', u'Dominica'),\n ('DO', u'Dominican Republic'),\n ('EC', u'Ecuador'),\n ('EG', u'Egypt'),\n ('SV', u'El Salvador'),\n ('GQ', u'Equatorial Guinea'),\n ('ER', u'Eritrea'),\n ('EE', u'Estonia'),\n ('ET', u'Ethiopia'),\n ('FK', u'Falkland Islands (Malvinas)'),\n ('FO', u'Faroe Islands'),\n ('FJ', u'Fiji'),\n ('FI', u'Finland'),\n ('FR', u'France'),\n ('GF', u'French Guiana'),\n ('PF', u'French Polynesia'),\n ('TF', u'French Southern Territories'),\n ('GA', u'Gabon'),\n ('GM', u'Gambia'),\n ('GE', u'Georgia'),\n ('DE', u'Germany'),\n ('GH', u'Ghana'),\n ('GI', u'Gibraltar'),\n ('GR', u'Greece'),\n ('GL', u'Greenland'),\n ('GD', u'Grenada'),\n ('GP', u'Guadeloupe'),\n ('GU', u'Guam'),\n ('GT', u'Guatemala'),\n ('GG', u'Guernsey'),\n ('GN', u'Guinea'),\n ('GW', u'Guinea-bissau'),\n ('GY', u'Guyana'),\n ('HT', u'Haiti'),\n ('HM', u'Heard Island and McDonald Islands'),\n ('VA', u'Holy See (Vatican City State)'),\n ('HN', u'Honduras'),\n ('HK', u'Hong Kong'),\n ('HU', u'Hungary'),\n ('IS', u'Iceland'),\n ('IN', u'India'),\n ('ID', u'Indonesia'),\n ('IR', u'Iran, Islamic Republic of'),\n ('IQ', u'Iraq'),\n ('IE', u'Ireland'),\n ('IM', u'Isle of Man'),\n ('IL', u'Israel'),\n ('IT', u'Italy'),\n ('JM', u'Jamaica'),\n ('JP', u'Japan'),\n ('JE', u'Jersey'),\n ('JO', u'Jordan'),\n ('KZ', u'Kazakhstan'),\n ('KE', u'Kenya'),\n ('KI', u'Kiribati'),\n ('KP', u\"Korea, Democratic People's Republic of\"),\n ('KR', u'Korea, Republic of'),\n ('KW', u'Kuwait'),\n ('KG', u'Kyrgyzstan'),\n ('LA', u\"Lao People's Democratic Republic\"),\n ('LV', u'Latvia'),\n ('LB', u'Lebanon'),\n ('LS', u'Lesotho'),\n ('LR', u'Liberia'),\n ('LY', u'Libya'),\n ('LI', 
u'Liechtenstein'),\n ('LT', u'Lithuania'),\n ('LU', u'Luxembourg'),\n ('MO', u'Macao'),\n ('MK', u'Macedonia, The Former Yugoslav Republic of'),\n ('MG', u'Madagascar'),\n ('MW', u'Malawi'),\n ('MY', u'Malaysia'),\n ('MV', u'Maldives'),\n ('ML', u'Mali'),\n ('MT', u'Malta'),\n ('MH', u'Marshall Islands'),\n ('MQ', u'Martinique'),\n ('MR', u'Mauritania'),\n ('MU', u'Mauritius'),\n ('YT', u'Mayotte'),\n ('MX', u'Mexico'),\n ('FM', u'Micronesia, Federated States of'),\n ('MD', u'Moldova, Republic of'),\n ('MC', u'Monaco'),\n ('MN', u'Mongolia'),\n ('ME', u'Montenegro'),\n ('MS', u'Montserrat'),\n ('MA', u'Morocco'),\n ('MZ', u'Mozambique'),\n ('MM', u'Myanmar'),\n ('NA', u'Namibia'),\n ('NR', u'Nauru'),\n ('NP', u'Nepal'),\n ('NL', u'Netherlands'),\n ('NC', u'New Caledonia'),\n ('NZ', u'New Zealand'),\n ('NI', u'Nicaragua'),\n ('NE', u'Niger'),\n ('NG', u'Nigeria'),\n ('NU', u'Niue'),\n ('NF', u'Norfolk Island'),\n ('MP', u'Northern Mariana Islands'),\n ('NO', u'Norway'),\n ('OM', u'Oman'),\n ('PK', u'Pakistan'),\n ('PW', u'Palau'),\n ('PS', u'Palestinian Territory, Occupied'),\n ('PA', u'Panama'),\n ('PG', u'Papua New Guinea'),\n ('PY', u'Paraguay'),\n ('PE', u'Peru'),\n ('PH', u'Philippines'),\n ('PN', u'Pitcairn'),\n ('PL', u'Poland'),\n ('PT', u'Portugal'),\n ('PR', u'Puerto Rico'),\n ('QA', u'Qatar'),\n ('RE', u'R\\xe9union'),\n ('RO', u'Romania'),\n ('RU', u'Russian Federation'),\n ('RW', u'Rwanda'),\n ('BL', u'Saint Barth\\xe9lemy'),\n ('SH', u'Saint Helena, Ascension and Tristan Da Cunha'),\n ('KN', u'Saint Kitts and Nevis'),\n ('LC', u'Saint Lucia'),\n ('MF', u'Saint Martin (French Part)'),\n ('PM', u'Saint Pierre and Miquelon'),\n ('VC', u'Saint Vincent and the Grenadines'),\n ('WS', u'Samoa'),\n ('SM', u'San Marino'),\n ('ST', u'Sao Tome and Principe'),\n ('SA', u'Saudi Arabia'),\n ('SN', u'Senegal'),\n ('RS', u'Serbia'),\n ('SC', u'Seychelles'),\n ('SL', u'Sierra Leone'),\n ('SG', u'Singapore'),\n ('SX', u'Sint Maarten (Dutch Part)'),\n ('SK', u'Slovakia'),\n ('SI', u'Slovenia'),\n ('SB', u'Solomon Islands'),\n ('SO', u'Somalia'),\n ('ZA', u'South Africa'),\n ('GS', u'South Georgia and the South Sandwich Islands'),\n ('SS', u'South Sudan'),\n ('ES', u'Spain'),\n ('LK', u'Sri Lanka'),\n ('SD', u'Sudan'),\n ('SR', u'Suriname'),\n ('SJ', u'Svalbard and Jan Mayen'),\n ('SZ', u'Swaziland'),\n ('SE', u'Sweden'),\n ('CH', u'Switzerland'),\n ('SY', u'Syrian Arab Republic'),\n ('TW', u'Taiwan, Province of China'),\n ('TJ', u'Tajikistan'),\n ('TZ', u'Tanzania, United Republic of'),\n ('TH', u'Thailand'),\n ('TL', u'Timor-leste'),\n ('TG', u'Togo'),\n ('TK', u'Tokelau'),\n ('TO', u'Tonga'),\n ('TT', u'Trinidad and Tobago'),\n ('TN', u'Tunisia'),\n ('TR', u'Turkey'),\n ('TM', u'Turkmenistan'),\n ('TC', u'Turks and Caicos Islands'),\n ('TV', u'Tuvalu'),\n ('UG', u'Uganda'),\n ('UA', u'Ukraine'),\n ('AE', u'United Arab Emirates'),\n ('GB', u'United Kingdom'),\n ('US', u'United States'),\n ('UM', u'United States Minor Outlying Islands'),\n ('UY', u'Uruguay'),\n ('UZ', u'Uzbekistan'),\n ('VU', u'Vanuatu'),\n ('VE', u'Venezuela, Bolivarian Republic of'),\n ('VN', u'Viet Nam'),\n ('VG', u'Virgin Islands, British'),\n ('VI', u'Virgin Islands, U.S.'),\n ('WF', u'Wallis and Futuna'),\n ('EH', u'Western Sahara'),\n ('YE', u'Yemen'),\n ('ZM', u'Zambia'),\n ('ZW', u'Zimbabwe'),\n)\nCOUNTRIES_MAP = dict(COUNTRIES)\n\n\ndef wrap(u):\n \"\"\"Given a unicode, return a unicode.\n \"\"\"\n typecheck(u, unicode)\n u = linkify(u) # Do this first, because it calls xthml_escape.\n u = u.replace(u'\\r\\n', u'<br 
/>\\r\\n').replace(u'\\n', u'<br />\\n')\n return u if u else '...'\n\n\ndef dict_to_querystring(mapping):\n if not mapping:\n return u''\n\n arguments = []\n for key, values in mapping.iteritems():\n for val in values:\n arguments.append(u'='.join([key, val]))\n\n return u'?' + u'&'.join(arguments)\n\ndef canonicalize(path, base, canonical, given, arguments=None):\n if given != canonical:\n assert canonical.lower() == given.lower() # sanity check\n remainder = path[len(base + given):]\n\n if arguments is not None:\n arguments = dict_to_querystring(arguments)\n\n newpath = base + canonical + remainder + arguments or ''\n raise Response(302, headers={\"Location\": newpath})\n\n\ndef plural(i, singular=\"\", plural=\"s\"):\n return singular if i == 1 else plural\n\n\ndef get_participant(request, restrict=True):\n \"\"\"Given a Request, raise Response or return Participant.\n\n If user is not None then we'll restrict access to owners and admins.\n\n \"\"\"\n user = request.context['user']\n slug = request.line.uri.path['username']\n qs = request.line.uri.querystring\n\n if restrict:\n if user.ANON:\n request.redirect(u'/%s/' % slug)\n\n participant = request.website.db.one(\"\"\"\n SELECT participants.*::participants\n FROM participants\n WHERE username_lower=%s\n \"\"\", (slug.lower(),))\n\n if participant is None:\n raise Response(404)\n\n canonicalize(request.line.uri.path.raw, '/', participant.username, slug, qs)\n\n if participant.claimed_time is None:\n\n # This is a stub participant record for someone on another platform who\n # hasn't actually registered with Gittip yet. Let's bounce the viewer\n # over to the appropriate platform page.\n\n to = participant.resolve_unclaimed()\n if to is None:\n raise Response(404)\n request.redirect(to)\n\n if restrict:\n if participant != user.participant:\n if not user.ADMIN:\n raise Response(403)\n\n return participant\n\n\ndef update_global_stats(website):\n stats = website.db.one(\"\"\"\n SELECT nactive, transfer_volume FROM paydays\n ORDER BY ts_end DESC LIMIT 1\n \"\"\", default=(0, 0.0))\n website.gnactive = locale.format(\"%d\", round(stats[0], -2), grouping=True)\n website.gtransfer_volume = locale.format(\"%d\", round(stats[1], -2), grouping=True)\n\n\ndef update_homepage_queries_once(db):\n with db.get_cursor() as cursor:\n log_dammit(\"updating homepage queries\")\n start = time.time()\n cursor.execute(\"DELETE FROM homepage_top_givers\")\n cursor.execute(\"\"\"\n\n INSERT INTO homepage_top_givers\n SELECT tipper AS username, anonymous, sum(amount) AS amount\n FROM ( SELECT DISTINCT ON (tipper, tippee)\n amount\n , tipper\n FROM tips\n JOIN participants p ON p.username = tipper\n JOIN participants p2 ON p2.username = tippee\n JOIN elsewhere ON elsewhere.participant = tippee\n WHERE p.last_bill_result = ''\n AND p.is_suspicious IS NOT true\n AND p2.claimed_time IS NOT NULL\n AND elsewhere.is_locked = false\n ORDER BY tipper, tippee, mtime DESC\n ) AS foo\n JOIN participants p ON p.username = tipper\n WHERE is_suspicious IS NOT true\n GROUP BY tipper, anonymous\n ORDER BY amount DESC;\n\n \"\"\".strip())\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_givers\n SET gravatar_id = ( SELECT user_info->'gravatar_id'\n FROM elsewhere\n WHERE participant=username\n AND platform='github'\n )\n \"\"\")\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_givers\n SET twitter_pic = ( SELECT user_info->'profile_image_url_https'\n FROM elsewhere\n WHERE participant=username\n AND platform='twitter'\n )\n \"\"\")\n\n cursor.execute(\"DELETE FROM 
homepage_top_receivers\")\n cursor.execute(\"\"\"\n\n INSERT INTO homepage_top_receivers\n SELECT tippee AS username, claimed_time, sum(amount) AS amount\n FROM ( SELECT DISTINCT ON (tipper, tippee)\n amount\n , tippee\n FROM tips\n JOIN participants p ON p.username = tipper\n JOIN elsewhere ON elsewhere.participant = tippee\n WHERE last_bill_result = ''\n AND elsewhere.is_locked = false\n AND is_suspicious IS NOT true\n AND claimed_time IS NOT null\n ORDER BY tipper, tippee, mtime DESC\n ) AS foo\n JOIN participants p ON p.username = tippee\n WHERE is_suspicious IS NOT true\n GROUP BY tippee, claimed_time\n ORDER BY amount DESC;\n\n \"\"\".strip())\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_receivers\n SET gravatar_id = ( SELECT user_info->'gravatar_id'\n FROM elsewhere\n WHERE participant=username\n AND platform='github'\n )\n \"\"\")\n cursor.execute(\"\"\"\n\n UPDATE homepage_top_receivers\n SET twitter_pic = ( SELECT user_info->'profile_image_url_https'\n FROM elsewhere\n WHERE participant=username\n AND platform='twitter'\n )\n \"\"\")\n end = time.time()\n elapsed = end - start\n log_dammit(\"updated homepage queries in %.2f seconds\" % elapsed)\n\n\ndef _execute(this, sql, params=[]):\n print(sql.strip(), params)\n super(SimpleCursorBase, this).execute(sql, params)\n\ndef log_cursor(f):\n \"Prints sql and params to stdout. Works globaly so watch for threaded use.\"\n def wrapper(*a, **kw):\n try:\n SimpleCursorBase.execute = _execute\n ret = f(*a, **kw)\n finally:\n del SimpleCursorBase.execute\n return ret\n return wrapper\n",
"path": "gittip/utils/__init__.py"
}
] | diff --git a/gittip/utils/__init__.py b/gittip/utils/__init__.py
index 42aeb18aa5..05327d5046 100644
--- a/gittip/utils/__init__.py
+++ b/gittip/utils/__init__.py
@@ -9,6 +9,7 @@
COUNTRIES = (
+ ('US', u'United States'),
('AF', u'Afghanistan'),
('AX', u'\xc5land Islands'),
('AL', u'Albania'),
|
codespell-project__codespell-3218 | Codespell doesn't handle the KeyboardInterrupt exception
This should be caught so the program stops gracefully, but instead it shows the default stack trace:
```
^CTraceback (most recent call last):
File "/home/kuba/.local/bin/codespell", line 8, in <module>
sys.exit(_script_main())
^^^^^^^^^^^^^^
File "/home/kuba/.local/lib/python3.12/site-packages/codespell_lib/_codespell.py", line 1017, in _script_main
return main(*sys.argv[1:])
^^^^^^^^^^^^^^^^^^^
File "/home/kuba/.local/lib/python3.12/site-packages/codespell_lib/_codespell.py", line 1185, in main
bad_count += parse_file(
^^^^^^^^^^^
File "/home/kuba/.local/lib/python3.12/site-packages/codespell_lib/_codespell.py", line 903, in parse_file
check_matches = extract_words_iter(line, word_regex, ignore_word_regex)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/kuba/.local/lib/python3.12/site-packages/codespell_lib/_codespell.py", line 793, in extract_words_iter
return list(word_regex.finditer(_ignore_word_sub(text, ignore_word_regex)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
KeyboardInterrupt
```
There is no need to show the `KeyboardInterrupt` exception's stack trace.
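For context, a graceful entry point only needs to intercept `KeyboardInterrupt` around the call into `_script_main`. The sketch below illustrates this; exiting with status 130 (the usual 128+SIGINT convention) is an assumption of the sketch, whereas the project's actual change, shown in the record below, simply swallows the exception.

```python
# Sketch of a Ctrl-C-friendly __main__; the 130 exit status is an assumption,
# not codespell's behaviour (the real fix just passes on KeyboardInterrupt).
import sys

from ._codespell import _script_main

if __name__ == "__main__":
    try:
        sys.exit(_script_main())
    except KeyboardInterrupt:
        sys.exit(130)
```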
| [
{
"content": "import sys\n\nfrom ._codespell import _script_main\n\nif __name__ == \"__main__\":\n sys.exit(_script_main())\n",
"path": "codespell_lib/__main__.py"
}
] | [
{
"content": "import sys\n\nfrom ._codespell import _script_main\n\nif __name__ == \"__main__\":\n try:\n sys.exit(_script_main())\n except KeyboardInterrupt:\n pass\n",
"path": "codespell_lib/__main__.py"
}
] | diff --git a/codespell_lib/__main__.py b/codespell_lib/__main__.py
index ecc82e092b..0a8630df52 100644
--- a/codespell_lib/__main__.py
+++ b/codespell_lib/__main__.py
@@ -3,4 +3,7 @@
from ._codespell import _script_main
if __name__ == "__main__":
- sys.exit(_script_main())
+ try:
+ sys.exit(_script_main())
+ except KeyboardInterrupt:
+ pass
|
cupy__cupy-1028 | cupy.copyto behaves differently from numpy.copyto when src is a python scalar
Code:
```python
import numpy
import cupy

def copyto_check(xp):
    x = xp.zeros(3, dtype=numpy.float32)
    # replace first and third items with 1.0
    xp.copyto(x, 1.0, where=xp.asarray([True, False, True]))
    print(x)

print('numpy', numpy.__version__)
copyto_check(numpy)
print('cupy', cupy.__version__)
copyto_check(cupy)
```
Output:
```
numpy 1.14.0
[1. 0. 1.]
cupy 2.2.0
[1. 1. 1.]
```
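For reference, the masked copy the report expects is just boolean-mask assignment; the NumPy-only sketch below (not part of the original report) makes the intended semantics of the `where` argument explicit.

```python
import numpy as np

x = np.zeros(3, dtype=np.float32)
mask = np.array([True, False, True])
# np.copyto(x, 1.0, where=mask) should behave like:
x[mask] = 1.0
print(x)  # -> [1. 0. 1.]
```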
| [
{
"content": "import numpy\nimport six\n\nfrom cupy import core\n\n\ndef copyto(dst, src, casting='same_kind', where=None):\n \"\"\"Copies values from one array to another with broadcasting.\n\n This function can be called for arrays on different devices. In this case,\n casting, ``where``, and broadcasting is not supported, and an exception is\n raised if these are used.\n\n Args:\n dst (cupy.ndarray): Target array.\n src (cupy.ndarray): Source array.\n casting (str): Casting rule. See :func:`numpy.can_cast` for detail.\n where (cupy.ndarray of bool): If specified, this array acts as a mask,\n and an element is copied only if the corresponding element of\n ``where`` is True.\n\n .. seealso:: :func:`numpy.copyto`\n\n \"\"\"\n\n src_type = type(src)\n src_is_python_scalar = (src_type in six.integer_types or\n src_type in (bool, float, complex))\n if src_is_python_scalar:\n src_dtype = numpy.dtype(type(src))\n can_cast = numpy.can_cast(src, dst.dtype, casting)\n else:\n src_dtype = src.dtype\n can_cast = numpy.can_cast(src_dtype, dst.dtype, casting)\n\n if not can_cast:\n raise TypeError('Cannot cast %s to %s in %s casting mode' %\n (src_dtype, dst.dtype, casting))\n if dst.size == 0:\n return\n\n if src_is_python_scalar:\n dst.fill(src)\n return\n\n if where is None:\n if _can_memcpy(dst, src):\n dst.data.copy_from(src.data, src.nbytes)\n else:\n device = dst.device\n with device:\n if src.device != device:\n src = src.copy()\n core.elementwise_copy(src, dst)\n else:\n core.elementwise_copy_where(src, where, dst)\n\n\ndef _can_memcpy(dst, src):\n c_contiguous = dst.flags.c_contiguous and src.flags.c_contiguous\n f_contiguous = dst.flags.f_contiguous and src.flags.f_contiguous\n return (c_contiguous or f_contiguous) and dst.dtype == src.dtype and \\\n dst.size == src.size\n",
"path": "cupy/manipulation/basic.py"
}
] | [
{
"content": "import numpy\nimport six\n\nfrom cupy import core\n\n\ndef copyto(dst, src, casting='same_kind', where=None):\n \"\"\"Copies values from one array to another with broadcasting.\n\n This function can be called for arrays on different devices. In this case,\n casting, ``where``, and broadcasting is not supported, and an exception is\n raised if these are used.\n\n Args:\n dst (cupy.ndarray): Target array.\n src (cupy.ndarray): Source array.\n casting (str): Casting rule. See :func:`numpy.can_cast` for detail.\n where (cupy.ndarray of bool): If specified, this array acts as a mask,\n and an element is copied only if the corresponding element of\n ``where`` is True.\n\n .. seealso:: :func:`numpy.copyto`\n\n \"\"\"\n\n src_type = type(src)\n src_is_python_scalar = (src_type in six.integer_types or\n src_type in (bool, float, complex))\n if src_is_python_scalar:\n src_dtype = numpy.dtype(type(src))\n can_cast = numpy.can_cast(src, dst.dtype, casting)\n else:\n src_dtype = src.dtype\n can_cast = numpy.can_cast(src_dtype, dst.dtype, casting)\n\n if not can_cast:\n raise TypeError('Cannot cast %s to %s in %s casting mode' %\n (src_dtype, dst.dtype, casting))\n if dst.size == 0:\n return\n\n if src_is_python_scalar and where is None:\n dst.fill(src)\n return\n\n if where is None:\n if _can_memcpy(dst, src):\n dst.data.copy_from(src.data, src.nbytes)\n else:\n device = dst.device\n with device:\n if src.device != device:\n src = src.copy()\n core.elementwise_copy(src, dst)\n else:\n core.elementwise_copy_where(src, where, dst)\n\n\ndef _can_memcpy(dst, src):\n c_contiguous = dst.flags.c_contiguous and src.flags.c_contiguous\n f_contiguous = dst.flags.f_contiguous and src.flags.f_contiguous\n return (c_contiguous or f_contiguous) and dst.dtype == src.dtype and \\\n dst.size == src.size\n",
"path": "cupy/manipulation/basic.py"
}
] | diff --git a/cupy/manipulation/basic.py b/cupy/manipulation/basic.py
index b5dffd3103a..b6f029b1de1 100644
--- a/cupy/manipulation/basic.py
+++ b/cupy/manipulation/basic.py
@@ -39,7 +39,7 @@ def copyto(dst, src, casting='same_kind', where=None):
if dst.size == 0:
return
- if src_is_python_scalar:
+ if src_is_python_scalar and where is None:
dst.fill(src)
return
diff --git a/tests/cupy_tests/manipulation_tests/test_basic.py b/tests/cupy_tests/manipulation_tests/test_basic.py
index 24116781e3f..41d13f2084d 100644
--- a/tests/cupy_tests/manipulation_tests/test_basic.py
+++ b/tests/cupy_tests/manipulation_tests/test_basic.py
@@ -108,7 +108,7 @@ def test_copyto_multigpu_noncontinguous(self, dtype):
@testing.parameterize(
*testing.product(
- {'src': [float(3.2), int(0), int(4), int(-4), True, False],
+ {'src': [float(3.2), int(0), int(4), int(-4), True, False, 1+1j],
'dst_shape': [(), (0,), (1,), (1, 1), (2, 2)]}))
@testing.gpu
class TestCopytoFromScalar(unittest.TestCase):
@@ -119,3 +119,12 @@ def test_copyto(self, xp, dtype):
dst = xp.ones(self.dst_shape, dtype=dtype)
xp.copyto(dst, self.src)
return dst
+
+ @testing.for_all_dtypes()
+ @testing.numpy_cupy_allclose(accept_error=TypeError)
+ def test_copyto_where(self, xp, dtype):
+ dst = xp.ones(self.dst_shape, dtype=dtype)
+ mask = (testing.shaped_arange(
+ self.dst_shape, xp, dtype) % 2).astype(xp.bool_)
+ xp.copyto(dst, self.src, where=mask)
+ return dst
|