repo | instance_id | base_commit | patch | test_patch | problem_statement | hints_text | created_at | environment_setup_commit | version | FAIL_TO_PASS | PASS_TO_PASS |
---|---|---|---|---|---|---|---|---|---|---|---|
opendilab/treevalue | opendilab__treevalue-52 | 17cdc03f8961d8d631ffd3237c4ae79b8f060588 | diff --git a/docs/source/api_doc/tree/tree.rst b/docs/source/api_doc/tree/tree.rst
index 06294d0c28..2b387292fa 100644
--- a/docs/source/api_doc/tree/tree.rst
+++ b/docs/source/api_doc/tree/tree.rst
@@ -9,7 +9,7 @@ TreeValue
---------------
.. autoclass:: TreeValue
- :members: __init__, __getattribute__, __setattr__, __delattr__, __contains__, __repr__, __iter__, __hash__, __eq__, _attr_extern, __len__, __bool__, __str__, __getstate__, __setstate__, get, pop, keys, values, items, __getitem__, __setitem__, __delitem__, _getitem_extern, _setitem_extern, _delitem_extern
+ :members: __init__, __getattribute__, __setattr__, __delattr__, __contains__, __repr__, __iter__, __hash__, __eq__, _attr_extern, __len__, __bool__, __str__, __getstate__, __setstate__, get, pop, keys, values, items, __getitem__, __setitem__, __delitem__, _getitem_extern, _setitem_extern, _delitem_extern, popitem
.. _apidoc_tree_tree_delayed:
diff --git a/treevalue/tree/common/storage.pxd b/treevalue/tree/common/storage.pxd
index 775bdeac8c..7631cc2753 100644
--- a/treevalue/tree/common/storage.pxd
+++ b/treevalue/tree/common/storage.pxd
@@ -14,6 +14,7 @@ cdef class TreeStorage:
cpdef public object get_or_default(self, str key, object default)
cpdef public object pop(self, str key)
cpdef public object pop_or_default(self, str key, object default)
+ cpdef public tuple popitem(self)
cpdef public void del_(self, str key) except *
cpdef public boolean contains(self, str key)
cpdef public uint size(self)
diff --git a/treevalue/tree/common/storage.pyx b/treevalue/tree/common/storage.pyx
index 4efe1cb6a2..c353ed2cba 100644
--- a/treevalue/tree/common/storage.pyx
+++ b/treevalue/tree/common/storage.pyx
@@ -55,6 +55,12 @@ cdef class TreeStorage:
del self.map[key]
return res
+ cpdef public tuple popitem(self):
+ cdef str k
+ cdef object v
+ k, v = self.map.popitem()
+ return k, undelay(v)
+
cpdef public void del_(self, str key) except *:
try:
del self.map[key]
diff --git a/treevalue/tree/tree/tree.pxd b/treevalue/tree/tree/tree.pxd
index e45f784273..92c20f73da 100644
--- a/treevalue/tree/tree/tree.pxd
+++ b/treevalue/tree/tree/tree.pxd
@@ -22,6 +22,7 @@ cdef class TreeValue:
cpdef _delitem_extern(self, object key)
cpdef get(self, str key, object default= *)
cpdef pop(self, str key, object default= *)
+ cpdef popitem(self)
cpdef treevalue_keys keys(self)
cpdef treevalue_values values(self)
diff --git a/treevalue/tree/tree/tree.pyx b/treevalue/tree/tree/tree.pyx
index c19464c70b..9ab963b158 100644
--- a/treevalue/tree/tree/tree.pyx
+++ b/treevalue/tree/tree/tree.pyx
@@ -140,6 +140,27 @@ cdef class TreeValue:
return self._unraw(value)
+ @cython.binding(True)
+ cpdef popitem(self):
+ """
+ Overview:
+ Pop item (with a key and its value) from the tree node.
+
+ :return: Popped item.
+ :raise KeyError: When current treevalue is empty.
+
+ .. note::
+ The method :meth:`popitem` will raise ``KeyError`` when empty, like the behaviour in \
+ `dict.popitem <https://docs.python.org/3/library/stdtypes.html#dict.popitem>`_.
+ """
+ cdef str k
+ cdef object v
+ try:
+ k, v = self._st.popitem()
+ return k, self._unraw(v)
+ except KeyError:
+ raise KeyError(f'popitem(): {self._type.__name__} is empty.')
+
@cython.binding(True)
cpdef _attr_extern(self, str key):
r"""
| Inherit MutableMapping in TreeValue Class
The `TreeValue` class needs to inherit the `MutableMapping` class to support all the features that `dict` objects have.
See: https://docs.python.org/3/library/collections.abc.html#collections-abstract-base-classes
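A minimal sketch of the `popitem` behaviour this patch adds (the tree contents are illustrative; the pop order follows the underlying `dict`):
```python
from treevalue import TreeValue

t = TreeValue({'a': 1, 'b': 2})
k, v = t.popitem()  # removes and returns one (key, value) pair, e.g. ('b', 2)
t.popitem()         # removes the remaining pair
t.popitem()         # raises KeyError: popitem(): TreeValue is empty.
```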
| 2022-07-06T14:18:17 | 0.0 | [] | [] |
|||
opendilab/treevalue | opendilab__treevalue-49 | bfbd23d402363f8f026cd4f8e74eebd7eb76f429 | diff --git a/treevalue/tree/tree/tree.pyx b/treevalue/tree/tree/tree.pyx
index 066c00ed61..c19464c70b 100644
--- a/treevalue/tree/tree/tree.pyx
+++ b/treevalue/tree/tree/tree.pyx
@@ -102,26 +102,20 @@ cdef class TreeValue:
return obj
@cython.binding(True)
- cpdef get(self, str key, object default=_GET_NO_DEFAULT):
+ cpdef get(self, str key, object default=None):
r"""
Overview:
Get item from the tree node.
- Arguments:
- - key (:obj:`str`): Item's name.
- - default (:obj:`default`): Default value when this item is not found, default is \
- `_GET_NO_DEFAULT` which means just raise `KeyError` when not found.
+ :param key: Item's name.
+ :param default: Default value when this item is not found, default is ``None``.
+ :return: Item's value.
- Returns:
- - value: Item's value.
+ .. note::
+ The method :meth:`get` will never raise ``KeyError``, like the behaviour in \
+ `dict.get <https://docs.python.org/3/library/stdtypes.html#dict.get>`_.
"""
- cdef object value
- if default is _GET_NO_DEFAULT:
- value = self._st.get(key)
- else:
- value = self._st.get_or_default(key, default)
-
- return self._unraw(value)
+ return self._unraw(self._st.get_or_default(key, default))
@cython.binding(True)
cpdef pop(self, str key, object default=_GET_NO_DEFAULT):
@@ -129,13 +123,14 @@ cdef class TreeValue:
Overview:
Pop item from the tree node.
- Arguments:
- - key (:obj:`str`): Item's name.
- - default (:obj:`default`): Default value when this item is not found, default is \
- `_GET_NO_DEFAULT` which means just raise `KeyError` when not found.
+ :param key: Item's name.
+ :param default: Default value when this item is not found, default is ``_GET_NO_DEFAULT`` which means \
+ just raise ``KeyError`` when not found.
+ :return: Item's value.
- Returns:
- - value: Item's value.
+ .. note::
+ The method :meth:`pop` will raise ``KeyError`` when ``key`` is not found, like the behaviour in \
+ `dict.pop <https://docs.python.org/3/library/stdtypes.html#dict.pop>`_.
"""
cdef object value
if default is _GET_NO_DEFAULT:
| Inherit MutableMapping in TreeValue Class
The `TreeValue` class needs to inherit the `MutableMapping` class to support all the features that `dict` objects have.
See: https://docs.python.org/3/library/collections.abc.html#collections-abstract-base-classes
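A minimal sketch of the `dict`-like semantics this patch gives `get` and `pop` (the values are illustrative):
```python
from treevalue import TreeValue

t = TreeValue({'a': 1})
t.get('a')           # 1
t.get('missing')     # None (no KeyError, like dict.get)
t.get('missing', 5)  # 5, the supplied default
t.pop('missing')     # raises KeyError, like dict.pop
t.pop('missing', 5)  # 5, the supplied default
```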
| 2022-06-23T05:08:33 | 0.0 | [] | [] |
|||
opendilab/treevalue | opendilab__treevalue-48 | ae999e178f6d9ad61354701f87fe3f3106a36cbf | diff --git a/treevalue/tree/common/storage.pyx b/treevalue/tree/common/storage.pyx
index 576111ae1e..4efe1cb6a2 100644
--- a/treevalue/tree/common/storage.pyx
+++ b/treevalue/tree/common/storage.pyx
@@ -185,27 +185,41 @@ cdef class TreeStorage:
cdef str k
cdef object v
cdef list _items = []
- for k, v in sorted(self.items(), key=lambda x: x[0]):
+ for k, v in sorted(self.iter_items(), key=lambda x: x[0]):
_items.append((k, v))
return hash(tuple(_items))
- def keys(self):
+ def iter_keys(self):
return self.map.keys()
- def values(self):
+ def iter_rev_keys(self):
+ return reversed(self.map.keys())
+
+ def iter_values(self):
cdef str k
cdef object v, nv
for k, v in self.map.items():
yield _c_undelay_data(self.map, k, v)
- def items(self):
+ def iter_rev_values(self):
+ cdef str k
+ cdef object v, nv
+ for k, v in reversed(self.map.items()):
+ yield _c_undelay_data(self.map, k, v)
+
+ def iter_items(self):
cdef str k
cdef object v, nv
for k, v in self.map.items():
- v = _c_undelay_data(self.map, k, v)
+ yield k, _c_undelay_data(self.map, k, v)
+
+ def iter_rev_items(self):
+ cdef str k
+ cdef object v, nv
+ for k, v in reversed(self.map.items()):
+ yield k, _c_undelay_data(self.map, k, v)
- yield k, v
cpdef object create_storage(dict value):
cdef dict _map = {}
diff --git a/treevalue/tree/tree/tree.pxd b/treevalue/tree/tree/tree.pxd
index 8234cc0e28..e45f784273 100644
--- a/treevalue/tree/tree/tree.pxd
+++ b/treevalue/tree/tree/tree.pxd
@@ -6,6 +6,9 @@ from libcpp cimport bool
from ..common.delay cimport DelayedProxy
from ..common.storage cimport TreeStorage
+cdef class _CObject:
+ pass
+
cdef class TreeValue:
cdef readonly TreeStorage _st
cdef readonly type _type
@@ -20,9 +23,29 @@ cdef class TreeValue:
cpdef get(self, str key, object default= *)
cpdef pop(self, str key, object default= *)
+ cpdef treevalue_keys keys(self)
+ cpdef treevalue_values values(self)
+ cpdef treevalue_items items(self)
+
cdef str _prefix_fix(object text, object prefix)
+cdef str _title_repr(TreeStorage st, object type_)
cdef object _build_tree(TreeStorage st, object type_, str prefix, dict id_pool, tuple path)
+# noinspection PyPep8Naming
+cdef class treevalue_keys(_CObject):
+ cdef readonly TreeStorage _st
+ cdef readonly type _type
+
+# noinspection PyPep8Naming
+cdef class treevalue_values(_CObject):
+ cdef readonly TreeStorage _st
+ cdef readonly type _type
+
+# noinspection PyPep8Naming
+cdef class treevalue_items(_CObject):
+ cdef readonly TreeStorage _st
+ cdef readonly type _type
+
cdef class DetachedDelayedProxy(DelayedProxy):
cdef DelayedProxy proxy
cdef readonly bool calculated
diff --git a/treevalue/tree/tree/tree.pyx b/treevalue/tree/tree/tree.pyx
index c7159c8560..066c00ed61 100644
--- a/treevalue/tree/tree/tree.pyx
+++ b/treevalue/tree/tree/tree.pyx
@@ -2,6 +2,7 @@
# cython:language_level=3
import os
+from collections.abc import Sized, Container, Reversible
from operator import itemgetter
import cython
@@ -11,6 +12,9 @@ from ..common.delay cimport undelay, _c_delayed_partial, DelayedProxy
from ..common.storage cimport TreeStorage, create_storage, _c_undelay_data
from ...utils import format_tree
+cdef class _CObject:
+ pass
+
cdef inline object _item_unwrap(object v):
if isinstance(v, list) and len(v) == 1:
return v[0]
@@ -392,7 +396,7 @@ cdef class TreeValue:
"""
cdef str k
cdef object v
- for k, v in self._st.items():
+ for k, v in self._st.iter_items():
yield k, self._unraw(v)
@cython.binding(True)
@@ -525,48 +529,97 @@ cdef class TreeValue:
return self._st
@cython.binding(True)
- def keys(self):
+ cpdef treevalue_keys keys(self):
"""
Overview:
Get keys of this treevalue object, like the :class:`dict`.
Returns:
- keys: A generator of all the keys.
+
+ Examples::
+ >>> from treevalue import TreeValue
+ >>>
+ >>> t = TreeValue({'a': 1, 'b': 3, 'c': '233'})
+ >>> t.keys()
+ treevalue_keys(['a', 'b', 'c'])
+ >>> len(t.keys())
+ 3
+ >>> list(t.keys())
+ ['a', 'b', 'c']
+ >>> list(reversed(t.keys())) # only available in python3.8+
+ ['c', 'b', 'a']
+ >>> 'a' in t.keys()
+ True
+ >>> 'f' in t.keys()
+ False
+
+ .. note::
+ :func:`reversed` is only available in python 3.8 or higher versions.
"""
- return self._st.keys()
+ return treevalue_keys(self._st, self._type)
@cython.binding(True)
- def values(self):
+ cpdef treevalue_values values(self):
"""
Overview:
Get values of this treevalue object, like the :class:`dict`.
Returns:
- values: A generator of all the values
+
+ Examples::
+ >>> from treevalue import TreeValue
+ >>>
+ >>> t = TreeValue({'a': 1, 'b': 3, 'c': '233'})
+ >>> t.values()
+ treevalue_values([1, 3, '233'])
+ >>> len(t.values())
+ 3
+ >>> list(t.values())
+ [1, 3, '233']
+ >>> list(reversed(t.values())) # only supported on python3.8+
+ ['233', 3, 1]
+ >>> 1 in t.values()
+ True
+ >>> 'fff' in t.values()
+ False
+
+ .. note::
+ :func:`reversed` is only available in python 3.8 or higher versions.
"""
- cdef object v
- for v in self._st.values():
- if isinstance(v, TreeStorage):
- yield self._type(v)
- else:
- yield v
+ return treevalue_values(self._st, self._type)
@cython.binding(True)
- def items(self):
+ cpdef treevalue_items items(self):
"""
Overview:
Get pairs of keys and values of this treevalue object, like :meth:`dict.items`.
Returns:
- items: A generator of pairs of keys and values.
+
+ Examples::
+ >>> from treevalue import TreeValue
+ >>>
+ >>> t = TreeValue({'a': 1, 'b': 3, 'c': '233'})
+ >>> t.items()
+ treevalue_items([('a', 1), ('b', 3), ('c', '233')])
+ >>> len(t.items())
+ 3
+ >>> list(t.items())
+ [('a', 1), ('b', 3), ('c', '233')]
+ >>> list(reversed(t.items())) # only supported on python3.8+
+ [('c', '233'), ('b', 3), ('a', 1)]
+ >>> ('a', 1) in t.items()
+ True
+ >>> ('c', '234') in t.items()
+ False
+
+ .. note::
+ :func:`reversed` is only available in python 3.8 or higher versions.
"""
- cdef str k
- cdef object v
- for k, v in self._st.items():
- if isinstance(v, TreeStorage):
- yield k, self._type(v)
- else:
- yield k, v
+ return treevalue_items(self._st, self._type)
cdef str _prefix_fix(object text, object prefix):
cdef list lines = []
@@ -578,9 +631,12 @@ cdef str _prefix_fix(object text, object prefix):
return os.linesep.join(lines)
+cdef inline str _title_repr(TreeStorage st, object type_):
+ return f'<{type_.__name__} {hex(id(st))}>'
+
cdef object _build_tree(TreeStorage st, object type_, str prefix, dict id_pool, tuple path):
cdef object nid = id(st)
- cdef str self_repr = f'<{type_.__name__} {hex(nid)}>'
+ cdef str self_repr = _title_repr(st, type_)
cdef list children = []
cdef str k, _t_prefix
@@ -605,6 +661,129 @@ cdef object _build_tree(TreeStorage st, object type_, str prefix, dict id_pool,
self_repr = _prefix_fix(self_repr, prefix)
return self_repr, children
+try:
+ reversed({'a': 1}.keys())
+except TypeError:
+ _reversible = False
+else:
+ _reversible = True
+
+# noinspection PyPep8Naming
+cdef class treevalue_keys(_CObject, Sized, Container, Reversible):
+ def __cinit__(self, TreeStorage storage, type _type):
+ self._st = storage
+ self._type = _type
+
+ def __len__(self):
+ return self._st.size()
+
+ def __contains__(self, item):
+ return self._st.contains(item)
+
+ def _iter(self):
+ for k in self._st.iter_keys():
+ yield k
+
+ def __iter__(self):
+ return self._iter()
+
+ def _rev_iter(self):
+ for k in self._st.iter_rev_keys():
+ yield k
+
+ def __reversed__(self):
+ if _reversible:
+ return self._rev_iter()
+ else:
+ raise TypeError(f'{type(self).__name__!r} object is not reversible')
+
+ def __repr__(self):
+ return f'{type(self).__name__}({list(self)!r})'
+
+# noinspection PyPep8Naming
+cdef class treevalue_values(_CObject, Sized, Container, Reversible):
+ def __cinit__(self, TreeStorage storage, type _type):
+ self._st = storage
+ self._type = _type
+
+ def __len__(self):
+ return self._st.size()
+
+ def __contains__(self, item):
+ for v in self:
+ if item == v:
+ return True
+
+ return False
+
+ def _iter(self):
+ for v in self._st.iter_values():
+ if isinstance(v, TreeStorage):
+ yield self._type(v)
+ else:
+ yield v
+
+ def __iter__(self):
+ return self._iter()
+
+ def _rev_iter(self):
+ for v in self._st.iter_rev_values():
+ if isinstance(v, TreeStorage):
+ yield self._type(v)
+ else:
+ yield v
+
+ def __reversed__(self):
+ if _reversible:
+ return self._rev_iter()
+ else:
+ raise TypeError(f'{type(self).__name__!r} object is not reversible')
+
+ def __repr__(self):
+ return f'{type(self).__name__}({list(self)!r})'
+
+# noinspection PyPep8Naming
+cdef class treevalue_items(_CObject, Sized, Container, Reversible):
+ def __cinit__(self, TreeStorage storage, type _type):
+ self._st = storage
+ self._type = _type
+
+ def __len__(self):
+ return self._st.size()
+
+ def __contains__(self, item):
+ for k, v in self:
+ if item == (k, v):
+ return True
+
+ return False
+
+ def _iter(self):
+ for k, v in self._st.iter_items():
+ if isinstance(v, TreeStorage):
+ yield k, self._type(v)
+ else:
+ yield k, v
+
+ def __iter__(self):
+ return self._iter()
+
+ def _rev_iter(self):
+ for k, v in self._st.iter_rev_items():
+ if isinstance(v, TreeStorage):
+ yield k, self._type(v)
+ else:
+ yield k, v
+
+ def __reversed__(self):
+ if _reversible:
+ return self._rev_iter()
+ else:
+ raise TypeError(f'{type(self).__name__!r} object is not reversible')
+
+ def __repr__(self):
+ return f'{type(self).__name__}({list(self)!r})'
+
cdef class DetachedDelayedProxy(DelayedProxy):
def __init__(self, DelayedProxy proxy):
self.proxy = proxy
| Inherit MutableMapping in TreeValue Class
The `TreeValue` class needs to inherit the `MutableMapping` class to support all the features that `dict` objects have.
See: https://docs.python.org/3/library/collections.abc.html#collections-abstract-base-classes
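A minimal sketch of the view objects this patch introduces, mirroring the docstring examples in the diff above (`reversed()` on these views requires Python 3.8+):
```python
from treevalue import TreeValue

t = TreeValue({'a': 1, 'b': 3, 'c': '233'})
len(t.keys())             # 3
'a' in t.keys()           # True
list(t.values())          # [1, 3, '233']
('a', 1) in t.items()     # True
list(reversed(t.keys()))  # ['c', 'b', 'a'], Python 3.8+ only
```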
| 2022-06-22T14:58:57 | 0.0 | [] | [] |
|||
opendilab/treevalue | opendilab__treevalue-32 | f7d1b6fdf367d9cbd68c56304cd6648fde218295 | diff --git a/treevalue/utils/formattree.py b/treevalue/utils/formattree.py
index fc53c95f73..60bfed2dfc 100644
--- a/treevalue/utils/formattree.py
+++ b/treevalue/utils/formattree.py
@@ -35,43 +35,45 @@
import itertools
import os
+import sys
-FORK = u'\u251c'
-LAST = u'\u2514'
-VERTICAL = u'\u2502'
-HORIZONTAL = u'\u2500'
-NEWLINE = u''
+_DEFAULT_ENCODING = os.environ.get("PYTHONIOENCODING", sys.getdefaultencoding())
+_UTF8_CHARS = (u'\u251c', u'\u2514', u'\u2502', u'\u2500', u'')
+_ASCII_CHARS = (u'+', u'`', u'|', u'-', u'')
-def _format_newlines(prefix, formatted_node):
+
+def _format_newlines(prefix, formatted_node, chars: tuple):
"""
Convert newlines into U+23EC characters, followed by an actual newline and
then a tree prefix so as to position the remaining text under the previous
line.
"""
+ FORK, LAST, VERTICAL, HORIZONTAL, NEWLINE = chars
replacement = u''.join([NEWLINE, os.linesep, prefix])
return replacement.join(formatted_node.splitlines())
-def _format_tree(node, format_node, get_children, prefix=u''):
+def _format_tree(node, format_node, get_children, prefix=u'', chars: tuple = _UTF8_CHARS):
+ FORK, LAST, VERTICAL, HORIZONTAL, NEWLINE = chars
children = list(get_children(node))
next_prefix = u''.join([prefix, VERTICAL, u' '])
for child in children[:-1]:
yield u''.join([
prefix, FORK, HORIZONTAL, HORIZONTAL, u' ',
- _format_newlines(next_prefix, format_node(child))])
- for result in _format_tree(child, format_node, get_children, next_prefix):
+ _format_newlines(next_prefix, format_node(child), chars)])
+ for result in _format_tree(child, format_node, get_children, next_prefix, chars):
yield result
if children:
last_prefix = u''.join([prefix, u' '])
yield u''.join([
prefix, LAST, HORIZONTAL, HORIZONTAL, u' ',
- _format_newlines(last_prefix, format_node(children[-1]))])
- for result in _format_tree(children[-1], format_node, get_children, last_prefix):
+ _format_newlines(last_prefix, format_node(children[-1]), chars)])
+ for result in _format_tree(children[-1], format_node, get_children, last_prefix, chars):
yield result
-def format_tree(node, format_node, get_children) -> str:
+def format_tree(node, format_node, get_children, encoding=None) -> str:
r"""
Overview:
Format the given tree.
@@ -80,6 +82,8 @@ def format_tree(node, format_node, get_children) -> str:
- node: Node object
- format_node: Format node getter
- get_children: Children getter.
+ - encoding: Encoding to be used. Default is ``None`` which means system encoding. \
+ When ASCII encoding is used, ASCII chars will be used instead of original chars.
Returns:
- formatted: Formatted string.
@@ -111,9 +115,13 @@ def format_tree(node, format_node, get_children) -> str:
└── c
d
"""
- lines = itertools.chain(
+ if 'ASCII' in (encoding or _DEFAULT_ENCODING).upper():
+ _chars = _ASCII_CHARS
+ else:
+ _chars = _UTF8_CHARS
+
+ return os.linesep.join(itertools.chain(
[format_node(node)],
- _format_tree(node, format_node, get_children),
+ _format_tree(node, format_node, get_children, u'', _chars),
[u''],
- )
- return os.linesep.join(lines)
+ ))
| ASCII output when printing a tree.
As the title says, Unicode output is sometimes not acceptable (such as in LaTeX documentation), so a full-ASCII output mode should be allowed.
Like this:
```python
FORK = u'+'
LAST = u'+'
VERTICAL = u'|'
HORIZONTAL = u'-'
NEWLINE = u''
```
in `treevalue/utils/formattree.py`.
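A hedged sketch of the `encoding` parameter this patch adds, assuming `format_tree` is importable from `treevalue.utils` as in the library's own code; the tuple-based tree below is a made-up stand-in for any node structure:
```python
from treevalue.utils import format_tree

# a tiny hypothetical tree: (label, children)
tree = ('root', [('a', []), ('b', [('c', [])])])
print(format_tree(tree, lambda n: n[0], lambda n: n[1], encoding='ascii'))
# root
# +-- a
# `-- b
#     `-- c
```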
| 2022-01-26T06:35:02 | 0.0 | [] | [] |
|||
opendilab/treevalue | opendilab__treevalue-9 | da5022b0c199da07adb87b17f496928c8d102bf7 | diff --git a/docs/source/api_doc/tree/tree.rst b/docs/source/api_doc/tree/tree.rst
index fbe7907b40..b86fea58bf 100644
--- a/docs/source/api_doc/tree/tree.rst
+++ b/docs/source/api_doc/tree/tree.rst
@@ -44,6 +44,22 @@ walk
.. autofunction:: walk
+.. _apidoc_tree_tree_flatten:
+
+flatten
+-------------------
+
+.. autofunction:: flatten
+
+
+.. _apidoc_tree_tree_unflatten:
+
+unflatten
+-------------------
+
+.. autofunction:: unflatten
+
+
.. _apidoc_tree_tree_mapping:
mapping
diff --git a/docs/source/tutorials/advanced_usage/flatten_demo.demo.py b/docs/source/tutorials/advanced_usage/flatten_demo.demo.py
new file mode 100644
index 0000000000..09ecd8c341
--- /dev/null
+++ b/docs/source/tutorials/advanced_usage/flatten_demo.demo.py
@@ -0,0 +1,15 @@
+from treevalue import TreeValue, raw, flatten
+
+if __name__ == '__main__':
+ t = TreeValue({
+ 'a': 1,
+ 'b': 2,
+ 'c': raw({'x': 3, 'y': 4}),
+ 'd': {
+ 'x': 3,
+ 'y': 4
+ },
+ })
+
+ print('flatten(t):')
+ print(flatten(t))
diff --git a/docs/source/tutorials/advanced_usage/index.rst b/docs/source/tutorials/advanced_usage/index.rst
index 2ab64a3de8..e144deb5e5 100644
--- a/docs/source/tutorials/advanced_usage/index.rst
+++ b/docs/source/tutorials/advanced_usage/index.rst
@@ -877,6 +877,98 @@ For further information of function ``typetrans``, \
take a look at :ref:`apidoc_tree_tree_typetrans`.
+Walk
+~~~~~~~~~~~~~~~
+
+You can use function :func:`treevalue.tree.tree.walk` to iterate over all the nodes \
+in the tree, like the example below:
+
+.. literalinclude:: walk_demo.demo.py
+ :language: python
+ :linenos:
+
+The output should be like below:
+
+.. literalinclude:: walk_demo.demo.py.txt
+ :language: text
+ :linenos:
+
+For further informaon of function ``walk``, \
+take a look at :ref:`apidoc_tree_tree_walk`.
+
+
+Flatten Utilities
+----------------------
+
+In order to support parallel calculation on the values of a \
+``TreeValue`` object, :func:`treevalue.tree.tree.flatten` and \
+:func:`treevalue.tree.tree.unflatten` are provided to dump a \
+sequence of node paths and values, and to recover them to the \
+original tree structure when the calculation is completed.
+
+Flatten
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The values and node structures can be dumped \
+by function :func:`treevalue.tree.tree.flatten` as a list, \
+like this:
+
+.. literalinclude:: flatten_demo.demo.py
+ :language: python
+ :linenos:
+
+The result should be like below:
+
+.. literalinclude:: flatten_demo.demo.py.txt
+ :language: python
+ :linenos:
+
+.. note::
+
+ Function :func:`treevalue.tree.tree.flatten` is different from \
+ :func:`treevalue.tree.tree.walk`: ``flatten`` \
+ has fewer features because it is designed entirely for \
+ the dumping process before parallel calculation.
+
+ So when you need to do parallel calculation, please use \
+ :func:`treevalue.tree.tree.flatten` to dump the values and ensure \
+ good speed performance.
+
+For further information of function ``flatten``, \
+take a look at :ref:`apidoc_tree_tree_flatten`.
+
+
+Unflatten
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The dumped node paths and values can be \
+recovered to a ``TreeValue`` object by \
+function :func:`treevalue.tree.tree.unflatten`, like this:
+
+.. literalinclude:: unflatten_demo.demo.py
+ :language: python
+ :linenos:
+
+The result should be like below:
+
+.. literalinclude:: unflatten_demo.demo.py.txt
+ :language: text
+ :linenos:
+
+.. note::
+
+ It is recommended to pass an ordered iterable object \
+ as ``pairs``; this will improve the speed performance of \
+ function :func:`treevalue.tree.tree.unflatten`.
+
+ Because of this, it is a good idea to preserve \
+ the order of :func:`treevalue.tree.tree.flatten`'s result \
+ when executing your own parallel processing logic.
+
+For further information of function ``unflatten``, \
+take a look at :ref:`apidoc_tree_tree_unflatten`.
+
+
IO Utilities
-----------------------
diff --git a/docs/source/tutorials/advanced_usage/unflatten_demo.demo.py b/docs/source/tutorials/advanced_usage/unflatten_demo.demo.py
new file mode 100644
index 0000000000..3cd016f543
--- /dev/null
+++ b/docs/source/tutorials/advanced_usage/unflatten_demo.demo.py
@@ -0,0 +1,13 @@
+from treevalue import unflatten
+
+if __name__ == '__main__':
+ flatted = [
+ (('a',), 1),
+ (('b',), 2),
+ (('c',), {'x': 3, 'y': 4}),
+ (('d', 'x'), 3),
+ (('d', 'y'), 4)
+ ]
+
+ print('unflatten(flatted):')
+ print(unflatten(flatted))
diff --git a/docs/source/tutorials/advanced_usage/walk_demo.demo.py b/docs/source/tutorials/advanced_usage/walk_demo.demo.py
new file mode 100644
index 0000000000..33d52f7e39
--- /dev/null
+++ b/docs/source/tutorials/advanced_usage/walk_demo.demo.py
@@ -0,0 +1,15 @@
+from treevalue import TreeValue, raw, walk
+
+if __name__ == '__main__':
+ t = TreeValue({
+ 'a': 1,
+ 'b': 2,
+ 'c': raw({'x': 3, 'y': 4}),
+ 'd': {
+ 'x': 3,
+ 'y': 4
+ },
+ })
+
+ for path, node in walk(t):
+ print(path, '-->', node)
diff --git a/treevalue/tree/tree/__init__.py b/treevalue/tree/tree/__init__.py
index f200212fd1..030d35e4f3 100644
--- a/treevalue/tree/tree/__init__.py
+++ b/treevalue/tree/tree/__init__.py
@@ -1,3 +1,4 @@
+from .flatten import flatten, unflatten
from .functional import mapping, filter_, mask, reduce_
from .graph import graphics
from .io import loads, load, dumps, dump
diff --git a/treevalue/tree/tree/flatten.pxd b/treevalue/tree/tree/flatten.pxd
new file mode 100644
index 0000000000..bdaddc48be
--- /dev/null
+++ b/treevalue/tree/tree/flatten.pxd
@@ -0,0 +1,13 @@
+# distutils:language=c++
+# cython:language_level=3
+
+# flatten, unflatten
+
+from .tree cimport TreeValue
+from ..common.storage cimport TreeStorage
+
+cdef void _c_flatten(TreeStorage st, tuple path, list res) except *
+cpdef list flatten(TreeValue tree)
+
+cdef TreeStorage _c_unflatten(object pairs)
+cpdef TreeValue unflatten(object pairs, object return_type= *)
diff --git a/treevalue/tree/tree/flatten.pyx b/treevalue/tree/tree/flatten.pyx
new file mode 100644
index 0000000000..ad1001eb06
--- /dev/null
+++ b/treevalue/tree/tree/flatten.pyx
@@ -0,0 +1,105 @@
+# distutils:language=c++
+# cython:language_level=3
+
+# flatten, unflatten
+
+import cython
+
+from .tree cimport TreeValue
+from ..common.storage cimport TreeStorage
+
+cdef void _c_flatten(TreeStorage st, tuple path, list res) except *:
+ cdef dict data = st.detach()
+ cdef tuple curpath
+
+ cdef str k
+ cdef object v
+ for k, v in data.items():
+ curpath = path + (k,)
+ if isinstance(v, TreeStorage):
+ _c_flatten(v, curpath, res)
+ else:
+ res.append((curpath, v))
+
[email protected](True)
+cpdef list flatten(TreeValue tree):
+ r"""
+ Overview:
+ Flatten the values in the tree.
+
+ Arguments:
+ - tree (:obj:`TreeValue`): Tree object to be flatten.
+
+ Returns:
+ - flatted (:obj:`list`): Flatted tree, a list of tuple with (path, value).
+
+ .. note::
+
+ The result of function :func:`flatten` is guaranteed to be ordered, \
+ which means it obeys one of the tree traversal orders. But please note \
+ that the order of keys under the same subtree is not guaranteed.
+ """
+ cdef list result = []
+ _c_flatten(tree._detach(), (), result)
+ return result
+
+cdef TreeStorage _c_unflatten(object pairs):
+ cdef dict raw_data = {}
+ cdef TreeStorage result = TreeStorage(raw_data)
+ cdef list stack = []
+ stack.append(((), raw_data))
+
+ cdef tuple path
+ cdef object v
+ cdef tuple curpath, newpath
+ cdef dict curdata, newdata
+ cdef int curlen, curplen, i
+ for path, v in pairs:
+ curpath, curdata = stack[-1]
+ while path[:len(curpath)] != curpath:
+ stack.pop()
+ curpath, curdata = stack[-1]
+
+ curlen = len(curpath)
+ curplen = len(path)
+ for i in range(curlen, curplen):
+ if i < curplen - 1:
+ newpath = curpath + (path[i],)
+ if path[i] not in curdata:
+ newdata = {}
+ curdata[path[i]] = TreeStorage(newdata)
+ else:
+ newdata = curdata[path[i]].detach()
+
+ curpath, curdata = newpath, newdata
+ stack.append((curpath, curdata))
+
+ else:
+ curdata[path[i]] = v
+
+ return result
+
[email protected](True)
+cpdef TreeValue unflatten(object pairs, object return_type=None):
+ r"""
+ Overview:
+ Unflatten the given pairs of tree's data.
+
+ Arguments:
+ - pairs: Data pairs, should be an iterable object with items of (path, value).
+ - return_type: Return type of unflatted tree, default is ``None`` which means use the default \
+ :class:`TreeValue` class.
+
+ Returns:
+ - tree (:obj:`TreeValue`): Unflatted tree object.
+
+ .. note::
+
+ It is recommended to pass an ordered iterable object as ``pairs``; this \
+ will improve the speed performance of function :func:`unflatten`.
+
+ Because of this, it is a good idea to preserve the order of :func:`flatten`'s result \
+ when executing your own processing logic.
+ """
+ return_type = return_type or TreeValue
+ return return_type(_c_unflatten(pairs))
| Development of flatten and unflatten to support parallel calculation
* [x] `flatten` - dump a tree to a list of node paths and values
* [x] `unflatten` - recover such a list back into a tree (the inverse operation of `flatten`)
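A minimal round-trip sketch based on the demo files in this patch:
```python
from treevalue import TreeValue, flatten, unflatten

t = TreeValue({'a': 1, 'd': {'x': 3, 'y': 4}})
pairs = flatten(t)   # [(('a',), 1), (('d', 'x'), 3), (('d', 'y'), 4)]
t2 = unflatten(pairs)
assert t2 == t       # the original tree structure is recovered
```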
| 2021-11-02T05:53:31 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-430 | 83a6d09562cdabe50d46be4e308f97ca04835568 | diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 00000000..d15b0bae
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "demes-spec"]
+ path = demes-spec
+ url = https://github.com/popsim-consortium/demes-spec.git
diff --git a/demes-spec b/demes-spec
new file mode 160000
index 00000000..b2341f13
--- /dev/null
+++ b/demes-spec
@@ -0,0 +1,1 @@
+Subproject commit b2341f13e51808f3849a2253c188549c163525a8
diff --git a/demes/demes.py b/demes/demes.py
index 45811d18..36930745 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -51,6 +51,11 @@ def unit_interval(self, attribute, value):
raise ValueError(f"must have 0 <= {attribute.name} <= 1")
+def sum_less_than_one(self, attribute, value):
+ if sum(value) > 1:
+ raise ValueError(f"{attribute.name} must sum to less than one")
+
+
def nonzero_len(self, attribute, value):
if len(value) == 0:
if isinstance(value, str):
@@ -95,12 +100,21 @@ def isclose_deme_proportions(
return True
-def validate_item(name, value, required_type, scope):
+_DummyAttribute = collections.namedtuple("_DummyAttribute", ["name"])
+
+
+def validate_item(name, value, required_type, scope, validator=None):
if not isinstance(value, required_type):
raise TypeError(
f"{scope}: field '{name}' must be a {required_type}; "
f"current type is {type(value)}."
)
+ if validator is not None:
+ if not isinstance(validator, (list, tuple)):
+ validator = [validator]
+ dummy_attribute = _DummyAttribute(f"{scope}: {name}")
+ for v in validator:
+ v(None, dummy_attribute, value)
# We need to use this trick because None is a meaningful input value for these
@@ -142,6 +156,17 @@ def check_allowed(data, allowed_fields, scope):
)
+def check_defaults(defaults, allowed_fields, scope):
+ for key, value in defaults.items():
+ if key not in allowed_fields:
+ raise KeyError(
+ f"{scope}: unexpected field: '{key}'. "
+ f"Allowed fields are: {list(allowed_fields)}"
+ )
+ required_type, validator = allowed_fields[key]
+ validate_item(key, value, required_type, scope, validator=validator)
+
+
def insert_defaults(data, defaults):
for key, value in defaults.items():
if key not in data:
@@ -960,7 +985,7 @@ class Deme:
A collection of individuals that have a common set of population parameters.
:ivar str name: A concise string that identifies the deme.
- :ivar str description: A description of the deme. May be ``None``.
+ :ivar str description: A description of the deme.
:ivar float start_time: The time at which the deme begins to exist.
:ivar list[str] ancestors: List of deme names for the deme's ancestors.
This may be ``None``, indicating the deme has no ancestors.
@@ -972,11 +997,7 @@ class Deme:
"""
name: Name = attr.ib(validator=[attr.validators.instance_of(str), valid_deme_name])
- description: Optional[str] = attr.ib(
- validator=attr.validators.optional(
- [attr.validators.instance_of(str), nonzero_len]
- )
- )
+ description: str = attr.ib(default="", validator=attr.validators.instance_of(str))
start_time: Time = attr.ib(validator=[int_or_float, positive])
ancestors: List[Name] = attr.ib(
validator=attr.validators.deep_iterable(
@@ -1224,7 +1245,6 @@ class Graph:
object to use when inspecting a model's properties.
:ivar str description: A human readable description of the demography.
- May be ``None``.
:ivar str time_units: The units of time used for the demography. This is
commonly ``years`` or ``generations``, but can be any string.
This field is intended to be useful for documenting a demography,
@@ -1244,12 +1264,7 @@ class Graph:
:ivar list[Pulse] pulses: The migration pulses for the demography.
"""
- description: Optional[str] = attr.ib(
- default=None,
- validator=attr.validators.optional(
- [attr.validators.instance_of(str), nonzero_len]
- ),
- )
+ description: str = attr.ib(default="", validator=attr.validators.instance_of(str))
time_units: str = attr.ib(validator=[attr.validators.instance_of(str), nonzero_len])
generation_time: Optional[Time] = attr.ib(
default=None,
@@ -1286,6 +1301,11 @@ def __attrs_post_init__(self):
raise ValueError(
'if time_units!="generations", generation_time must be specified'
)
+ if self.generation_time is None:
+ self.generation_time = 1
+ if self.time_units == "generations" and self.generation_time != 1:
+ # This doesn't make sense. What units are the generation_time in?
+ raise ValueError('time_units=="generations", but generation_time!=1')
def __getitem__(self, deme_name: Name) -> Deme:
"""
@@ -1897,20 +1917,19 @@ def in_generations(self) -> "Graph":
Return a copy of the graph with times in units of generations.
"""
graph = copy.deepcopy(self)
- generation_time = self.generation_time
- graph.generation_time = None
- if graph.time_units != "generations" and generation_time is not None:
- graph.time_units = "generations"
- for deme in graph.demes:
- deme.start_time /= generation_time
- for epoch in deme.epochs:
- epoch.start_time /= generation_time
- epoch.end_time /= generation_time
- for migration in graph.migrations:
- migration.start_time /= generation_time
- migration.end_time /= generation_time
- for pulse in graph.pulses:
- pulse.time /= generation_time
+ assert graph.generation_time is not None
+ for deme in graph.demes:
+ deme.start_time /= graph.generation_time
+ for epoch in deme.epochs:
+ epoch.start_time /= graph.generation_time
+ epoch.end_time /= graph.generation_time
+ for migration in graph.migrations:
+ migration.start_time /= graph.generation_time
+ migration.end_time /= graph.generation_time
+ for pulse in graph.pulses:
+ pulse.time /= graph.generation_time
+ graph.time_units = "generations"
+ graph.generation_time = 1
return graph
@classmethod
@@ -1955,7 +1974,30 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
"proportions",
]
allowed_fields_deme_inner = allowed_fields_deme + ["name", "defaults", "epochs"]
- check_allowed(deme_defaults, allowed_fields_deme, "defaults: deme")
+ check_defaults(
+ deme_defaults,
+ dict(
+ description=(str, None),
+ start_time=(numbers.Number, [int_or_float, positive]),
+ ancestors=(
+ list,
+ attr.validators.deep_iterable(
+ member_validator=attr.validators.and_(
+ attr.validators.instance_of(str), valid_deme_name
+ ),
+ iterable_validator=attr.validators.instance_of(list),
+ ),
+ ),
+ proportions=(
+ list,
+ attr.validators.deep_iterable(
+ member_validator=int_or_float,
+ iterable_validator=attr.validators.instance_of(list),
+ ),
+ ),
+ ),
+ "defaults.deme",
+ )
migration_defaults = pop_object(defaults, "migration", {}, scope="defaults")
allowed_fields_migration = [
@@ -1966,13 +2008,62 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
"end_time",
"rate",
]
- check_allowed(
- migration_defaults, allowed_fields_migration, "defaults: migration"
+ check_defaults(
+ migration_defaults,
+ dict(
+ rate=(numbers.Number, [int_or_float, unit_interval]),
+ start_time=(numbers.Number, [int_or_float, non_negative]),
+ end_time=(numbers.Number, [int_or_float, non_negative, finite]),
+ source=(str, valid_deme_name),
+ dest=(str, valid_deme_name),
+ demes=(
+ list,
+ attr.validators.deep_iterable(
+ member_validator=attr.validators.and_(
+ attr.validators.instance_of(str), valid_deme_name
+ ),
+ iterable_validator=attr.validators.instance_of(list),
+ ),
+ ),
+ ),
+ "defaults.migration",
)
pulse_defaults = pop_object(defaults, "pulse", {}, scope="defaults")
allowed_fields_pulse = ["sources", "dest", "time", "proportions"]
- check_allowed(pulse_defaults, allowed_fields_pulse, "defaults.pulse")
+ check_defaults(
+ pulse_defaults,
+ dict(
+ sources=(
+ list,
+ attr.validators.and_(
+ attr.validators.deep_iterable(
+ member_validator=attr.validators.and_(
+ attr.validators.instance_of(str), valid_deme_name
+ ),
+ iterable_validator=attr.validators.instance_of(list),
+ ),
+ nonzero_len,
+ ),
+ ),
+ dest=(str, valid_deme_name),
+ time=(numbers.Number, [int_or_float, positive, finite]),
+ proportions=(
+ list,
+ attr.validators.deep_iterable(
+ member_validator=attr.validators.and_(
+ int_or_float, unit_interval
+ ),
+ iterable_validator=attr.validators.and_(
+ attr.validators.instance_of(list),
+ nonzero_len,
+ sum_less_than_one,
+ ),
+ ),
+ ),
+ ),
+ "defaults.pulse",
+ )
# epoch defaults may also be specified within a Deme definition.
global_epoch_defaults = pop_object(defaults, "epoch", {}, scope="defaults")
@@ -1984,22 +2075,33 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
"cloning_rate",
"selfing_rate",
]
- check_allowed(global_epoch_defaults, allowed_fields_epoch, "defaults: epoch")
+ allowed_epoch_defaults = dict(
+ end_time=(numbers.Number, [int_or_float, non_negative, finite]),
+ start_size=(numbers.Number, [int_or_float, positive, finite]),
+ end_size=(numbers.Number, [int_or_float, positive, finite]),
+ selfing_rate=(numbers.Number, [int_or_float, unit_interval]),
+ cloning_rate=(numbers.Number, [int_or_float, unit_interval]),
+ size_function=(str, None),
+ )
+ check_defaults(global_epoch_defaults, allowed_epoch_defaults, "defaults.epoch")
if "time_units" not in data:
raise KeyError("toplevel: required field 'time_units' not found")
graph = cls(
- description=data.pop("description", None),
+ description=data.pop("description", ""),
time_units=data.pop("time_units"),
doi=data.pop("doi", []),
generation_time=data.pop("generation_time", None),
metadata=data.pop("metadata", {}),
)
- for i, deme_data in enumerate(
- pop_list(data, "demes", required_type=MutableMapping, scope="toplevel")
- ):
+ demes_list = pop_list(
+ data, "demes", required_type=MutableMapping, scope="toplevel"
+ )
+ if len(demes_list) == 0:
+ raise ValueError("toplevel: 'demes' must be a non-empty list")
+ for i, deme_data in enumerate(demes_list):
if "name" not in deme_data:
raise KeyError(f"demes[{i}]: required field 'name' not found")
deme_name = deme_data.pop("name")
@@ -2010,7 +2112,7 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
deme = graph._add_deme(
name=deme_name,
- description=deme_data.pop("description", None),
+ description=deme_data.pop("description", ""),
start_time=deme_data.pop("start_time", None),
ancestors=deme_data.pop("ancestors", None),
proportions=deme_data.pop("proportions", None),
@@ -2025,14 +2127,13 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
local_epoch_defaults = pop_object(
local_defaults, "epoch", {}, scope=f"demes[{i}] {deme.name}: defaults"
)
- epoch_defaults = global_epoch_defaults.copy()
- epoch_defaults.update(local_epoch_defaults)
-
- check_allowed(
- epoch_defaults,
- allowed_fields_epoch,
+ check_defaults(
+ local_epoch_defaults,
+ allowed_epoch_defaults,
f"demes[{i}] {deme.name}: defaults: epoch",
)
+ epoch_defaults = global_epoch_defaults.copy()
+ epoch_defaults.update(local_epoch_defaults)
if len(epoch_defaults) == 0 and "epochs" not in deme_data:
# This condition would be caught downstream, because start_size
@@ -2050,6 +2151,10 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
required_type=MutableMapping,
scope=f"demes[{i}] {deme.name}",
)
+ if len(epochs) == 0:
+ raise ValueError(
+ f"demes[{i}] {deme.name}: 'epochs' must be a non-empty list"
+ )
for j, epoch_data in enumerate(epochs):
check_allowed(
epoch_data,
@@ -2065,6 +2170,7 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
f"demes[{i}] {deme.name}: epochs[{j}]: "
"required field 'end_time' not found"
)
+
deme._add_epoch(
end_time=epoch_data.pop("end_time"),
start_size=epoch_data.pop("start_size", None),
@@ -2074,6 +2180,10 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
cloning_rate=epoch_data.pop("cloning_rate", 0),
)
+ assert len(deme.epochs) > 0
+
+ assert len(graph.demes) > 0
+
for i, migration_data in enumerate(
pop_list(
data, "migrations", [], required_type=MutableMapping, scope="toplevel"
@@ -2140,17 +2250,16 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
return graph
- def asdict(self) -> MutableMapping[str, Any]:
+ def asdict(self, keep_empty_fields=True) -> MutableMapping[str, Any]:
"""
Return a fully-resolved dict representation of the graph.
"""
def filt(attrib, value):
return (
- value is not None
- and not (hasattr(value, "__len__") and len(value) == 0)
- and attrib.name != "_deme_map"
- )
+ keep_empty_fields
+ or (not (hasattr(value, "__len__") and len(value) == 0))
+ ) and attrib.name != "_deme_map"
def coerce_numbers(inst, attribute, value):
# Explicitly convert numeric types to int or float, so that they
@@ -2297,7 +2406,7 @@ def collapse_demes(pairs):
data["migrations"] = symmetric + asymmetric
- data = self.asdict()
+ data = self.asdict(keep_empty_fields=False)
if "migrations" in data:
simplify_migration_rates(data)
diff --git a/demes/load_dump.py b/demes/load_dump.py
index 48bba8f4..adcb0521 100644
--- a/demes/load_dump.py
+++ b/demes/load_dump.py
@@ -19,7 +19,8 @@ def _open_file_polymorph(polymorph, mode="r"):
just yield polymorph under the assumption it's a fileobj.
"""
try:
- f = open(polymorph, mode)
+ # We must specify utf8 explicitly for Windows.
+ f = open(polymorph, mode, encoding="utf-8")
except TypeError:
f = polymorph
try:
@@ -129,15 +130,17 @@ def assert_no_nulls(d):
if isinstance(v, dict):
assert_no_nulls(v)
elif isinstance(v, list):
- for _ in v:
- if isinstance(_, dict):
- assert_no_nulls(_)
+ for e in v:
+ if isinstance(e, dict):
+ assert_no_nulls(e)
else:
- check_if_None(k, v)
+ check_if_None(k, e)
else:
check_if_None(k, v)
- assert_no_nulls(data)
+ # Don't look inside metadata.
+ data_no_metadata = {k: v for k, v in data.items() if k != "metadata"}
+ assert_no_nulls(data_no_metadata)
def loads_asdict(string, *, format="yaml") -> MutableMapping[str, Any]:
diff --git a/examples/browning_america.yaml b/examples/browning_america.yaml
index bbae291d..898877f1 100644
--- a/examples/browning_america.yaml
+++ b/examples/browning_america.yaml
@@ -2,7 +2,6 @@ description: The Browning et al. (2011) model of admixture in the Americas.
doi:
- https://doi.org/10.1371/journal.pgen.1007385
time_units: generations
-generation_time: 25
demes:
- name: ancestral
description: Equilibrium/root population
diff --git a/examples/zigzag.yaml b/examples/zigzag.yaml
index c79303a0..21225be5 100644
--- a/examples/zigzag.yaml
+++ b/examples/zigzag.yaml
@@ -2,7 +2,6 @@ description: A single population model with epochs of exponential growth and dec
doi:
- https://doi.org/10.1038/ng.3015
time_units: generations
-generation_time: 30
demes:
- name: generic
description: All epochs wrapped into the same population, so that epoch intervals
| We don't reject bad defaults unless they're used
This causes various "failed to reject erroneous input" problems with invalid test cases from the spec test suite.
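A hypothetical minimal reproduction (the model itself is made up): the `start_size` default below is invalid, but because the deme never uses it, pre-patch versions accepted the model silently. With the eager `check_defaults` validation added here, loading should fail immediately:
```python
import demes

model = """
time_units: generations
defaults:
  epoch:
    start_size: -100  # invalid, but never used below
demes:
  - name: A
    epochs:
      - start_size: 100
"""
demes.loads(model)  # now raises: defaults are validated even when unused
```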
| 2022-02-18T09:11:18 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-426 | 1ddd4048a78b0ef628e35d45bdbac35c7379edf8 | diff --git a/demes/demes.py b/demes/demes.py
index 03b0b60e..1c626017 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -213,8 +213,6 @@ def __attrs_post_init__(self):
raise ValueError("if start time is inf, must be a constant size epoch")
if self.size_function == "constant" and self.start_size != self.end_size:
raise ValueError("start_size != end_size, but size_function is constant")
- if self.selfing_rate + self.cloning_rate > 1:
- raise ValueError("must have selfing_rate + cloning_rate <= 1")
@property
def time_span(self):
| selfing_rate + cloning_rate >= 1 should be permitted
See https://github.com/popsim-consortium/demes-spec/issues/43#issuecomment-888486763
Test case that should be permitted (but isn't currently):
```yaml
time_units: generations
demes:
- name: A
epochs:
- start_size: 100
selfing_rate: 0.6
cloning_rate: 0.6
```
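A sketch of loading that test case once the check is removed (the combined rate is left for downstream interpretation rather than rejected here):
```python
import demes

graph = demes.loads("""
time_units: generations
demes:
  - name: A
    epochs:
      - start_size: 100
        selfing_rate: 0.6
        cloning_rate: 0.6
""")  # previously raised ValueError; now resolves
```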
| 2022-02-15T14:16:30 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-397 | f09b2cb5e0799c134cb631f0bd20d5414237c979 | diff --git a/.gitignore b/.gitignore
index fa4dfa59..a9d3c74c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,4 +7,3 @@ demes/_version.py
build/
dist/
demes.egg-info
-.hypothesis
diff --git a/codecov.yml b/codecov.yml
deleted file mode 100644
index bd4e3601..00000000
--- a/codecov.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-ignore:
- # Coverage of the hypothesis strategy is not deterministic.
- - demes/hypothesis_strategies.py
diff --git a/demes/hypothesis_strategies.py b/demes/hypothesis_strategies.py
deleted file mode 100644
index a9c07f96..00000000
--- a/demes/hypothesis_strategies.py
+++ /dev/null
@@ -1,458 +0,0 @@
-import math
-import itertools
-import collections
-import struct
-
-import hypothesis as hyp
-import hypothesis.strategies as st
-
-import demes
-
-__all__ = ["graphs"]
-
-
-def __dir__():
- return sorted(__all__)
-
-
-def prec32(x):
- """truncate x to the nearest single-precision floating point number"""
- return struct.unpack("f", struct.pack("f", x))[0]
-
-
-# Limits for the floating point numbers we'll draw.
-#
-# We wish to be more restrictive with the allowable range than the limits
-# provided by floating-point types, to avoid doing arithmetic on numbers at
-# those floating point limits. Values near the limits are not useful for
-# demographic models in practice, so we don't want to generate models that
-# require applications to deal with floating point underflow and overflow.
-# On the other hand, we also don't want to enforce artificial limits in the
-# Demes spec for things like time values or deme sizes.
-#
-# The numbers below are sufficiently conservative so as to avoid underflow
-# and overflow during arithmetic (although this is not guaranteed),
-# but not too conservative that the randomly generated models won't catch a
-# variety of errors in downstream application code.
-FLOAT_MAX = prec32(1e30)
-FLOAT_EPS = prec32(1e-6)
-
-
[email protected]
-def deme_names(draw, max_length=20):
- """
- A hypothesis strategy for creating a valid deme name.
- """
- name = draw(st.text(min_size=1, max_size=max_length))
- # Names must be valid Python identifiers.
- hyp.assume(name.isidentifier())
- return name
-
-
[email protected]
-def yaml_strings(draw, min_size=1, max_size=100):
- """
- A hypothesis strategy for creating a valid YAML string.
-
- From https://yaml.org/spec/1.2/spec.html#id2770814
-
- To ensure readability, YAML streams use only the printable subset of
- the Unicode character set. The allowed character range explicitly
- excludes the C0 control block #x0-#x1F (except for TAB #x9, LF #xA,
- and CR #xD which are allowed), DEL #x7F, the C1 control block #x80-#x9F
- (except for NEL #x85 which is allowed), the surrogate block #xD800-#xDFFF,
- #xFFFE, and #xFFFF.
-
- On input, a YAML processor must accept all Unicode characters except
- those explicitly excluded above.
-
- On output, a YAML processor must only produce acceptable characters.
- Any excluded characters must be presented using escape sequences.
- In addition, any allowed characters known to be non-printable should
- also be escaped. This isn’t mandatory since a full implementation would
- require extensive character property tables.
- """
- return draw(
- st.text(
- alphabet=st.characters(
- blacklist_categories=(
- "Cc", # control block (C0 and C1)
- "Cs", # surrogate block
- ),
- blacklist_characters=("\ufffe", "\uffff"),
- whitelist_characters=("\x09", "\x0a", "\x0d", "\x85"),
- ),
- min_size=min_size,
- max_size=max_size,
- )
- )
-
-
[email protected]
-def epochs_lists(
- draw,
- start_time=math.inf,
- max_epochs=5,
- min_deme_size=FLOAT_EPS,
- max_deme_size=FLOAT_MAX,
- size_functions=None,
-):
- """
- A hypothesis strategy for creating lists of Epochs for a deme.
-
- :param float start_time: The start time of the deme.
- :param int max_epochs: The maximum number of epochs in the list.
- """
- if size_functions is None:
- size_functions = ["constant", "exponential", "linear"]
- assert max_epochs >= 2
- times = draw(
- st.lists(
- st.floats(
- min_value=0,
- max_value=min(FLOAT_MAX, start_time),
- exclude_max=True,
- width=32,
- ),
- unique=True,
- min_size=1,
- max_size=max_epochs,
- )
- )
- times.sort(reverse=True)
- epochs = []
-
- for i, end_time in enumerate(times):
- start_size = draw(st.floats(min_value=min_deme_size, max_value=max_deme_size))
- if i == 0 and math.isinf(start_time):
- end_size = start_size
- size_function = "constant"
- else:
- size_function = draw(st.sampled_from(size_functions))
- if size_function == "constant":
- end_size = start_size
- else:
- end_size = draw(
- st.floats(min_value=min_deme_size, max_value=max_deme_size)
- )
- if end_size == start_size:
- size_function = "constant"
- cloning_rate = draw(st.floats(min_value=0, max_value=1))
- selfing_rate = draw(st.floats(min_value=0, max_value=prec32(1 - cloning_rate)))
-
- epochs.append(
- dict(
- end_time=end_time,
- start_size=start_size,
- end_size=end_size,
- size_function=size_function,
- cloning_rate=cloning_rate,
- selfing_rate=selfing_rate,
- )
- )
-
- return epochs
-
-
[email protected]
-def migration_matrices(
- draw, graph, max_migrations=10, max_additional_migration_intervals=5
-):
- """
- A hypothesis strategy for creating migration matrices for a graph.
- """
- n = len(graph.demes)
- assert n > 0
-
- uniq_deme_times = set(deme.start_time for deme in graph.demes)
- uniq_deme_times.update(deme.end_time for deme in graph.demes)
- start_time, *end_times = sorted(uniq_deme_times, reverse=True)
-
- # Identify the first time at which 2 or more demes exist simultaneously.
- for end_time in end_times:
- if sum(1 for deme in graph.demes if deme.start_time <= start_time) > 1:
- break
- start_time = end_time
-
- if start_time == end_times[-1]:
- # No two demes exist simultaneously.
- return [[[0] * n for _ in range(n)]], math.inf, [0]
-
- saved_start_time = start_time
-
- # Partition time intervals even further.
- additional_times = draw(
- st.lists(
- st.floats(
- min_value=end_times[-1],
- max_value=start_time,
- exclude_max=True,
- width=32,
- ),
- unique=True,
- min_size=0,
- max_size=max_additional_migration_intervals,
- )
- )
- end_times = sorted(set(end_times + additional_times), reverse=True)
-
- mm_list = [[[0] * n for _ in range(n)] for _ in range(len(end_times))]
- n_migrations = draw(st.integers(min_value=0, max_value=max_migrations))
-
- for migration_matrix, end_time in zip(mm_list, end_times):
- # Find demes alive in this interval.
- deme_indices = [
- j
- for j, deme in enumerate(graph.demes)
- if (
- deme.start_time >= start_time > deme.end_time
- and deme.start_time > end_time >= deme.end_time
- )
- ]
- if len(deme_indices) < 2:
- continue
-
- # Select pairs of demes for migration.
- pairs = list(itertools.permutations(deme_indices, 2))
- pair_indices = draw(
- st.lists(
- st.integers(min_value=0, max_value=len(pairs) - 1),
- unique=True,
- min_size=0,
- max_size=min(len(pairs), n_migrations),
- )
- )
-
- for k in pair_indices:
- a, b = pairs[k]
- assert migration_matrix[a][b] == 0
- max_rate = 1 - sum(migration_matrix[a])
- if math.isclose(max_rate, 0):
- continue
- n_migrations -= 1
- rate = draw(
- st.floats(min_value=0, max_value=prec32(max_rate), exclude_min=True)
- )
- migration_matrix[a][b] = rate
-
- if n_migrations == 0:
- break
- start_time = end_time
-
- return mm_list, saved_start_time, end_times
-
-
[email protected]
-def migrations_lists(draw, graph, max_migrations=10):
- """
- A hypothesis strategy for creating a migration list for a graph.
- """
- mm_list, start_time, end_times = draw(
- migration_matrices(graph, max_migrations=max_migrations)
- )
- assert len(mm_list) == len(end_times)
- migrations = []
- for migration_matrix, end_time in zip(mm_list, end_times):
- for j, row in enumerate(migration_matrix):
- for k, rate in enumerate(row):
- if rate > 0:
- migration = demes.AsymmetricMigration(
- source=graph.demes[k].name,
- dest=graph.demes[j].name,
- start_time=start_time,
- end_time=end_time,
- rate=rate,
- )
- migrations.append(migration)
- start_time = end_time
- return migrations
-
-
[email protected]
-def pulses_lists(draw, graph, max_pulses=10):
- """
- A hypothesis strategy for creating a pulses list for a graph.
- """
- n_pulses = draw(st.integers(min_value=0, max_value=max_pulses))
- pulses = []
- ingress_proportions = collections.defaultdict(lambda: 0)
- for j, deme_j in enumerate(graph.demes[:-1]):
- for deme_k in graph.demes[j + 1 :]:
- time_lo = max(deme_j.end_time, deme_k.end_time)
- time_hi = min(deme_j.start_time, deme_k.start_time)
-
- # We wish to draw times for the pulses. They must be in the open
- # interval (time_lo, time_hi) to ensure the pulse doesn't happen
- # at any deme's start_time or end_time, which could be invalid.
- # So we check for some breathing room between time_lo and time_hi.
- if time_hi <= time_lo + FLOAT_EPS:
- continue
- n = draw(st.integers(min_value=0, max_value=n_pulses))
- for _ in range(n):
- source, dest = deme_j.name, deme_k.name
- if draw(st.booleans()):
- source, dest = dest, source
- time = draw(
- st.floats(
- min_value=time_lo,
- max_value=time_hi,
- exclude_min=True,
- exclude_max=True,
- width=32,
- )
- )
- max_proportion = 1 - ingress_proportions[(dest, time)]
- if math.isclose(max_proportion, 0):
- continue
- proportion = draw(
- st.floats(
- min_value=0,
- max_value=prec32(max_proportion),
- exclude_min=True,
- exclude_max=True,
- width=32,
- )
- )
- ingress_proportions[(dest, time)] += proportion
- pulse = dict(
- sources=[source],
- dest=dest,
- time=time,
- proportions=[proportion],
- )
- pulses.append(pulse)
- n_pulses -= 1
- if n_pulses == 0:
- break
- if n_pulses == 0:
- break
- return pulses
-
-
[email protected]
-def graphs(
- draw,
- max_demes=5,
- max_epochs=10,
- max_migrations=10,
- max_pulses=10,
- min_deme_size=FLOAT_EPS,
- max_deme_size=FLOAT_MAX,
- size_functions=None,
-):
- """
- A hypothesis strategy for creating a Graph.
-
- .. code-block::
-
- @hypothesis.given(graphs())
- def test_something(self, graph: demes.Graph):
- # Do something with the ``graph``.
- ...
-
- :param int max_demes: The maximum number of demes in the graph.
- :param int max_epochs: The maximum number of epochs per deme.
- :param int max_migrations: The maximum number of migrations in the graph.
- :param int max_pulses: The maximum number of pulses in the graph.
- :param float min_deme_size: The minimum size of a deme in any epoch.
- :param float max_deme_size: The maximum size of a deme in any epoch.
- :param list size_functions: Allowable values for an epoch's size_function.
- """
- generation_time = draw(
- st.none() | st.floats(min_value=FLOAT_EPS, max_value=FLOAT_MAX)
- )
- if generation_time is None:
- time_units = "generations"
- else:
- time_units = draw(yaml_strings())
- b = demes.Builder(
- description=draw(yaml_strings()),
- generation_time=generation_time,
- time_units=time_units,
- doi=draw(st.lists(yaml_strings(), max_size=3)),
- )
-
- for deme_name in draw(st.sets(deme_names(), min_size=1, max_size=max_demes)):
- ancestors = []
- proportions = []
- start_time = math.inf
- n_demes = 0 if "demes" not in b.data else len(b.data["demes"])
- if n_demes > 0:
- # draw indices into demes list to use as ancestors
- anc_idx = draw(
- st.lists(
- st.integers(min_value=0, max_value=n_demes - 1),
- unique=True,
- max_size=n_demes,
- )
- )
- if len(anc_idx) > 0:
- time_hi = min(
- FLOAT_MAX, min(b.data["demes"][j]["start_time"] for j in anc_idx)
- )
- time_lo = max(
- b.data["demes"][j]["epochs"][-1]["end_time"] for j in anc_idx
- )
- # If time_hi > time_lo, the proposed ancestors exist
- # at the same time. So we draw a number for the deme's
- # start_time, which must be in the half-open interval
- # [time_lo, time_hi), with the further constraint that the
- # start_time cannot be 0.
- # However, there may not be any floating point numbers between
- # 0 and time_hi even if time_hi > 0, so we check that time_hi
- # is greater than a small positive number.
- if (time_lo > 0 and time_hi > time_lo) or (
- time_lo == 0 and time_hi > FLOAT_EPS
- ):
- # Draw a start time and the ancestry proportions.
- start_time = draw(
- st.floats(
- min_value=time_lo,
- max_value=time_hi,
- exclude_max=True,
- # Can't have start_time=0.
- exclude_min=time_lo == 0,
- width=32,
- )
- )
- ancestors = [b.data["demes"][j]["name"] for j in anc_idx]
- if len(ancestors) == 1:
- proportions = [1.0]
- else:
- proportions = draw(
- st.lists(
- st.integers(min_value=1, max_value=10 ** 9),
- min_size=len(ancestors),
- max_size=len(ancestors),
- )
- )
- psum = sum(proportions)
- proportions = [p / psum for p in proportions]
- b.add_deme(
- name=deme_name,
- description=draw(st.none() | yaml_strings()),
- ancestors=ancestors,
- proportions=proportions,
- epochs=draw(
- epochs_lists(
- start_time=start_time,
- max_epochs=max_epochs,
- min_deme_size=min_deme_size,
- max_deme_size=max_deme_size,
- size_functions=size_functions,
- )
- ),
- start_time=start_time,
- )
-
- graph = b.resolve()
- graph.migrations = draw(migrations_lists(graph, max_migrations=max_migrations))
- graph.pulses = draw(pulses_lists(graph, max_pulses=max_pulses))
- # Resolve the graph again. This is not strictly necessary, but has only
- # a small computational overhead and serves to catch simple errors in
- # the migrations_lists()/pulses_lists() implementations.
- graph = demes.Builder.fromdict(graph.asdict()).resolve()
- return graph
-
-
-st.register_type_strategy(demes.Graph, graphs())
| hypothesis still not behaving itself
https://github.com/tskit-dev/msprime/pull/1758/checks?check_run_id=3021484804
| 2021-12-07T08:33:21 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-387 | fb694c3deee2632b381c01416710286a2dda63c1 | diff --git a/demes/load_dump.py b/demes/load_dump.py
index b6bf5813..4c170ddc 100644
--- a/demes/load_dump.py
+++ b/demes/load_dump.py
@@ -111,6 +111,31 @@ def _unstringify_infinities(data: MutableMapping[str, Any]) -> None:
data["defaults"][default]["start_time"] = float(start_time)
+def _no_null_values(data: MutableMapping[str, Any]) -> None:
+ """
+ Checks for any null values in the input data.
+ """
+
+ def check_if_None(key, val):
+ if val is None:
+ raise ValueError(f"{key} must have a non-null value")
+
+ def assert_no_nulls(d):
+ for k, v in d.items():
+ if isinstance(v, dict):
+ assert_no_nulls(v)
+ elif isinstance(v, list):
+ for _ in v:
+ if isinstance(_, dict):
+ assert_no_nulls(_)
+ else:
+ check_if_None(k, v)
+ else:
+ check_if_None(k, v)
+
+ assert_no_nulls(data)
+
+
def loads_asdict(string, *, format="yaml") -> MutableMapping[str, Any]:
"""
Load a YAML or JSON string into a dictionary of nested objects.
@@ -149,6 +174,9 @@ def load_asdict(filename, *, format="yaml") -> MutableMapping[str, Any]:
data = _load_yaml_asdict(f)
else:
raise ValueError(f"unknown format: {format}")
+ # We forbid null values in the input data.
+ # See https://github.com/popsim-consortium/demes-spec/issues/76
+ _no_null_values(data)
# The string "Infinity" should only be present in JSON files.
# But YAML is a superset of JSON, so we want the YAML loader to also
# load JSON files without problem.
| Reject YAML files with values set to `null`.
See https://github.com/popsim-consortium/demes-spec/issues/76.
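A minimal sketch of the intended behaviour (the temp-file plumbing is only for illustration, and the error message is an assumption based on the check added in `load_asdict()`):

```python
import tempfile
import demes

# An explicit null should be rejected, rather than silently being
# treated as "use the default value".
model = """\
time_units: generations
demes:
  - name: A
    description: null
    epochs:
      - start_size: 100
"""
with tempfile.NamedTemporaryFile("w", suffix=".yaml") as f:
    f.write(model)
    f.flush()
    try:
        demes.load(f.name)
    except ValueError as err:
        print(err)  # e.g. "description must have a non-null value"
```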
| 2021-11-28T19:11:54 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-385 | 0676cf0366ccd816911614cc0bb3ae8753b77fb9 | diff --git a/demes/demes.py b/demes/demes.py
index 0dee2a58..97bfd666 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -2220,7 +2220,7 @@ def collapse_demes(pairs):
source = migration["source"]
dest = migration["dest"]
time_hi = min(self[source].start_time, self[dest].start_time)
- time_lo = max(self[dest].end_time, self[dest].end_time)
+ time_lo = max(self[source].end_time, self[dest].end_time)
if migration["end_time"] == time_lo:
del migration["end_time"]
if migration["start_time"] == time_hi:
| Some symmetric migration scenarios are not simplified
For graphs with continuous symmetric migration between two demes whose time intervals of existence differ, `graph.asdict_simplified()` does not collapse the two directional migrations into a single symmetric migration specified by `demes` and `rate`. Instead, they remain as asymmetric migrations with `source` and `dest`. Those migrations should be collapsed.
For example, using the Gutenkunst-OOA model:
```python
import demes
g = demes.load("examples/gutenkunst_ooa.yml")
print(g)
```
The migrations print as
```
migrations:
- demes: [YRI, CEU]
rate: 3e-05
- demes: [YRI, CHB]
rate: 1.9e-05
- demes: [CEU, CHB]
rate: 9.6e-05
- {source: YRI, dest: OOA, rate: 0.00025}
- {source: OOA, dest: YRI, end_time: 21200.0, rate: 0.00025}
```
The input file has
```
tail -n 5 examples/gutenkunst_ooa.yaml
```
```
migrations:
- {demes: [YRI, OOA], rate: 25e-5}
- {demes: [YRI, CEU], rate: 3e-5}
- {demes: [YRI, CHB], rate: 1.9e-5}
- {demes: [CEU, CHB], rate: 9.6e-5}
```
| 2021-11-28T01:13:08 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-362 | 0c24b412afb7db7679f8d4d48789bd650fd18200 | diff --git a/demes/demes.py b/demes/demes.py
index 62bb8383..b5368c9e 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -1230,7 +1230,7 @@ class Graph:
# because we're using slotted classes and can't add attributes after
# object creation (e.g. in __attrs_post_init__()).
_deme_map: Dict[Name, Deme] = attr.ib(
- factory=dict, init=False, repr=False, cmp=False
+ factory=dict, init=False, repr=False, eq=False, order=False
)
def __attrs_post_init__(self):
@@ -1289,7 +1289,6 @@ def assert_close(
- The graphs' ``description`` and ``doi`` attributes.
- The order in which ``migrations`` were specified.
- - The order in which admixture ``pulses`` were specified.
- The order in which ``demes`` were specified.
- The order in which a deme's ``ancestors`` were specified.
@@ -1329,13 +1328,12 @@ def assert_sorted_eq(aa, bb, *, rel_tol, abs_tol, name):
abs_tol=abs_tol,
name="migrations",
)
- assert_sorted_eq(
- self.pulses,
- other.pulses,
- rel_tol=rel_tol,
- abs_tol=abs_tol,
- name="pulses",
- )
+ assert len(self.pulses) == len(other.pulses)
+ for i, (self_pulse, other_pulse) in enumerate(zip(self.pulses, other.pulses)):
+ try:
+ self_pulse.assert_close(other_pulse, rel_tol=rel_tol, abs_tol=abs_tol)
+ except AssertionError as e:
+ raise AssertionError(f"Failed for pulses (number {i})") from e
def isclose(
self,
@@ -1353,7 +1351,6 @@ def isclose(
- The graphs' ``description`` and ``doi`` attributes.
- The order in which ``migrations`` were specified.
- - The order in which admixture ``pulses`` were specified.
- The order in which ``demes`` were specified.
- The order in which a deme's ``ancestors`` were specified.
@@ -1617,17 +1614,6 @@ def _add_pulse(self, *, source, dest, proportion, time) -> Pulse:
"the desired ancestry proportions."
)
- # Check for multiple pulses into dest at the same time that
- # give a sum of proportions > 1.
- proportion_sum = proportion
- for pulse in self.pulses:
- if dest == pulse.dest and pulse.time == time:
- proportion_sum += pulse.proportion
- if proportion_sum > 1:
- raise ValueError(
- f"sum of pulse proportions > 1 for dest={dest} at time={time}"
- )
-
self.pulses.append(new_pulse)
return new_pulse
@@ -2093,6 +2079,9 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
except (TypeError, ValueError) as e:
raise e.__class__(f"pulse[{i}]: invalid pulse") from e
+ # Sort pulses from oldest to youngest.
+ graph.pulses.sort(key=lambda pulse: pulse.time, reverse=True)
+
return graph
def asdict(self) -> MutableMapping[str, Any]:
| Graph.assert_close() shouldn't ignore pulse ordering.
The docs currently say that `the order in which admixture pulses were specified` is ignored during the comparison. But this shouldn't strictly be the case: if multiple pulses are specified for the same time, the order in which they were defined will matter. Hence the correct thing to do is to stable-sort pulses by the time field and then compare.
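A minimal sketch of the proposed comparison (helper names and tolerance defaults here are illustrative only):

```python
import operator

def sorted_pulses(graph):
    # Python's sort is stable, so pulses that share a time value keep
    # their original relative order and get compared pairwise.
    return sorted(graph.pulses, key=operator.attrgetter("time"), reverse=True)

def assert_pulses_close(graph_a, graph_b, *, rel_tol=1e-9, abs_tol=1e-12):
    pulses_a = sorted_pulses(graph_a)
    pulses_b = sorted_pulses(graph_b)
    assert len(pulses_a) == len(pulses_b)
    for pulse_a, pulse_b in zip(pulses_a, pulses_b):
        pulse_a.assert_close(pulse_b, rel_tol=rel_tol, abs_tol=abs_tol)
```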
| Actually, it might be more tricky than this... The order of pulses with the same `time` value only matters when the pulses have demes in common. E.g. for pulse 1: `source=A, dest=B`, pulse 2: `source=B, dest=C`, the pulse order matters. But in other cases, e.g. pulse 1: `source=A, dest=B`, pulse 2: `source=X, dest=Y`, the order of pulses should be ignored.
Better to be overly strict and require a bit of arbitrary sortedness than to falsely claim equality, IMO | 2021-07-23T08:41:21 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-354 | 9a48a29b44ffd43bbb63e8e6b4707e858b43a5e7 | diff --git a/demes/__init__.py b/demes/__init__.py
index 5de76a98..46fa086e 100644
--- a/demes/__init__.py
+++ b/demes/__init__.py
@@ -28,7 +28,7 @@
dumps,
dump_all,
)
-from .ms import from_ms
+from .ms import from_ms, to_ms
__all__ = [
"Builder",
@@ -50,6 +50,7 @@
"dumps",
"dump_all",
"from_ms",
+ "to_ms",
]
diff --git a/demes/__main__.py b/demes/__main__.py
index 4d61f129..5a155a6e 100644
--- a/demes/__main__.py
+++ b/demes/__main__.py
@@ -11,23 +11,41 @@
class ParseCommand:
"""
- Parse models and write them to stdout in canonical form.
+ Parse models and write them to stdout. YAML is output by default,
+ but JSON or ms commands may instead be written. See options below.
"""
def __init__(self, subparsers):
parser = subparsers.add_parser(
"parse",
- help=self.__doc__,
+ help="Parse models and write them to stdout in canonical form.",
description=textwrap.dedent(self.__doc__),
)
parser.set_defaults(func=self)
- parser.add_argument(
+
+ format_group = parser.add_mutually_exclusive_group()
+ format_group.add_argument(
"-j",
"--json",
action="store_true",
default=False,
- help="Output a JSON-formatted model. YAML is output by default.",
+ help="Output a JSON-formatted model.",
+ )
+ format_group.add_argument(
+ "--ms",
+ metavar="REFERENCE_SIZE",
+ type=float,
+ default=None,
+ help=(
+ "Output ms command line arguments, using the given reference "
+ "population size (N0) to translate into coalescent units "
+ "(see the 'ms' subcommand for interpretation of this value)."
+ "The sampling configuration in the output will need editing "
+ "prior to simulation. The order of deme IDs matches the "
+ "order of demes in the input model. "
+ ),
)
+
parser.add_argument(
"-s",
"--simplified",
@@ -61,26 +79,37 @@ def __init__(self, subparsers):
def __call__(self, args: argparse.Namespace) -> None:
if args.json:
output_format = "json"
+ elif args.ms:
+ output_format = "ms"
else:
output_format = "yaml"
+ if args.ms and args.simplified:
+ # Ignore this for now.
+ pass
+
num_documents, graphs = self.load_and_count_documents(args.filename)
if num_documents == 0:
# Input file is empty.
pass
elif num_documents == 1:
- demes.dump(
- next(graphs),
- sys.stdout,
- simplified=args.simplified,
- format=output_format,
- )
+ graph = next(graphs)
+ if args.ms is not None:
+ print(demes.to_ms(graph, N0=args.ms))
+ else:
+ demes.dump(
+ graph,
+ sys.stdout,
+ simplified=args.simplified,
+ format=output_format,
+ )
else:
if output_format != "yaml":
raise RuntimeError(
"The input file contains multiple models, which is only "
- "supported with YAML output. If multi-model JSON output "
- "would be useful to you, please open an issue on github.",
+ "supported with YAML output. If multi-model output "
+ "would be useful to you with other formats, "
+ "please open an issue on github.",
)
demes.dump_all(graphs, sys.stdout, simplified=args.simplified)
@@ -132,7 +161,7 @@ class MsCommand:
def __init__(self, subparsers):
parser = subparsers.add_parser(
"ms",
- help="Build a Demes model using ms commands.",
+ help="Build a Demes model using ms command line arguments.",
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(self.__doc__),
)
diff --git a/demes/demes.py b/demes/demes.py
index 8242b6b5..62bb8383 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -2498,8 +2498,8 @@ def _add_migrations_from_matrices(
if migration_dict is None:
if rate != 0:
migration_dict = dict(
- source=deme_names[j],
- dest=deme_names[k],
+ source=deme_names[k],
+ dest=deme_names[j],
start_time=start_time,
end_time=end_time,
rate=rate,
@@ -2514,8 +2514,8 @@ def _add_migrations_from_matrices(
migration_dict["end_time"] = end_time
else:
migration_dict = dict(
- source=deme_names[j],
- dest=deme_names[k],
+ source=deme_names[k],
+ dest=deme_names[j],
start_time=start_time,
end_time=end_time,
rate=rate,
diff --git a/demes/ms.py b/demes/ms.py
index 0633c756..727fa73c 100644
--- a/demes/ms.py
+++ b/demes/ms.py
@@ -5,7 +5,7 @@
import sys
import operator
import itertools
-from typing import Any, List, Mapping, Set
+from typing import Any, List, Mapping, Set, Tuple
import attr
@@ -80,6 +80,21 @@ def __call__(self, parser, namespace, values, *args, **kwargs):
# from the graph building procedure.
+def float_str(a: float) -> str:
+ """
+ Convert float to string, for use in command line arguments.
+ """
+ if a < 0:
+ # Use lots of decimal places for negative numbers because argparse
+ # has problems parsing option args that are negative numbers in
+ # exponential form.
+ # https://github.com/popsim-consortium/demes-python/issues/325
+ # https://bugs.python.org/issue9334
+ return format(a, ".10f")
+ else:
+ return str(a)
+
+
@attr.define
class Option:
# This attribute is set by CoerceAction.
@@ -109,6 +124,12 @@ def from_nargs(cls, *args):
*n, rate = n
return cls(npop, n, rate)
+ def __str__(self):
+ s = ["-I", str(self.npop)] + [str(n) for n in self.n]
+ if self.rate > 0:
+ s += [float_str(self.rate)]
+ return " ".join(s)
+
@attr.define
class Event(Option):
@@ -124,6 +145,14 @@ class GrowthRateChange(Event):
# -eG t alpha
alpha = attr.ib(converter=float, validator=finite)
+ def __str__(self):
+ if self.t > 0:
+ s = ["-eG", float_str(self.t)]
+ else:
+ s = ["-G"]
+ s.append(float_str(self.alpha))
+ return " ".join(s)
+
@attr.define
class PopulationGrowthRateChange(Event):
@@ -132,12 +161,23 @@ class PopulationGrowthRateChange(Event):
i = attr.ib(converter=int, validator=positive)
alpha = attr.ib(converter=float, validator=finite)
+ def __str__(self):
+ if self.t > 0:
+ s = ["-eg", float_str(self.t)]
+ else:
+ s = ["-g"]
+ s.extend([str(self.i), float_str(self.alpha)])
+ return " ".join(s)
+
@attr.define
class SizeChange(Event):
# -eN t x
x = attr.ib(converter=float, validator=non_negative)
+ def __str__(self):
+ return " ".join(["-eN", float_str(self.t), float_str(self.x)])
+
@attr.define
class PopulationSizeChange(Event):
@@ -146,12 +186,23 @@ class PopulationSizeChange(Event):
i = attr.ib(converter=int, validator=positive)
x = attr.ib(converter=float, validator=non_negative)
+ def __str__(self):
+ if self.t > 0:
+ s = ["-en", float_str(self.t)]
+ else:
+ s = ["-n"]
+ s.extend([str(self.i), float_str(self.x)])
+ return " ".join(s)
+
@attr.define
class MigrationRateChange(Event):
# -eM t x
x = attr.ib(converter=float, validator=non_negative)
+ def __str__(self):
+ return " ".join(["-eM", float_str(self.t), float_str(self.x)])
+
@attr.define
class MigrationMatrixEntryChange(Event):
@@ -161,11 +212,19 @@ class MigrationMatrixEntryChange(Event):
j = attr.ib(converter=int, validator=positive)
rate = attr.ib(converter=float, validator=non_negative)
+ def __str__(self):
+ if self.t > 0:
+ s = ["-em", float_str(self.t)]
+ else:
+ s = ["-m"]
+ s.extend([str(self.i), str(self.j), float_str(self.rate)])
+ return " ".join(s)
+
@attr.define
class MigrationMatrixChange(Event):
- # -ma M11 M12 M12 ... M21 ...
- # -ema t npop M11 M12 M12 ... M21 ...
+ # -ma M11 M12 M13 ... M21 ...
+ # -ema t npop M11 M12 M13 ... M21 ...
npop = attr.ib(converter=int, validator=positive)
mm_vector = attr.ib()
@@ -205,6 +264,21 @@ def from_nargs(cls, *args):
t, npop, *mm_vector = args
return cls(t, npop, mm_vector)
+ def __str__(self):
+ if self.t > 0:
+ s = ["-ema", float_str(self.t)]
+ else:
+ s = ["-ma"]
+ s.append(str(self.npop))
+ M = self.M
+ for j in range(self.npop):
+ for k in range(self.npop):
+ if j == k:
+ s.append("x")
+ else:
+ s.append(float_str(M[j][k]))
+ return " ".join(s)
+
@attr.define
class Split(Event):
@@ -212,6 +286,9 @@ class Split(Event):
i = attr.ib(converter=int, validator=positive)
p = attr.ib(converter=float, validator=unit_interval)
+ def __str__(self):
+ return " ".join(["-es", float_str(self.t), str(self.i), float_str(self.p)])
+
@attr.define
class Join(Event):
@@ -219,6 +296,9 @@ class Join(Event):
i = attr.ib(converter=int, validator=positive)
j = attr.ib(converter=int, validator=positive)
+ def __str__(self):
+ return " ".join(["-ej", float_str(self.t), str(self.i), str(self.j)])
+
def build_parser(parser=None):
if parser is None:
@@ -518,10 +598,11 @@ def migration_matrix_at(time):
# same time parameter into more direct ancestry relationships.
n = num_demes + sum(1 for event in events_group if isinstance(event, Split))
lineage_movements = [[0] * n for _ in range(n)]
- for j in range(n):
+ for j in range(num_demes):
lineage_movements[j][j] = 1
- # The indices for lineages specified in Split/Join events.
- split_join_indices = set()
+ # The params gleaned from Split/Join events, which are used to collapse
+ # ancestry into plain old pulse migrations.
+ split_join_params: List[Tuple[int, int, float]] = []
for event in events_group:
if isinstance(event, GrowthRateChange):
@@ -627,9 +708,14 @@ def migration_matrix_at(time):
b.data["demes"][pop_i]["start_time"] = time
b.data["demes"][pop_i]["ancestors"] = [f"deme{pop_j + 1}"]
for lm in lineage_movements:
- lm[pop_j] = lm[pop_i]
+ lm[pop_j] += lm[pop_i]
lm[pop_i] = 0
- split_join_indices.add(pop_i)
+ for idx, (g, h, q) in reversed(list(enumerate(split_join_params))):
+ if h == pop_i:
+ split_join_params[idx] = (g, pop_j, q)
+ break
+ else:
+ split_join_params.append((pop_i, pop_j, 1))
mm = migration_matrix_at(time)
# Turn off migrations to/from deme i.
@@ -656,9 +742,10 @@ def migration_matrix_at(time):
epochs=[dict(end_size=N0, end_time=time)],
)
for lm in lineage_movements:
+ assert lm[new_pid] == 0
lm[new_pid] = (1 - event.p) * lm[pid]
lm[pid] *= event.p
- split_join_indices.add(pid)
+ split_join_params.append((pid, new_pid, 1 - event.p))
num_demes += 1
@@ -671,33 +758,31 @@ def migration_matrix_at(time):
else:
assert False, f"unhandled option: {event}"
- for j in split_join_indices:
+ for j, k, p in split_join_params:
ancestors = []
proportions = []
- for k, proportion in enumerate(lineage_movements[j]):
- if j != k and proportion > 0:
- ancestors.append(f"deme{k + 1}")
+ for o, proportion in enumerate(lineage_movements[j]):
+ if j != o and proportion > 0:
+ ancestors.append(o)
proportions.append(proportion)
if len(ancestors) == 0:
continue
p_jj = lineage_movements[j][j]
if p_jj == 0:
# No ancestry left in j.
- b.data["demes"][j]["ancestors"] = ancestors
+ b.data["demes"][j]["ancestors"] = [f"deme{o + 1}" for o in ancestors]
b.data["demes"][j]["proportions"] = proportions
else:
# Some ancestry is retained in j, so we use pulse migrations to
# indicate foreign ancestry.
# The order of pulses will later be reversed such that realised
# ancestry proportions are maintained forwards in time.
- for k, source in enumerate(ancestors):
- p = proportions[k] / (sum(proportions[k:]) + p_jj)
- b.add_pulse(
- source=source,
- dest=f"deme{j + 1}",
- time=time,
- proportion=p,
- )
+ b.add_pulse(
+ source=f"deme{k + 1}",
+ dest=f"deme{j + 1}",
+ time=time,
+ proportion=p,
+ )
# Resolve/remove growth_rate in oldest epochs.
for deme in b.data["demes"]:
@@ -806,3 +891,140 @@ def from_ms(
name_map = dict(zip((f"deme{j+1}" for j in range(len(deme_names))), deme_names))
graph = remap_deme_names(graph, name_map)
return graph
+
+
+def to_ms(graph: demes.Graph, *, N0, samples=None) -> str:
+ """
+ Get ms command line arguments for the graph.
+
+ The order of deme IDs matches the order of demes in the graph.
+
+ :param float N0:
+ The reference population size used to translate into coalescent units.
+ See :func:`from_ms` for details.
+ :param list(int) samples:
+ Sampling scheme that will be used with the '-I' option. This is ignored
+ for graphs with only one deme. If not specified, the sampling
+ configuration in the returned string will need editing prior to
+ simulation.
+ :return: The ms command line.
+ :rtype: str
+ """
+ graph = graph.in_generations()
+ cmd = []
+ num_demes = len(graph.demes)
+ if samples is not None and len(samples) != num_demes:
+ raise ValueError("samples must match the number of demes in the graph")
+ if num_demes > 1:
+ if samples is None:
+ # Output a no-samples configuration. The user must edit this anyway,
+ # so if they blindly pass this to a simulator, it should complain.
+ samples = [0] * num_demes
+ structure = Structure.from_nargs(num_demes, *samples)
+ cmd.append(str(structure))
+
+ def get_growth_rate(epoch):
+ ret = 0
+ if epoch.size_function not in ["constant", "exponential"]:
+ raise ValueError(
+ "ms only supports constant or exponentially changing population sizes"
+ )
+ if epoch.end_size != epoch.start_size:
+ dt = epoch.time_span / (4 * N0)
+ ret = -math.log(epoch.start_size / epoch.end_size) / dt
+ return ret
+
+ events: List[Event] = []
+ for j, deme in enumerate(graph.demes, 1):
+ size = N0
+ growth_rate = 0
+ for epoch in reversed(deme.epochs):
+ if size != epoch.end_size:
+ size = epoch.end_size
+ events.append(PopulationSizeChange(epoch.end_time, j, size / N0))
+ alpha = get_growth_rate(epoch)
+ if growth_rate != alpha:
+ growth_rate = alpha
+ events.append(
+ PopulationGrowthRateChange(epoch.end_time, j, growth_rate)
+ )
+ size = epoch.start_size
+
+ # Describing either the ancestry of a deme with multiple ancestors,
+ # or ancestry via a pulse migration, both require the use of -es/-ej to
+ # first split the deme (or pulse dest) into two and then immediately join
+ # one lineage to the ancestor (or pulse source). A split event creates
+ # a new deme with ID equal to the current number of demes plus 1,
+ # and we must know this ID in order to then join the new deme.
+ # To obtain correct IDs, we sort the combined list of demes and pulses
+ # by the deme start time or alternately the pulse time. IDs are then
+ # sequential with this ordering.
+ # Pulses are added in reverse order, so that pulses with the same time
+ # are given the correct backwards-time ordering.
+ # Pulses occurring at the same time as a deme's start time are possible,
+ # and in this case the pulse will come first (backwards in time).
+ demes_and_pulses = list(reversed(graph.pulses)) + list(graph.demes) # type: ignore
+ demes_and_pulses.sort(
+ key=lambda d_p: d_p.start_time if isinstance(d_p, demes.Deme) else d_p.time
+ )
+ deme_id = {deme.name: j for j, deme in enumerate(graph.demes, 1)}
+
+ for deme_or_pulse in demes_and_pulses:
+ if isinstance(deme_or_pulse, demes.Deme):
+ deme = deme_or_pulse
+ for k, ancestor in enumerate(deme.ancestors):
+ anc_id = deme_id[ancestor]
+ proportion = deme.proportions[k] / sum(deme.proportions[k:])
+ if k == len(deme.ancestors) - 1:
+ assert math.isclose(proportion, 1)
+ events.append(Join(deme.start_time, deme_id[deme.name], anc_id))
+ else:
+ num_demes += 1
+ new_deme_id = num_demes
+ e1 = Split(deme.start_time, deme_id[deme.name], 1 - proportion)
+ e2 = Join(deme.start_time, new_deme_id, anc_id)
+ events.extend([e1, e2])
+ else:
+ assert isinstance(deme_or_pulse, demes.Pulse)
+ pulse = deme_or_pulse
+ num_demes += 1
+ new_deme_id = num_demes
+ e1 = Split(pulse.time, deme_id[pulse.dest], 1 - pulse.proportion)
+ e2 = Join(pulse.time, new_deme_id, deme_id[pulse.source])
+ events.extend([e1, e2])
+
+ # Turn migrations off at the start_time. We schedule all start_time
+ # events first, and then all end_time events. This ensures that events
+ # for migrations with an end_time that coincides with the start_time of
+ # another migration will be scheduled later (backwards in time),
+ # and thus override the rate=0 setting.
+ for migration in graph.migrations:
+ if (
+ not math.isinf(migration.start_time)
+ and migration.start_time != graph[migration.dest].start_time
+ and migration.start_time != graph[migration.source].start_time
+ ):
+ events.append(
+ MigrationMatrixEntryChange(
+ t=migration.start_time,
+ i=deme_id[migration.dest],
+ j=deme_id[migration.source],
+ rate=0,
+ )
+ )
+ for migration in graph.migrations:
+ events.append(
+ MigrationMatrixEntryChange(
+ t=migration.end_time,
+ i=deme_id[migration.dest],
+ j=deme_id[migration.source],
+ rate=4 * N0 * migration.rate,
+ )
+ )
+
+ events.sort(key=operator.attrgetter("t"))
+ for event in events:
+ event.t /= 4 * N0
+
+ cmd.extend(str(event) for event in events)
+ return " ".join(cmd)
diff --git a/docs/api.md b/docs/api.md
index 6c0b8258..a7dc6024 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -70,3 +70,7 @@
```{eval-rst}
.. autofunction:: demes.from_ms
```
+
+```{eval-rst}
+.. autofunction:: demes.to_ms
+```
diff --git a/verification.py b/verification.py
new file mode 100644
index 00000000..0076b2a6
--- /dev/null
+++ b/verification.py
@@ -0,0 +1,411 @@
+#!/usr/bin/env python3
+# Verifies the demes.to_ms() converter by comparing summary statistics against
+# msprime and moments. Tested with Python 3.9 on Linux.
+import abc
+import concurrent.futures
+import functools
+import subprocess
+import os
+
+import demes
+import demesdraw
+import msprime
+import moments
+import tsconvert
+import numpy as np
+import matplotlib
+
+# Use non-GUI backend, to avoid problems with multiprocessing.
+matplotlib.use("Agg")
+import matplotlib.pyplot as plt # noqa: E402
+from matplotlib.backends.backend_pdf import PdfPages # noqa: E402
+
+NUM_PROCS = os.cpu_count()
+NUM_REPLICATES = 100_000
+REPS_PER_BATCH = 5_000
+assert NUM_REPLICATES % REPS_PER_BATCH == 0
+# ms isn't so useful for testing, as the trees' branch lengths are printed
+# to only 3 decimal places, which is insufficient to build a tree sequence.
+# We just want to check demes' ms output is implemented correctly, so mspms
+# is a convenient alternative.
+MS_COMMAND = "mspms"
+
+
+class Simulator(abc.ABC):
+ """Abstract base class for simulators."""
+
+ def __init__(
+ self, *, graph: demes.Graph, samples: dict[str, int], num_replicates=None
+ ):
+ self.graph = graph
+ self.samples = samples
+ if num_replicates is None:
+ num_replicates = 1
+ self.num_replicates = num_replicates
+
+ def tmrca(self):
+ """Get vector of tmrcas, one for each replicate."""
+ raise NotImplementedError
+
+ def sfs(self):
+ """Get 1D SFS vector (mean or expected)."""
+ raise NotImplementedError
+
+ # Functions for simulators that output tree sequences.
+
+ def _ts_callback(self, ts):
+ """Process one simulation replicate."""
+ if not hasattr(self, "_sfs"):
+ self._sfs = self._ts_sfs(ts)
+ else:
+ self._sfs += self._ts_sfs(ts)
+ if not hasattr(self, "_tmrca"):
+ self._tmrca = []
+ self._tmrca.append(self._ts_mean_tmrca(ts))
+
+ def _ts_sfs(self, ts):
+ """SFS branch stat."""
+ return ts.allele_frequency_spectrum(mode="branch", polarised=True)
+
+ def _ts_mean_tmrca(self, ts):
+ """Mean tmrca across all trees in the sequence."""
+ tmrca = []
+ for tree in ts.trees():
+ tmrca.append(tree.time(tree.root))
+ return np.mean(tmrca)
+
+
+class SimMs(Simulator):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.N0 = 1 # reference population size
+ self.run()
+
+ def run(self):
+ samples = [self.samples.get(deme.name, 0) for deme in self.graph.demes]
+ nsam = sum(samples)
+ assert nsam >= 2
+ # We must set a recombination rate, otherwise the output format
+ # is not recognised by tsconvert.
+ r = 1e-30
+ sequence_length = 100
+ rho = 4 * self.N0 * r * (sequence_length - 1)
+
+ ms_args = demes.to_ms(self.graph, N0=self.N0, samples=samples)
+ # We `nice` the subprocess, to avoid overcommitting the process pool
+ # due to having an additional subprocess.
+ cmd = (
+ f"nice -n 10 {MS_COMMAND} {nsam} {self.num_replicates} {ms_args} "
+ f"-T -r {rho} {sequence_length} -p 12"
+ ).split()
+
+ # Run the ms command, split the output at replicate delimiters `//`,
+ # and convert each replicate into a tree sequence with tsconvert.
+ num_tree_sequences = 0
+ with subprocess.Popen(
+ cmd, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ ) as process:
+ current_lines = None
+ for line in process.stdout:
+ line = line.rstrip()
+ if line.startswith("//"):
+ # next replicate
+ if current_lines:
+ ts = tsconvert.from_ms("\n".join(current_lines))
+ self._ts_callback(ts)
+ num_tree_sequences += 1
+ current_lines = []
+ elif current_lines is not None:
+ current_lines.append(line)
+ stderr = process.stderr.read()
+
+ if process.returncode != 0 or stderr.strip():
+ raise RuntimeError(f"{MS_COMMAND} failed:\n" + stderr)
+
+ if current_lines:
+ ts = tsconvert.from_ms("\n".join(current_lines))
+ self._ts_callback(ts)
+ num_tree_sequences += 1
+
+ assert num_tree_sequences == self.num_replicates
+
+ def sfs(self):
+ return self.N0 * np.array(self._sfs) / self.num_replicates
+
+ def tmrca(self):
+ return self.N0 * np.array(self._tmrca)
+
+
+class SimMsprime(Simulator):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.run()
+
+ def run(self):
+ demog = msprime.Demography.from_demes(self.graph)
+ ts_iter = msprime.sim_ancestry(
+ demography=demog,
+ samples=[
+ msprime.SampleSet(nsam, population=pop, ploidy=1)
+ for pop, nsam in self.samples.items()
+ ],
+ ploidy=2,
+ num_replicates=self.num_replicates,
+ record_provenance=False,
+ )
+ if self.num_replicates == 1:
+ ts_iter = [ts_iter]
+
+ for ts in ts_iter:
+ self._ts_callback(ts)
+
+ def sfs(self):
+ return np.array(self._sfs) / 4 / self.num_replicates
+
+ def tmrca(self):
+ return np.array(self._tmrca) / 4
+
+
+class SimMoments(Simulator):
+ def sfs(self):
+ fs = moments.Spectrum.from_demes(
+ self.graph,
+ sampled_demes=list(self.samples.keys()),
+ sample_sizes=list(self.samples.values()),
+ )
+ # Scale by the ancestral size.
+ # Moments only accepts graphs with one root, which is guaranteed
+ # to be the first deme in the graph.
+ N0 = self.graph.demes[0].epochs[0].start_size
+ return fs * N0
+
+
+class Parallel:
+ """Wrapper that runs a simulator's replicates in parallel batches."""
+
+ def __init__(
+ self, pool, sim_class, *, num_replicates=None, reps_per_batch=None, **kwargs
+ ):
+ if num_replicates is None:
+ num_replicates = NUM_REPLICATES
+ if reps_per_batch is None:
+ reps_per_batch = REPS_PER_BATCH
+ # Not worth supporting non-integral multiples.
+ assert num_replicates % reps_per_batch == 0
+ self.futures = []
+ self.num_batches = num_replicates // reps_per_batch
+ for _ in range(self.num_batches):
+ self.futures.append(
+ pool.submit(sim_class, num_replicates=reps_per_batch, **kwargs)
+ )
+ self.done = False
+
+ def _wait(self):
+ sfs = None
+ tmrca = []
+ for fs in concurrent.futures.as_completed(self.futures):
+ sim = fs.result()
+ if sfs is None:
+ sfs = sim.sfs()
+ else:
+ sfs += sim.sfs()
+ tmrca.extend(sim.tmrca())
+ self._sfs = sfs / self.num_batches
+ self._tmrca = tmrca
+ self.done = True
+
+ def sfs(self):
+ if not self.done:
+ self._wait()
+ return self._sfs
+
+ def tmrca(self):
+ if not self.done:
+ self._wait()
+ return self._tmrca
+
+
+def plot_sfs(ax, title, /, **kwargs):
+ """
+ Plot SFS onto the given axes.
+ """
+ plot_styles = [
+ dict(marker="o", ms=10, mfc="none", lw=2),
+ dict(marker="d", mfc="none", lw=1),
+ dict(marker="x", lw=1),
+ dict(marker="|", lw=1),
+ ]
+ style = iter(plot_styles)
+
+ for label, fs in kwargs.items():
+ x = np.arange(1, len(fs) - 1, dtype=int)
+ ax.plot(x, fs[1:-1], label=label, **next(style))
+
+ ax.set_yscale("log")
+ ax.xaxis.set_major_locator(matplotlib.ticker.MaxNLocator(integer=True))
+ ax.set_ylabel("Count")
+ ax.set_xlabel("Allele frequency")
+ ax.set_title(title)
+ ax.legend()
+
+
+def plot_qq(ax, title, /, **kwargs):
+ """
+ Plot QQ onto the given axes.
+ """
+ (x_label, x), (y_label, y) = kwargs.items()
+ quantiles = np.linspace(0, 1, 101)
+ xq = np.nanquantile(x, quantiles)
+ yq = np.nanquantile(y, quantiles)
+ ax.scatter(xq, yq, marker="o", edgecolor="black", facecolor="none")
+ ax.scatter(xq[50], yq[50], marker="x", lw=2, c="red", label="median")
+
+ # diagonal line
+ xlim = ax.get_xlim()
+ ylim = ax.get_ylim()
+ min_ = min(xlim[0], ylim[0])
+ max_ = max(xlim[1], ylim[1])
+ ax.plot([min_, max_], [min_, max_], c="lightgray", ls="--", lw=1, zorder=-10)
+ ax.set_xlim(xlim)
+ ax.set_ylim(ylim)
+
+ ax.set_xlabel(x_label)
+ ax.set_ylabel(y_label)
+ ax.set_title(title)
+ ax.legend()
+
+
+def get_axes(aspect=9 / 16, scale=1.5, **subplot_kwargs):
+ """Make a matplotlib axes."""
+ figsize = scale * plt.figaspect(aspect)
+ fig, ax = plt.subplots(figsize=figsize, **subplot_kwargs)
+ fig.set_tight_layout(True)
+ return fig, ax
+
+
+def log_time_heuristic(graph):
+ """Decide whether or not to use log time scale for demesdraw figure."""
+ times = {epoch.start_time for deme in graph.demes for epoch in deme.epochs}
+ times.update(epoch.end_time for deme in graph.demes for epoch in deme.epochs)
+ times.discard(np.inf)
+ times.discard(0)
+ if len(times) > 0 and max(times) / min(times) > 4:
+ log_time = True
+ else:
+ log_time = False
+ return log_time
+
+
+def multipanel_figure(pool, graph, *, sample_sets=None):
+ """Multipanel figure showing the graph, TMRCA QQ, and SFS."""
+ if sample_sets is None:
+ nsam = 20
+ sample_sets = [{deme.name: nsam} for deme in graph.demes]
+ fig, axs = get_axes(nrows=2, ncols=1 + len(sample_sets))
+ demesdraw.tubes(graph, ax=axs[0, 0], log_time=log_time_heuristic(graph))
+ axs[1, 0].set_axis_off()
+
+ for j, samples in enumerate(sample_sets, 1):
+ ms_sims = Parallel(pool, SimMs, graph=graph, samples=samples)
+ msprime_sims = Parallel(pool, SimMsprime, graph=graph, samples=samples)
+ moments_sims = SimMoments(graph=graph, samples=samples)
+ sample_str = ", ".join(f"{k}={v}" for k, v in samples.items())
+ plot_qq(
+ axs[0, j],
+ f"QQ TMRCA, samples: {sample_str}",
+ ms=ms_sims.tmrca(),
+ msprime=msprime_sims.tmrca(),
+ )
+ plot_sfs(
+ axs[1, j],
+ f"Frequency spectrum, samples: {sample_str}",
+ ms=ms_sims.sfs(),
+ msprime=msprime_sims.sfs(),
+ moments=moments_sims.sfs(),
+ )
+
+ return fig
+
+
+def graph_zigzag():
+ return demes.load("examples/zigzag.yml")
+
+
+def graph_twopop_asymmetric():
+ b = demes.Builder()
+ b.add_deme("a", epochs=[dict(start_size=10000)])
+ b.add_deme("b", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_migration(source="a", dest="b", rate=1e-3)
+ return b.resolve()
+
+
+def graph_twopop_symmetric():
+ b = demes.Builder()
+ b.add_deme("a", epochs=[dict(start_size=10000)])
+ b.add_deme("b", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_migration(demes=["a", "b"], rate=1e-3)
+ return b.resolve()
+
+
+def graph_twopop_pulse():
+ b = demes.Builder()
+ b.add_deme("a", epochs=[dict(start_size=10000)])
+ b.add_deme("b", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_pulse(source="a", dest="b", time=200, proportion=0.1)
+ return b.resolve()
+
+
+def graph_concurrent_pulses_AtoB_BtoC():
+ b = demes.Builder()
+ b.add_deme("a", epochs=[dict(start_size=10000)])
+ b.add_deme("b", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_deme("c", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_pulse(source="a", dest="b", time=200, proportion=0.5)
+ b.add_pulse(source="b", dest="c", time=200, proportion=0.5)
+ return b.resolve()
+
+
+def graph_concurrent_pulses_CtoB_BtoA():
+ b = demes.Builder()
+ b.add_deme("a", epochs=[dict(start_size=10000)])
+ b.add_deme("b", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_deme("c", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_pulse(source="c", dest="b", time=200, proportion=0.5)
+ b.add_pulse(source="b", dest="a", time=200, proportion=0.5)
+ return b.resolve()
+
+
+def graph_concurrent_pulses_AtoC_BtoC():
+ b = demes.Builder()
+ b.add_deme("a", epochs=[dict(start_size=10000)])
+ b.add_deme("b", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_deme("c", ancestors=["a"], start_time=2000, epochs=[dict(start_size=200)])
+ b.add_pulse(source="a", dest="c", time=200, proportion=0.5)
+ b.add_pulse(source="b", dest="c", time=200, proportion=0.5)
+ return b.resolve()
+
+
+if __name__ == "__main__":
+ with PdfPages("/tmp/verification.pdf") as pdf:
+ with concurrent.futures.ProcessPoolExecutor(NUM_PROCS) as pool:
+ fn = functools.partial(multipanel_figure, pool)
+ for fig in (
+ fn(graph_zigzag()),
+ fn(graph_twopop_symmetric()),
+ fn(graph_twopop_asymmetric()),
+ fn(graph_twopop_pulse()),
+ fn(
+ graph_concurrent_pulses_AtoB_BtoC(),
+ sample_sets=[dict(b=20), dict(c=20)],
+ ),
+ fn(
+ graph_concurrent_pulses_CtoB_BtoA(),
+ sample_sets=[dict(b=20), dict(c=20)],
+ ),
+ fn(
+ graph_concurrent_pulses_AtoC_BtoC(),
+ sample_sets=[dict(b=20), dict(c=20)],
+ ),
+ ):
+ pdf.savefig(figure=fig)
+ plt.close(fig)
| Conversion to/from ms format?
I'm guessing that a lot of people will want to be able to either convert their existing models to demes or to run demes models through simulators that support ms's demographic model description.
Should we provide ``from_ms()`` and ``to_ms()`` methods? (Or similarly named?)
The ``from_ms`` code wouldn't be too bad, as we could adapt the command line parsing code in [msprime](https://github.com/tskit-dev/msprime/blob/main/msprime/cli.py#L469). With a few small adjustments (i.e., raising ValueErrors rather than calling sys.exit) I think we could reuse the argparse code directly, which doesn't *have* to actually be used in the context of a CLI.
I've been thinking about doing this for the msprime 1.0 Demography API, but it seems to me it would be much more useful here.
Thoughts?
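For a sense of the API this implies — a sketch only, with the exact signatures still up for discussion:

```python
import demes

# ms arguments -> Demes graph. N0 is a reference population size used
# to rescale coalescent units into generations.
graph = demes.from_ms("-I 2 0 0 0.5 -ej 1.0 2 1", N0=10000)

# Demes graph -> ms arguments, using the same reference size.
ms_args = demes.to_ms(graph, N0=10000)
print(ms_args)
```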
| Yes, that would be an excellent addition.
Good stuff. I'm updating the msprime CLI code at the moment to modernise, so let's not take this on for a bit. | 2021-07-09T09:04:45 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-352 | 9257456fd4c863ff5e7fdd79329e430fe70eb4b0 | diff --git a/demes/__main__.py b/demes/__main__.py
index 85d4ddca..4d61f129 100644
--- a/demes/__main__.py
+++ b/demes/__main__.py
@@ -37,12 +37,12 @@ def __init__(self, subparsers):
"Output a simplified model. This is a compact representation "
"in which many default values are ommited. As only the "
"essential details are retained, this is usually easier for "
- "humans to read. The simplfied output is guaranteed to be a "
+ "humans to read. The simplified output is guaranteed to be a "
"valid Demes model that can be resolved identically to the "
- "input model. But exactly which fields are simplfied, "
+ "input model. But exactly which fields are simplified, "
"and how simplification is performed, may change over time. "
"Thus users should not rely on details of the output such as "
- "presence or absense of specific fields, or other details "
+ "presence or absence of specific fields, or other details "
"that do not alter how the model is resolved into a "
"fully-qualified model. "
"A fully-qualified model is output by default."
diff --git a/demes/demes.py b/demes/demes.py
index 85957d73..8242b6b5 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -2462,3 +2462,98 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Builder":
builder = cls()
builder.data = data
return builder
+
+ # Below are general-purpose functions that operate on a data dict,
+ # which are used in the ms conversion code.
+
+ def _sort_demes_by_ancestry(self) -> None:
+ """
+ Sort demes by their start time so that ancestors come before descendants.
+ """
+ self.data["demes"].sort(key=operator.itemgetter("start_time"), reverse=True)
+
+ def _add_migrations_from_matrices(
+ self, mm_list: List[List[List[float]]], end_times: List[float]
+ ) -> None:
+ """
+ Convert a list of migration matrices into a list of migration dicts.
+ """
+ assert len(mm_list) == len(end_times)
+ assert len(self.data.get("migrations", [])) == 0
+ deme_names = [deme["name"] for deme in self.data.get("demes", [])]
+ assert len(deme_names) > 0
+ migrations: List[MutableMapping] = []
+ current: Dict[Tuple[int, int], MutableMapping] = dict()
+ start_time = math.inf
+ for migration_matrix, end_time in zip(mm_list, end_times):
+ n = len(migration_matrix)
+ assert n == len(deme_names)
+ for j in range(n):
+ assert n == len(migration_matrix[j])
+ for k in range(n):
+ if j == k:
+ continue
+ rate = migration_matrix[j][k]
+ migration_dict = current.get((j, k))
+ if migration_dict is None:
+ if rate != 0:
+ migration_dict = dict(
+ source=deme_names[j],
+ dest=deme_names[k],
+ start_time=start_time,
+ end_time=end_time,
+ rate=rate,
+ )
+ current[(j, k)] = migration_dict
+ migrations.append(migration_dict)
+ else:
+ if rate == 0:
+ del current[(j, k)]
+ elif migration_dict["rate"] == rate:
+ # extend migration_dict
+ migration_dict["end_time"] = end_time
+ else:
+ migration_dict = dict(
+ source=deme_names[j],
+ dest=deme_names[k],
+ start_time=start_time,
+ end_time=end_time,
+ rate=rate,
+ )
+ current[(j, k)] = migration_dict
+ migrations.append(migration_dict)
+ start_time = end_time
+ self.data["migrations"] = migrations
+
+ def _remove_transient_demes(self) -> None:
+ """
+ Remove demes that don't exist (deme.start_time == deme.end_time).
+
+ These demes are not valid, but could be created from ms commands where
+ a lineage splits and is then immediately joined.
+ ms -I 2 1 1 -es 1.0 1 0.1 -ej 1.0 3 2
+ This approach appears to be the only possible way to represent certain
+ types of demographic relationships using ms commands, such as the pulse
+ migration depicted above.
+ """
+ demes = list(self.data.get("demes", []))
+ assert len(demes) > 0
+ num_removed = 0
+ for j, deme in enumerate(demes):
+ start_time = deme["start_time"]
+ end_time = deme["epochs"][-1]["end_time"]
+ if start_time == 0 or math.isinf(start_time):
+ # errors with this caught elsewhere
+ continue
+ if start_time == end_time:
+ for pulse in self.data.get("pulses", []):
+ assert pulse["source"] != deme["name"]
+ assert pulse["dest"] != deme["name"]
+ for migration in self.data.get("migrations", []):
+ assert deme["name"] not in migration.get("demes", [])
+ assert deme["name"] != migration.get("source")
+ assert deme["name"] != migration.get("dest")
+ for other in self.data["demes"]:
+ assert deme["name"] not in other.get("ancestors", [])
+ del self.data["demes"][j - num_removed]
+ num_removed += 1
diff --git a/demes/ms.py b/demes/ms.py
index 65eec639..0633c756 100644
--- a/demes/ms.py
+++ b/demes/ms.py
@@ -4,7 +4,8 @@
import logging
import sys
import operator
-from typing import Any, Dict, List, Mapping, MutableMapping, Set, Tuple
+import itertools
+from typing import Any, List, Mapping, Set
import attr
@@ -206,14 +207,14 @@ def from_nargs(cls, *args):
@attr.define
-class Admixture(Event):
+class Split(Event):
# -es t i p
i = attr.ib(converter=int, validator=positive)
p = attr.ib(converter=float, validator=unit_interval)
@attr.define
-class PopulationSplit(Event):
+class Join(Event):
# -ej t i j
i = attr.ib(converter=int, validator=positive)
j = attr.ib(converter=int, validator=positive)
@@ -386,87 +387,36 @@ def __call__(self, parser, namespace, filename, option_string=None):
parser.add_argument(
"-es",
nargs=3,
- action=coerce_nargs(Admixture, append=True),
+ action=coerce_nargs(Split, append=True),
dest="demographic_events",
default=[],
metavar=("t", "i", "p"),
help=(
"Split deme i into a new deme, such that the specified "
- "proportion p of lineages remains in deme i. Forwards in time "
- "this corresponds to an admixture event with the extinction of "
- "the new deme. The new deme has ID num_demes + 1, and has size N0, "
- "growth rate 0, and migration rates to and from the new deme are "
- "set to 0."
+ "proportion p of lineages remains in deme i. The new deme has ID "
+ "num_demes + 1, and has size N0, growth rate 0, and migration "
+ "rates to and from the new deme are set to 0. "
+ "Forwards in time this corresponds to an admixture event with "
+ "the extinction of the new deme."
),
)
parser.add_argument(
"-ej",
nargs=3,
- action=coerce_nargs(PopulationSplit, append=True),
+ action=coerce_nargs(Join, append=True),
dest="demographic_events",
default=[],
metavar=("t", "i", "j"),
help=(
- "Move all lineages in deme i to j at time t. "
- "Forwards in time, this corresponds to a population split "
- "in which lineages in j split into i. All migration "
- "rates for deme i are set to zero."
+ "Move all lineages in deme i to j at time t. All migration "
+ "rates for deme i are set to zero. "
+ "Forwards in time, this corresponds to a branch event "
+ "in which lineages in j split into i."
),
)
return parser
-def migrations_from_mm_list(
- mm_list: List[List[List[float]]], end_times: List[float], deme_names: List[str]
-) -> List[MutableMapping]:
- """
- Convert a list of migration matrices into a list of migration dicts.
- """
- assert len(mm_list) == len(end_times)
- migrations: List[MutableMapping] = []
- current: Dict[Tuple[int, int], MutableMapping] = dict()
- start_time = math.inf
- for migration_matrix, end_time in zip(mm_list, end_times):
- n = len(migration_matrix)
- assert n == len(deme_names)
- for j in range(n):
- assert n == len(migration_matrix[j])
- for k in range(n):
- if j == k:
- continue
- rate = migration_matrix[j][k]
- mm = current.get((j, k))
- if mm is None:
- if rate != 0:
- mm = dict(
- source=deme_names[j],
- dest=deme_names[k],
- start_time=start_time,
- end_time=end_time,
- rate=rate,
- )
- current[(j, k)] = mm
- migrations.append(mm)
- else:
- if rate == 0:
- del current[(j, k)]
- elif mm["rate"] == rate:
- # extend mm
- mm["end_time"] = end_time
- else:
- mm = dict(
- source=deme_names[j],
- dest=deme_names[k],
- start_time=start_time,
- end_time=end_time,
- rate=rate,
- )
- current[(j, k)] = mm
- migrations.append(mm)
- start_time = end_time
- return migrations
-
-
def build_graph(args, N0: float) -> demes.Graph:
num_demes = 1
# List of migration matrices in time-descending order (oldest to most recent).
@@ -555,147 +505,199 @@ def migration_matrix_at(time):
return migration_matrix
# Sort demographic events args by the time field.
- args.demographic_events.sort(key=lambda x: x.t)
+ args.demographic_events.sort(key=operator.attrgetter("t"))
# Process the initial_state options followed by the demographic_events.
- for event in args.initial_state + args.demographic_events:
- time = 4 * N0 * event.t
- if isinstance(event, GrowthRateChange):
- # -G α
- # -eG t α
- growth_rate = event.alpha / (4 * N0)
- for deme in b.data["demes"]:
+ for t, events_iter in itertools.groupby(
+ args.initial_state + args.demographic_events, operator.attrgetter("t")
+ ):
+ time = 4 * N0 * t
+ events_group = list(events_iter)
+
+ # Lineage movements matrix to track -es/ej (Split/Join) events.
+ # This is used to turn complex sequences of -es/-ej events with the
+ # same time parameter into more direct ancestry relationships.
+ n = num_demes + sum(1 for event in events_group if isinstance(event, Split))
+ lineage_movements = [[0] * n for _ in range(n)]
+ for j in range(n):
+ lineage_movements[j][j] = 1
+ # The indices for lineages specified in Split/Join events.
+ split_join_indices = set()
+
+ for event in events_group:
+ if isinstance(event, GrowthRateChange):
+ # -G α
+ # -eG t α
+ growth_rate = event.alpha / (4 * N0)
+ for j, deme in enumerate(b.data["demes"]):
+ if j not in joined:
+ current_epoch = deme["epochs"][0]
+ current_growth_rate = current_epoch.get("growth_rate", 0)
+ if current_growth_rate != growth_rate:
+ epoch = epoch_resolve(deme, time)
+ epoch["growth_rate"] = growth_rate
+
+ elif isinstance(event, PopulationGrowthRateChange):
+ # -g i α
+ # -eg t i α
+ pid = convert_population_id(event.i)
+ growth_rate = event.alpha / (4 * N0)
+ deme = b.data["demes"][pid]
current_epoch = deme["epochs"][0]
current_growth_rate = current_epoch.get("growth_rate", 0)
if current_growth_rate != growth_rate:
epoch = epoch_resolve(deme, time)
epoch["growth_rate"] = growth_rate
- elif isinstance(event, PopulationGrowthRateChange):
- # -g i α
- # -eg t i α
- pid = convert_population_id(event.i)
- growth_rate = event.alpha / (4 * N0)
- deme = b.data["demes"][pid]
- current_epoch = deme["epochs"][0]
- current_growth_rate = current_epoch.get("growth_rate", 0)
- if current_growth_rate != growth_rate:
- epoch = epoch_resolve(deme, time)
- epoch["growth_rate"] = growth_rate
-
- elif isinstance(event, SizeChange):
- # -eN t x
- size = event.x * N0
- for deme in b.data["demes"]:
+ elif isinstance(event, SizeChange):
+ # -eN t x
+ size = event.x * N0
+ for j, deme in enumerate(b.data["demes"]):
+ if j not in joined:
+ current_epoch = deme["epochs"][0]
+ current_growth_rate = current_epoch.get("growth_rate", 0)
+ if (
+ current_growth_rate != 0
+ or current_epoch["end_size"] != size
+ ):
+ epoch = epoch_resolve(deme, time)
+ epoch["growth_rate"] = 0
+ epoch["end_size"] = size
+
+ elif isinstance(event, PopulationSizeChange):
+ # -n i x
+ # -en t i x
+ pid = convert_population_id(event.i)
+ size = event.x * N0
+ deme = b.data["demes"][pid]
current_epoch = deme["epochs"][0]
current_growth_rate = current_epoch.get("growth_rate", 0)
if current_growth_rate != 0 or current_epoch["end_size"] != size:
epoch = epoch_resolve(deme, time)
- epoch["growth_rate"] = 0
epoch["end_size"] = size
-
- elif isinstance(event, PopulationSizeChange):
- # -n i x
- # -en t i x
- pid = convert_population_id(event.i)
- size = event.x * N0
- deme = b.data["demes"][pid]
- current_epoch = deme["epochs"][0]
- current_growth_rate = current_epoch.get("growth_rate", 0)
- if current_growth_rate != 0 or current_epoch["end_size"] != size:
- epoch = epoch_resolve(deme, time)
- epoch["end_size"] = size
- # set growth_rate to 0 for -en option, but not for -n option
- if "-en" in event.option_strings:
- epoch["growth_rate"] = 0
-
- elif isinstance(event, PopulationSplit):
- # -ej t i j
- pop_i = convert_population_id(event.i)
- pop_j = convert_population_id(event.j)
-
- b.data["demes"][pop_i]["start_time"] = time
- b.data["demes"][pop_i]["ancestors"] = [f"deme{pop_j + 1}"]
-
- mm = migration_matrix_at(time)
- # Turn off migrations to/from deme i.
- for k in range(num_demes):
- if k != pop_i:
- mm[k][pop_i] = 0
- mm[pop_i][k] = 0
-
- # Record pop_i so that this index isn't used by later events.
- joined.add(pop_i)
-
- elif isinstance(event, Admixture):
- # -es t i p
- pid = convert_population_id(event.i)
-
- # Add a new deme which will be the source of a migration pulse.
- new_pid = num_demes
- b.add_deme(
- f"deme{new_pid + 1}",
- start_time=math.inf,
- epochs=[dict(end_size=N0, end_time=time)],
- )
- # In ms, the probability of staying in source is p and the
- # probabilty of moving to the new population is 1 - p.
- b.add_pulse(
- source=f"deme{new_pid + 1}",
- dest=f"deme{pid + 1}",
- time=time,
- proportion=1 - event.p,
- )
- num_demes += 1
-
- # Expand each migration matrix with a row and column of zeros.
- for migration_matrix in mm_list:
- for row in migration_matrix:
- row.append(0)
- migration_matrix.append([0 for _ in range(num_demes)])
-
- ##
- # Demographic events that affect the migration matrix
-
- elif isinstance(event, MigrationRateChange):
- # -eM t x
- mm = migration_matrix_at(time)
- for j in range(len(mm)):
- for k in range(len(mm)):
- if j != k:
- mm[j][k] = event.x / (num_demes - 1)
-
- elif isinstance(event, MigrationMatrixEntryChange):
- # -m i j x
- # -em t i j x
- pid_i = convert_population_id(event.i)
- pid_j = convert_population_id(event.j)
- if pid_i == pid_j:
- raise ValueError("Cannot set diagonal elements in migration matrix")
- mm = migration_matrix_at(time)
- mm[pid_i][pid_j] = event.rate
-
- elif isinstance(event, MigrationMatrixChange):
- # -ma M11 M12 M12 ... M21 ...
- # -ema t npop M11 M12 M12 ... M21 ...
- if "-ma" in event.option_strings:
- event.npop = num_demes
- if event.npop != num_demes:
- raise ValueError(
- f"-ema 'npop' ({event.npop}) doesn't match the current "
- f"number of demes ({num_demes})"
- )
- _ = migration_matrix_at(time)
- mm = mm_list[0] = copy.deepcopy(event.M)
- # Ms ignores matrix entries for demes that were previously joined
- # (-ej option), and users may deliberately put invalid values
- # here (e.g. 'x'). So we explicitly set these rates to zero.
- for j in joined:
+ # set growth_rate to 0 for -en option, but not for -n option
+ if "-en" in event.option_strings:
+ epoch["growth_rate"] = 0
+
+ elif isinstance(event, MigrationRateChange):
+ # -eM t x
+ mm = migration_matrix_at(time)
+ for j in range(len(mm)):
+ if j not in joined:
+ for k in range(len(mm)):
+ if j != k and k not in joined:
+ mm[j][k] = event.x / (num_demes - 1)
+
+ elif isinstance(event, MigrationMatrixEntryChange):
+ # -m i j x
+ # -em t i j x
+ pid_i = convert_population_id(event.i)
+ pid_j = convert_population_id(event.j)
+ if pid_i == pid_j:
+ raise ValueError("Cannot set diagonal elements in migration matrix")
+ mm = migration_matrix_at(time)
+ mm[pid_i][pid_j] = event.rate
+
+ elif isinstance(event, MigrationMatrixChange):
+ # -ma M11 M12 M12 ... M21 ...
+ # -ema t npop M11 M12 M12 ... M21 ...
+ if "-ma" in event.option_strings:
+ event.npop = num_demes
+ if event.npop != num_demes:
+ raise ValueError(
+ f"-ema 'npop' ({event.npop}) doesn't match the current "
+ f"number of demes ({num_demes})"
+ )
+ _ = migration_matrix_at(time)
+ mm = mm_list[0] = copy.deepcopy(event.M)
+ # Ms ignores matrix entries for demes that were previously joined
+ # (-ej option), and users may deliberately put invalid values
+ # here (e.g. 'x'). So we explicitly set these rates to zero.
+ for j in joined:
+ for k in range(num_demes):
+ if j != k:
+ mm[j][k] = 0
+ mm[k][j] = 0
+
+ elif isinstance(event, Join):
+ # -ej t i j
+ # Move all lineages from deme i to deme j at time t.
+ pop_i = convert_population_id(event.i)
+ pop_j = convert_population_id(event.j)
+
+ b.data["demes"][pop_i]["start_time"] = time
+ b.data["demes"][pop_i]["ancestors"] = [f"deme{pop_j + 1}"]
+ for lm in lineage_movements:
+ lm[pop_j] = lm[pop_i]
+ lm[pop_i] = 0
+ split_join_indices.add(pop_i)
+
+ mm = migration_matrix_at(time)
+ # Turn off migrations to/from deme i.
for k in range(num_demes):
- if j != k:
- mm[j][k] = 0
- mm[k][j] = 0
- else:
- assert False, f"unhandled option: {event}"
+ if k != pop_i:
+ mm[k][pop_i] = 0
+ mm[pop_i][k] = 0
+
+ # Record pop_i so that this index isn't used by later events.
+ joined.add(pop_i)
+
+ elif isinstance(event, Split):
+ # -es t i p
+ # Split deme i into a new deme (num_demes + 1),
+ # with proportion p of lineages remaining in deme i,
+ # and 1-p moving to the new deme.
+ pid = convert_population_id(event.i)
+
+ # Add new deme.
+ new_pid = num_demes
+ b.add_deme(
+ f"deme{new_pid + 1}",
+ start_time=math.inf,
+ epochs=[dict(end_size=N0, end_time=time)],
+ )
+ for lm in lineage_movements:
+ lm[new_pid] = (1 - event.p) * lm[pid]
+ lm[pid] *= event.p
+ split_join_indices.add(pid)
+
+ num_demes += 1
+
+ # Expand each migration matrix with a row and column of zeros.
+ for migration_matrix in mm_list:
+ for row in migration_matrix:
+ row.append(0)
+ migration_matrix.append([0 for _ in range(num_demes)])
+
+ else:
+ assert False, f"unhandled option: {event}"
+
+ for j in split_join_indices:
+ ancestors = []
+ proportions = []
+ for k, proportion in enumerate(lineage_movements[j]):
+ if j != k and proportion > 0:
+ ancestors.append(f"deme{k + 1}")
+ proportions.append(proportion)
+ if len(ancestors) == 0:
+ continue
+ p_jj = lineage_movements[j][j]
+ if p_jj == 0:
+ # No ancestry left in j.
+ b.data["demes"][j]["ancestors"] = ancestors
+ b.data["demes"][j]["proportions"] = proportions
+ else:
+ # Some ancestry is retained in j, so we use pulse migrations to
+ # indicate foreign ancestry.
+ # The order of pulses will later be reversed such that realised
+ # ancestry proportions are maintained forwards in time.
+ for k, source in enumerate(ancestors):
+ p = proportions[k] / (sum(proportions[k:]) + p_jj)
+ b.add_pulse(
+ source=source,
+ dest=f"deme{j + 1}",
+ time=time,
+ proportion=p,
+ )
# Resolve/remove growth_rate in oldest epochs.
for deme in b.data["demes"]:
@@ -712,18 +714,22 @@ def migration_matrix_at(time):
else:
epoch["start_size"] = epoch["end_size"]
- migrations = migrations_from_mm_list(
- mm_list, mm_end_times, [deme["name"] for deme in b.data["demes"]]
- )
+ # Convert migration matrices into migration dictionaries.
+ b._add_migrations_from_matrices(mm_list, mm_end_times)
+
# Rescale rates so they don't have units of 4*N0.
- for migration in migrations:
+ for migration in b.data["migrations"]:
migration["rate"] /= 4 * N0
- b.data["migrations"] = migrations
+
+ # Remove demes whose existence time span is zero.
+ # These can be created by simultaneous -es/-ej commands.
+ b._remove_transient_demes()
# Sort demes by their start time so that ancestors come before descendants.
- b.data["demes"] = sorted(
- b.data["demes"], key=operator.itemgetter("start_time"), reverse=True
- )
+ b._sort_demes_by_ancestry()
+
+ # Reverse the order of pulses so realised ancestry proportions are correct.
+ b.data.get("pulses", []).reverse()
graph = b.resolve()
return graph
| from_ms: error setting properties for all demes after one deme has been joined
This should just set sizes for the non-joined demes.
```
$ python -m demes ms -N0 1 -I 2 1 1 -ej 1.0 2 1 -eN 2.0 2
Traceback (most recent call last):
File "/usr/lib/python3.9/runpy.py", line 197, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/usr/lib/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/home/grg/src/demes/demes-python/demes/__main__.py", line 187, in <module>
cli()
File "/home/grg/src/demes/demes-python/demes/__main__.py", line 183, in cli
args.func(args)
File "/home/grg/src/demes/demes-python/demes/__main__.py", line 162, in __call__
graph = ms.build_graph(args, N0=args.reference_size)
File "/home/grg/src/demes/demes-python/demes/ms.py", line 592, in build_graph
epoch = epoch_resolve(deme, time)
File "/home/grg/src/demes/demes-python/demes/ms.py", line 525, in epoch_resolve
raise ValueError(
ValueError: time 8.0 outside deme2's existence interval (start_time=4.0, end_time=0]
```
The same bug exists for setting growth rates of all demes (`-eG`) and migration rates between all demes (`-eM`).
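As an illustration, the handler for `-eN` (and likewise `-eG`/`-eM`) needs a guard like the sketch below, assuming a `joined` set recording deme indices absorbed by earlier `-ej` events, as in the patch above; `set_deme_size` is a hypothetical helper:
```python
# Handle -eN t x: set the size of every *extant* deme at time t.
for j in range(num_demes):
    if j in joined:
        # Deme j was absorbed by an earlier -ej event and no longer
        # exists at this time, so -eN must not touch it.
        continue
    set_deme_size(j, time, size)  # hypothetical helper
```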
| 2021-07-05T13:48:44 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-335 | 7db3a05e2f6a54097135a785e6bc6ad160c61d38 | diff --git a/demes/__init__.py b/demes/__init__.py
index 05a51fbe..ae03846c 100644
--- a/demes/__init__.py
+++ b/demes/__init__.py
@@ -20,5 +20,14 @@
Merge,
Admix,
)
-from .load_dump import load_asdict, loads_asdict, load, loads, dump, dumps
+from .load_dump import (
+ load_asdict,
+ loads_asdict,
+ load,
+ loads,
+ load_all,
+ dump,
+ dumps,
+ dump_all,
+)
from .ms import from_ms
diff --git a/demes/demes.py b/demes/demes.py
index 77e2595f..85957d73 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -2270,7 +2270,7 @@ class Builder:
:ivar dict data: The data dictionary of the graph's current state.
The objects nested within this dictionary follow Demes' data model,
- as described in the :ref:`spec:sec_ref`.
+ as described in the :ref:`spec:sec_spec`.
.. note::
Users may freely modify the data dictionary, as long as the data
@@ -2454,7 +2454,7 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Builder":
:param MutableMapping data: The data dictionary to initialise the
graph's state. The objects nested within this dictionary must
- follow Demes' data model, as described in the :ref:`spec:sec_ref`.
+ follow Demes' data model, as described in the :ref:`spec:sec_spec`.
:return: The new Builder object.
:rtype: Builder
diff --git a/demes/load_dump.py b/demes/load_dump.py
index 9effcc03..b7260f7c 100644
--- a/demes/load_dump.py
+++ b/demes/load_dump.py
@@ -5,7 +5,7 @@
import json
import io
import math
-from typing import MutableMapping, Any
+from typing import Any, Generator, MutableMapping
import ruamel.yaml
@@ -36,12 +36,22 @@ def _open_file_polymorph(polymorph, mode="r"):
# which are hopefully simple enough to not suffer from API instability.
-def _load_yaml_asdict(fp):
+def _load_yaml_asdict(fp) -> MutableMapping[str, Any]:
with ruamel.yaml.YAML(typ="safe") as yaml:
return yaml.load(fp)
-def _dump_yaml_fromdict(data, fp):
+def _dump_yaml_fromdict(data, fp, multidoc=False) -> None:
+ """
+ Dump data dict to a YAML file-like object.
+
+ :param bool multidoc: If True, output the YAML document start line ``---``,
+ and document end line ``...``, which indicate the beginning and end of
+ a YAML document respectively. The start indicator is needed when
+ outputting multiple YAML documents to a single file (or file stream).
+ The end indicator is not strictly needed, but may be desirable
+ depending on the underlying communication channel.
+ """
with ruamel.yaml.YAML(typ="safe", output=fp) as yaml:
# Output flow style, but only for collections that consist only
# of scalars (i.e. the leaves in the document tree).
@@ -51,6 +61,9 @@ def _dump_yaml_fromdict(data, fp):
yaml.allow_unicode = False
# Keep dict insertion order, thank you very much!
yaml.sort_base_mapping_type_on_output = False
+ if multidoc:
+ yaml.explicit_start = True
+ yaml.explicit_end = True
yaml.dump(data)
@@ -93,11 +106,11 @@ def _unstringify_infinities(data: MutableMapping[str, Any]) -> None:
migration["start_time"] = float(start_time)
-def loads_asdict(string, *, format="yaml"):
+def loads_asdict(string, *, format="yaml") -> MutableMapping[str, Any]:
"""
Load a YAML or JSON string into a dictionary of nested objects.
The keywords and structure of the input are defined by the
- :ref:`spec:sec_ref`.
+ :ref:`spec:sec_spec`.
:param str string: The string to be loaded.
:param str format: The format of the input string. Either "yaml" or "json".
@@ -109,11 +122,11 @@ def loads_asdict(string, *, format="yaml"):
return load_asdict(stream, format=format)
-def load_asdict(filename, *, format="yaml"):
+def load_asdict(filename, *, format="yaml") -> MutableMapping[str, Any]:
"""
Load a YAML or JSON file into a dictionary of nested objects.
The keywords and structure of the input are defined by the
- :ref:`spec:sec_ref`.
+ :ref:`spec:sec_spec`.
:param filename: The path to the file to be loaded, or a file-like object
with a ``read()`` method.
@@ -135,11 +148,11 @@ def load_asdict(filename, *, format="yaml"):
return data
-def loads(string, *, format="yaml"):
+def loads(string, *, format="yaml") -> "demes.Graph":
"""
Load a graph from a YAML or JSON string.
The keywords and structure of the input are defined by the
- :ref:`spec:sec_ref`.
+ :ref:`spec:sec_spec`.
:param str string: The string to be loaded.
:param str format: The format of the input string. Either "yaml" or "json".
@@ -150,11 +163,11 @@ def loads(string, *, format="yaml"):
return demes.Graph.fromdict(data)
-def load(filename, *, format="yaml"):
+def load(filename, *, format="yaml") -> "demes.Graph":
"""
Load a graph from a YAML or JSON file.
The keywords and structure of the input are defined by the
- :ref:`spec:sec_ref`.
+ :ref:`spec:sec_spec`.
:param filename: The path to the file to be loaded, or a file-like object
with a ``read()`` method.
@@ -167,16 +180,35 @@ def load(filename, *, format="yaml"):
return demes.Graph.fromdict(data)
-def dumps(graph, *, format="yaml", simplified=True):
+def load_all(filename) -> Generator["demes.Graph", None, None]:
+ """
+ Generate graphs from a YAML document stream. Documents must be separated by
+ the YAML document start indicator, ``---``.
+ The keywords and structure of each document are defined by the
+ :ref:`spec:sec_spec`.
+
+ :param filename: The path to the file to be loaded, or a file-like object
+ with a ``read()`` method.
+ :type filename: Union[str, os.PathLike, FileLike]
+ :return: A generator of graphs.
+ :rtype: Generator[demes.Graph, None, None]
+ """
+ with _open_file_polymorph(filename) as f:
+ with ruamel.yaml.YAML(typ="safe") as yaml:
+ for data in yaml.load_all(f):
+ yield demes.Graph.fromdict(data)
+
+
+def dumps(graph, *, format="yaml", simplified=True) -> str:
"""
Dump the specified graph to a YAML or JSON string.
The keywords and structure of the output are defined by the
- :ref:`spec:sec_ref`.
+ :ref:`spec:sec_spec`.
:param .Graph graph: The graph to dump.
:param str format: The format of the output file. Either "yaml" or "json".
:param bool simplified: If True, returns a simplified graph. If False, returns
- a complete redundant graph.
+ a fully-qualified graph.
:return: The YAML or JSON string.
:rtype: str
"""
@@ -186,11 +218,11 @@ def dumps(graph, *, format="yaml", simplified=True):
return string
-def dump(graph, filename, *, format="yaml", simplified=True):
+def dump(graph, filename, *, format="yaml", simplified=True) -> None:
"""
Dump the specified graph to a file.
The keywords and structure of the output are defined by the
- :ref:`spec:sec_ref`.
+ :ref:`spec:sec_spec`.
:param .Graph graph: The graph to dump.
:param filename: Path to the output file, or a file-like object with a
@@ -198,7 +230,7 @@ def dump(graph, filename, *, format="yaml", simplified=True):
:type filename: Union[str, os.PathLike, FileLike]
:param str format: The format of the output file. Either "yaml" or "json".
:param bool simplified: If True, outputs a simplified graph. If False, outputs
- a redundant graph.
+ a fully-qualified graph.
"""
if simplified:
data = graph.asdict_simplified()
@@ -214,3 +246,23 @@ def dump(graph, filename, *, format="yaml", simplified=True):
_dump_yaml_fromdict(data, f)
else:
raise ValueError(f"unknown format: {format}")
+
+
+def dump_all(graphs, filename, *, simplified=True) -> None:
+ """
+ Dump the specified graphs to a multi-document YAML file or output stream.
+
+ :param graphs: An iterable of graphs to dump.
+ :param filename: Path to the output file, or a file-like object with a
+ ``write()`` method.
+ :type filename: Union[str, os.PathLike, FileLike]
+ :param bool simplified: If True, outputs simplified graphs. If False, outputs
+ fully-qualified graphs.
+ """
+ with _open_file_polymorph(filename, "w") as f:
+ for graph in graphs:
+ if simplified:
+ data = graph.asdict_simplified()
+ else:
+ data = graph.asdict()
+ _dump_yaml_fromdict(data, f, multidoc=True)
diff --git a/docs/api.md b/docs/api.md
index 33dd5324..6c0b8258 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -9,8 +9,10 @@
.. autofunction:: demes.load_asdict
.. autofunction:: demes.loads
.. autofunction:: demes.loads_asdict
+.. autofunction:: demes.load_all
.. autofunction:: demes.dump
.. autofunction:: demes.dumps
+.. autofunction:: demes.dump_all
```
## Building Demes graphs
| support multi-document YAML?
YAML permits multiple "documents" per file, using `---` dash separators. We should consider supporting this in the load/save API, to streamline how folks could use it for collections of Demes graphs (e.g. for inference).
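A sketch of how the `load_all`/`dump_all` API added in this PR could be used (the model file names are hypothetical):
```python
import demes

# Write several graphs to one multi-document YAML file; each document
# is preceded by the YAML "---" start indicator.
graphs = [demes.load("model1.yml"), demes.load("model2.yml")]
demes.dump_all(graphs, "collection.yml")

# load_all lazily yields one Graph per YAML document in the stream.
for graph in demes.load_all("collection.yml"):
    print(graph.description)
```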
| Agreed | 2021-06-22T09:01:24 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-320 | 434622fb1d60af3a5140a78fffe58bf0e3bc3d5b | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 10575db0..9414fd52 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,36 @@
# Changelog
+## 0.1.2 - 2021-06-08
+
+**New features**:
+
+- Add `Graph.migration_matrices()` to get the migration matrices for a graph.
+ ({user}`grahamgower`, {issue}`309`, {pr}`320`)
+- Add `Deme.size_at()` to get the size of a deme at a given time.
+ ({user}`grahamgower`, {issue}`312`, {pr}`314`)
+- Support "linear" as an `Epoch.size_function`.
+ ({user}`noscode`, {issue}`296`, {pr}`310`)
+- Downstream test code can now use the `demes.hypothesis_strategies.graphs()`
+ [hypothesis](https://hypothesis.readthedocs.io/) strategy to generate a
+ random `Graph`. This is preliminary, and as such is not yet documented,
+ but is used for testing internally with some success. The API may change
+ in the future in response to requests from downstream application authors.
+ ({user}`grahamgower`, {issue}`217`, {pr}`294`)
+- The string representation for a graph, `Graph.__str__()`, is now the
+ simplified YAML output.
+ ({user}`grahamgower`, {issue}`235`, {pr}`293`)
+
+**Breaking changes**:
+
+- The undocumented msprime and stdpopsim converters have been removed.
+ ({user}`grahamgower`, {issue}`313`, {pr}`316`)
+- The JSON spec doesn't allow serialising infinite float values (although the
+ Python json library does support this by default). So for JSON output we
+ instead use the string "Infinity".
+ ({user}`grahamgower`,
+ [demes-spec#70](https://github.com/popsim-consortium/demes-spec/issues/70),
+ {pr}`311`)
+
## 0.1.1 - 2021-04-21
Remove the "demes" console_scripts entry point.
diff --git a/demes/demes.py b/demes/demes.py
index deb852a0..77e2595f 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -1,4 +1,4 @@
-from typing import List, Union, Optional, Dict, MutableMapping, Any, Set
+from typing import List, Union, Optional, Dict, MutableMapping, Any, Set, Tuple
import itertools
import math
import numbers
@@ -1239,15 +1239,35 @@ def __attrs_post_init__(self):
'if time_units!="generations", generation_time must be specified'
)
- def __getitem__(self, deme_name):
+ def __getitem__(self, deme_name: Name) -> Deme:
"""
- Return the :class:`.Deme` with the specified name.
+ Get the :class:`.Deme` with the specified name.
+
+ .. code::
+
+ graph = demes.load("gutenkunst_ooa.yml")
+ yri = graph["YRI"]
+ print(yri)
+
+ :param str deme_name: The name of the deme.
+ :rtype: Deme
+ :return: The deme.
"""
return self._deme_map[deme_name]
- def __contains__(self, deme_name):
+ def __contains__(self, deme_name: Name) -> bool:
"""
Check if the graph contains a deme with the specified name.
+
+ .. code::
+
+ graph = demes.load("gutenkunst_ooa.yml")
+ if "CHB" in graph:
+ print("Deme CHB is in the graph")
+
+ :param str deme_name: The name of the deme.
+ :rtype: bool
+ :return: ``True`` if the deme is in the graph, ``False`` otherwise.
"""
return deme_name in self._deme_map
@@ -1611,10 +1631,52 @@ def _add_pulse(self, *, source, dest, proportion, time) -> Pulse:
self.pulses.append(new_pulse)
return new_pulse
- def _migration_matrices(self):
+ def migration_matrices(self) -> Tuple[List[List[List[float]]], List[Number]]:
"""
- Return a list of migration matrices, and a list of end times that
- partition them. The start time for the first matrix is inf.
+ Get the migration matrices and the end times that partition them.
+
+ Returns a list of matrices, one for each time interval
+ over which migration rates do not change, in time-descending
+ order (from most ancient to most recent). For a migration matrix list
+ :math:`M`, the migration rate is :math:`M[i][j][k]` from deme
+ :math:`k` into deme :math:`j` during the :math:`i` 'th time interval.
+ The order of the demes' indices in each matrix matches the
+ order of demes in the graph's deme list (i.e. deme :math:`j`
+ corresponds to ``Graph.demes[j]``).
+
+ There is always at least one migration matrix in the list, even when
+ the graph defines no migrations.
+
+ A list of end times to which the matrices apply is also
+ returned. Each time interval to which a migration matrix applies is an
+ open-closed interval ``(start_time, end_time]``, where the start time
+ of the first matrix is ``inf`` and the start time of each subsequent
+ matrix matches the end time of the previous matrix in the list.
+
+ .. note::
+ The last entry of the list of end times is always ``0``,
+ even when all demes in the graph go extinct before time ``0``.
+
+
+ .. code::
+
+ graph = demes.load("gutenkunst_ooa.yml")
+ mm_list, end_times = graph.migration_matrices()
+ start_times = [math.inf] + end_times[:-1]
+ assert len(mm_list) == len(end_times) == len(start_times)
+ deme_ids = {deme.name: j for j, deme in enumerate(graph.demes)}
+ j = deme_ids["YRI"]
+ k = deme_ids["CEU"]
+ for mm, start_time, end_time in zip(mm_list, start_times, end_times):
+ print(
+ f"CEU -> YRI migration rate is {mm[j][k]} during the "
+ f"time interval ({start_time}, {end_time}]"
+ )
+
+ :return: A 2-tuple of ``(mm_list, end_times)``,
+ where ``mm_list`` is a list of migration matrices,
+ and ``end_times`` is a list of end times for each matrix.
+ :rtype: tuple[list[list[list[float]]], list[float]]
"""
uniq_times = set(migration.start_time for migration in self.migrations)
uniq_times.update(migration.end_time for migration in self.migrations)
@@ -1624,7 +1686,7 @@ def _migration_matrices(self):
# Extend to t=0 even when there are no migrations.
end_times.append(0)
n = len(self.demes)
- mm_list = [[[0] * n for _ in range(n)] for _ in range(len(end_times))]
+ mm_list = [[[0.0] * n for _ in range(n)] for _ in range(len(end_times))]
deme_id = {deme.name: j for j, deme in enumerate(self.demes)}
for migration in self.migrations:
start_time = math.inf
@@ -1640,7 +1702,7 @@ def _migration_matrices(self):
f"source={migration.source}, dest={migration.dest} "
f"between start_time={start_time}, end_time={end_time}"
)
- mm_list[k][dest_id][source_id] = migration.rate
+ mm_list[k][dest_id][source_id] = float(migration.rate)
start_time = end_time
return mm_list, end_times
@@ -1650,7 +1712,7 @@ def _check_migration_rates(self):
deme in any interval of time.
"""
start_time = math.inf
- mm_list, end_times = self._migration_matrices()
+ mm_list, end_times = self.migration_matrices()
for migration_matrix, end_time in zip(mm_list, end_times):
for j, row in enumerate(migration_matrix):
row_sum = sum(row)
diff --git a/docs/api.md b/docs/api.md
index e186b4f2..33dd5324 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -26,6 +26,10 @@
.. autoclass:: demes.Graph
:members:
+ .. automethod:: __contains__
+
+ .. automethod:: __getitem__
+
.. autoclass:: demes.Deme
:members:
| make Graph.migration_matrices() part of the API
At the moment we have Graph._migration_matrices(), which returns a tuple `(mm_list, end_times)`, where `mm_list` is a list of migration matrices and `end_times` is a list of the end times for each of the matrices (the initial start time for the first matrix is always inf). I recently copied this code into another project because I wanted to use it (and found a bug, hence #308). This is probably useful to others, so it would be nice to export this as part of the API.
But maybe we want to modify the interface? Or nail down what to do for edge cases, like when there are no migrations. Right now there's also no guarantee that `mm_list[i] != mm_list[i-1]` --- do we care?
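For concreteness, a usage sketch of the exported method (mirroring the docstring example in the patch; the input file is hypothetical):
```python
import math
import demes

graph = demes.load("gutenkunst_ooa.yml")  # hypothetical input file
mm_list, end_times = graph.migration_matrices()
start_times = [math.inf] + end_times[:-1]
for mm, start, end in zip(mm_list, start_times, end_times):
    # mm[j][k] is the rate from deme k into deme j (forwards in time),
    # with deme indices matching the order of graph.demes.
    print(f"interval ({start}, {end}]: {mm}")
```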
| If you've found use for it elsewhere, it's probably useful to others, so makes sense to me to export as part of the API.
If there are no migrations, it should just return a single migration matrix filled with zeros, with the single end time being 0, right?
The behavior is a bit strange to me, because it returns rows and columns in the migration matrices for demes that are not "active" over those epochs, instead of the migration matrix for only active demes. I think that's the right output for the msprime conversion, for example, but the moments methods only care about active demes over graph epochs defined by changes in parameter values. So if it's made part of the API, that should be documented pretty thoroughly I think. Also, is the row/column order of the demes guaranteed to be consistent, and should that ordered list of deme names matching the migration matrices also be returned?
The row/column order here is guaranteed to match the order of the demes in the graph. That ordering is the easiest to implement, and ought to be the least surprising (assuming all demes get a row/column at all times). It maps to the way msprime works, because the deme IDs/indexes are constant throughout the simulation, regardless of how many other demes are alive at a given time. Likewise for SLiM, where I'm wanting to use this (just in the test code actually).
Outputting a list of migration matrices, where each matrix may have different dimensions, would then necessitate returning the list of active demes for each matrix. It would also need to add a bunch more time breakpoints to the `end_times` list, to explicitly account for when demes are created and go extinct. So it sounds like this function doesn't map well to the moments API. I guess you just have indexes into the SFS array, and the array changes dimensions over time?
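For what it's worth, a caller with moments-like needs could derive per-interval submatrices of active demes from the full matrices; a sketch, assuming `graph` is a resolved `demes.Graph` and relying on the fixed row/column ordering described above:
```python
import math

mm_list, end_times = graph.migration_matrices()
start_times = [math.inf] + end_times[:-1]
for mm, start, end in zip(mm_list, start_times, end_times):
    # A deme is active in (end, start] if its own existence interval
    # overlaps that open-closed time window.
    active = [
        j
        for j, deme in enumerate(graph.demes)
        if deme.start_time > end and deme.end_time < start
    ]
    sub_mm = [[mm[j][k] for k in active] for j in active]
```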
It would be nice if we could satisfy the requirements of moments too. What does the fwdpy conversion do? Does it just use the list of `Migration` objects directly and not need migration matrices?
I think the current behavior makes the most sense, and don't really suggest complicating it more. Was just clarifying, and I think the moments functionality is a bit of a corner case and probably less useful for the broader API. As is, I think it makes sense. | 2021-06-04T15:41:02 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-315 | a040a4f4a89bc9a164147e0ee4937f955a467132 | diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 00000000..bd4e3601
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,3 @@
+ignore:
+ # Coverage of the hypothesis strategy is not deterministic.
+ - demes/hypothesis_strategies.py
diff --git a/demes/hypothesis_strategies.py b/demes/hypothesis_strategies.py
index 4b89aa60..23478626 100644
--- a/demes/hypothesis_strategies.py
+++ b/demes/hypothesis_strategies.py
@@ -129,6 +129,8 @@ def epochs_lists(
end_size = draw(
st.floats(min_value=min_deme_size, max_value=max_deme_size)
)
+ if end_size == start_size:
+ size_function = "constant"
cloning_rate = draw(st.floats(min_value=0, max_value=1))
selfing_rate = draw(st.floats(min_value=0, max_value=prec32(1 - cloning_rate)))
| hypothesis strategy: set size_function="constant" if start_size==end_size
See failure here: https://github.com/popsim-consortium/demes-python/pull/316/checks?check_run_id=2744849403
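In other words, the epoch draw needs a guard like the following sketch (variable names follow the patch; the round-trip failure mode is my reading of the linked CI run):
```python
# Inside the epochs_lists() strategy: if the two independently drawn
# sizes happen to coincide, the epoch must be labelled "constant",
# otherwise the generated graph need not survive a simplify/resolve
# round trip.
start_size = draw(st.floats(min_value=min_deme_size, max_value=max_deme_size))
end_size = draw(st.floats(min_value=min_deme_size, max_value=max_deme_size))
size_function = "exponential"
if end_size == start_size:
    size_function = "constant"
```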
| 2021-06-04T07:54:33 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-294 | d200bb4923e0ac35b3ff3ab33da01f3e21975836 | diff --git a/tests/__init__.py b/demes/hypothesis_strategies.py
similarity index 81%
rename from tests/__init__.py
rename to demes/hypothesis_strategies.py
index e1475c9c..9914c2ca 100644
--- a/tests/__init__.py
+++ b/demes/hypothesis_strategies.py
@@ -1,14 +1,37 @@
import math
import itertools
import collections
+import struct
-import numpy as np
import hypothesis as hyp
import hypothesis.strategies as st
import demes
+def prec32(x):
+ """truncate x to the nearest single-precision floating point number"""
+ return struct.unpack("f", struct.pack("f", x))[0]
+
+
+# Limits for the floating point numbers we'll draw.
+#
+# We wish to be more restrictive with the allowable range than the limits
+# provided by floating-point types, to avoid doing arithmetic on numbers at
+# those floating point limits. Values near the limits are not useful for
+# demographic models in practice, so we don't want to generate models that
+# require applications to deal with floating point underflow and overflow.
+# On the other hand, we also don't want to enforce artificial limits in the
+# Demes spec for things like time values or deme sizes.
+#
+# The numbers below are sufficiently conservative so as to avoid underflow
+# and overflow during arithmetic (although this is not guaranteed),
+# but not so conservative that the randomly generated models won't catch a
+# variety of errors in downstream application code.
+FLOAT_MAX = prec32(1e30)
+FLOAT_EPS = prec32(1e-6)
+
+
@st.composite
def deme_names(draw, max_length=20):
"""
@@ -60,7 +83,13 @@ def yaml_strings(draw, min_size=1, max_size=100):
@st.composite
-def epochs_lists(draw, start_time=math.inf, max_epochs=5):
+def epochs_lists(
+ draw,
+ start_time=math.inf,
+ max_epochs=5,
+ min_deme_size=FLOAT_EPS,
+ max_deme_size=FLOAT_MAX,
+):
"""
A hypothesis strategy for creating lists of Epochs for a deme.
@@ -70,7 +99,12 @@ def epochs_lists(draw, start_time=math.inf, max_epochs=5):
assert max_epochs >= 2
times = draw(
st.lists(
- st.floats(min_value=0, max_value=start_time, exclude_max=True),
+ st.floats(
+ min_value=0,
+ max_value=min(FLOAT_MAX, start_time),
+ exclude_max=True,
+ width=32,
+ ),
unique=True,
min_size=1,
max_size=max_epochs,
@@ -80,15 +114,11 @@ def epochs_lists(draw, start_time=math.inf, max_epochs=5):
epochs = []
for i, end_time in enumerate(times):
- start_size = draw(
- st.floats(min_value=0, exclude_min=True, allow_infinity=False)
- )
+ start_size = draw(st.floats(min_value=min_deme_size, max_value=max_deme_size))
if i == 0 and math.isinf(start_time):
end_size = start_size
else:
- end_size = draw(
- st.floats(min_value=0, exclude_min=True, allow_infinity=False)
- )
+ end_size = draw(st.floats(min_value=min_deme_size, max_value=max_deme_size))
cloning_rate = draw(st.floats(min_value=0, max_value=1))
selfing_rate = draw(st.floats(min_value=0, max_value=1 - cloning_rate))
@@ -134,7 +164,12 @@ def migration_matrices(
# Partition time intervals even further.
additional_times = draw(
st.lists(
- st.floats(min_value=end_times[-1], max_value=start_time, exclude_max=True),
+ st.floats(
+ min_value=end_times[-1],
+ max_value=start_time,
+ exclude_max=True,
+ width=32,
+ ),
unique=True,
min_size=0,
max_size=max_additional_migration_intervals,
@@ -228,9 +263,8 @@ def pulses_lists(draw, graph, max_pulses=10):
# We wish to draw times for the pulses. They must be in the open
# interval (time_lo, time_hi) to ensure the pulse doesn't happen
# at any deme's start_time or end_time, which could be invalid.
- # So we check there is at least one floating point number between
- # time_lo and time_hi.
- if time_hi <= np.nextafter(time_lo, np.inf, dtype=float):
+ # So we check for some breathing room between time_lo and time_hi.
+ if time_hi <= time_lo + FLOAT_EPS:
continue
n = draw(st.integers(min_value=0, max_value=n_pulses))
for _ in range(n):
@@ -243,6 +277,7 @@ def pulses_lists(draw, graph, max_pulses=10):
max_value=time_hi,
exclude_min=True,
exclude_max=True,
+ width=32,
)
)
max_proportion = 1 - ingress_proportions[(dest, time)]
@@ -254,6 +289,7 @@ def pulses_lists(draw, graph, max_pulses=10):
max_value=max_proportion,
exclude_min=True,
exclude_max=True,
+ width=32,
)
)
ingress_proportions[(dest, time)] += proportion
@@ -273,7 +309,15 @@ def pulses_lists(draw, graph, max_pulses=10):
@st.composite
-def graphs(draw, max_demes=5, max_epochs=10, max_migrations=10, max_pulses=10):
+def graphs(
+ draw,
+ max_demes=5,
+ max_epochs=10,
+ max_migrations=10,
+ max_pulses=10,
+ min_deme_size=FLOAT_EPS,
+ max_deme_size=FLOAT_MAX,
+):
"""
A hypothesis strategy for creating a Graph.
@@ -288,8 +332,12 @@ def test_something(self, graph: demes.Graph):
:param int max_epochs: The maximum number of epochs per deme.
:param int max_migrations: The maximum number of migrations in the graph.
:param int max_pulses: The maximum number of pulses in the graph.
+ :param float min_deme_size: The minimum size of a deme in any epoch.
+ :param float max_deme_size: The maximum size of a deme in any epoch.
"""
- generation_time = draw(st.none() | st.floats(min_value=1e-9, max_value=1e6))
+ generation_time = draw(
+ st.none() | st.floats(min_value=FLOAT_EPS, max_value=FLOAT_MAX)
+ )
if generation_time is None:
time_units = "generations"
else:
@@ -316,7 +364,9 @@ def test_something(self, graph: demes.Graph):
)
)
if len(anc_idx) > 0:
- time_hi = min(b.data["demes"][j]["start_time"] for j in anc_idx)
+ time_hi = min(
+ FLOAT_MAX, min(b.data["demes"][j]["start_time"] for j in anc_idx)
+ )
time_lo = max(
b.data["demes"][j]["epochs"][-1]["end_time"] for j in anc_idx
)
@@ -327,9 +377,9 @@ def test_something(self, graph: demes.Graph):
# start_time cannot be 0.
# However, there may not be any floating point numbers between
# 0 and time_hi even if time_hi > 0, so we check that time_hi
- # is greater than the smallest positive number.
+ # is greater than a small positive number.
if (time_lo > 0 and time_hi > time_lo) or (
- time_lo == 0 and time_hi > np.finfo(float).tiny
+ time_lo == 0 and time_hi > FLOAT_EPS
):
# Draw a start time and the ancestry proportions.
start_time = draw(
@@ -339,6 +389,7 @@ def test_something(self, graph: demes.Graph):
exclude_max=True,
# Can't have start_time=0.
exclude_min=time_lo == 0,
+ width=32,
)
)
ancestors = [b.data["demes"][j]["name"] for j in anc_idx]
@@ -359,7 +410,14 @@ def test_something(self, graph: demes.Graph):
description=draw(st.none() | yaml_strings()),
ancestors=ancestors,
proportions=proportions,
- epochs=draw(epochs_lists(start_time=start_time, max_epochs=max_epochs)),
+ epochs=draw(
+ epochs_lists(
+ start_time=start_time,
+ max_epochs=max_epochs,
+ min_deme_size=min_deme_size,
+ max_deme_size=max_deme_size,
+ )
+ ),
start_time=start_time,
)
@@ -371,3 +429,6 @@ def test_something(self, graph: demes.Graph):
# the migrations_lists()/pulses_lists() implementations.
graph = demes.Builder.fromdict(graph.asdict()).resolve()
return graph
+
+
+st.register_type_strategy(demes.Graph, graphs())
| hypothesis strategy generates corner cases we don't care about
Brought up when testing the msprime converter: https://github.com/tskit-dev/msprime/pull/1678.
Probably we should limit the strategy to generating sane deme sizes (i.e. `1 <= size < 1e80`), and sane event times (i.e. `1 <= times_in_generations < 1e80`).
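A sketch of how the bounds introduced by this PR could be applied from a downstream test suite (the limits shown are illustrative, not the library defaults):
```python
import hypothesis as hyp
from demes.hypothesis_strategies import graphs

# Keep deme sizes well away from the floating-point limits so that
# downstream arithmetic cannot underflow or overflow.
@hyp.given(graphs(min_deme_size=1.0, max_deme_size=1e6))
def test_my_converter(graph):
    ...  # exercise downstream code with the randomly generated graph
```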
| 2021-05-11T13:59:57 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-278 | 69d4f0dacc21bba1e8be4bbddf9e42a4141a93d6 | diff --git a/demes/demes.py b/demes/demes.py
index 4d2ef169..310dbd73 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -160,12 +160,27 @@ class Epoch:
:ivar float end_size: Population size at ``end_time``.
If ``start_size != end_size``, the population size changes
monotonically between the start and end times.
- :ivar str size_function: The size change function. Common options are
- ``"constant"`` or ``"exponential"``, though any string is valid.
+ :ivar str size_function: The size change function. This is either
+ ``constant`` or ``exponential``, though it is possible that
+ additional values will be added in the future.
+
+ * ``constant``: the deme's size does not change over the epoch.
+ * ``exponential``: the deme's size changes exponentially from
+ ``start_size`` to ``end_size`` over the epoch.
+ If :math:`t` is a time within the span of the epoch,
+ the deme size :math:`N` at :math:`t` can be calculated as:
+
+ .. code::
+
+ dt = (epoch.start_time - t) / epoch.time_span
+ r = math.log(epoch.end_size / epoch.start_size)
+ N = epoch.start_size * math.exp(r * dt)
.. warning::
- Downstream applications might not understand the size_function provided.
+ Do not assume an exponentially changing size just because
+ ``start_size != end_size``. For forwards compatibility,
+ applications should always check the ``size_function``.
:ivar float selfing_rate: The selfing rate for this epoch.
:ivar float cloning_rate: The cloning rate for this epoch.
@@ -176,7 +191,7 @@ class Epoch:
start_size: Size = attr.ib(validator=[int_or_float, positive, finite])
end_size: Size = attr.ib(validator=[int_or_float, positive, finite])
size_function: str = attr.ib(
- validator=[attr.validators.instance_of(str), nonzero_len]
+ validator=attr.validators.in_(["constant", "exponential"])
)
selfing_rate: Proportion = attr.ib(
default=0, validator=[int_or_float, unit_interval]
@@ -188,7 +203,6 @@ class Epoch:
def __attrs_post_init__(self):
if self.start_time <= self.end_time:
raise ValueError("must have start_time > end_time")
- # XXX: these tests should use math.isclose()
if math.isinf(self.start_time) and self.start_size != self.end_size:
raise ValueError("if start time is inf, must be a constant size epoch")
if self.size_function == "constant" and self.start_size != self.end_size:
@@ -1012,7 +1026,6 @@ def _add_epoch(
if end_size is None:
end_size = start_size
- # XXX: use math.isclose()?
if size_function is None:
if start_size == end_size:
size_function = "constant"
@@ -2211,8 +2224,7 @@ def add_deme(
This list has the same length as ``ancestors``, and must sum to 1.
:param float start_time: The deme's start time.
:param list[dict] epochs: List of epoch dictionaries. Each dictionary
- contains parameters to be passed to the deme's
- DemeProxy.add_epoch() method.
+ follows the data model for an epoch.
"""
deme: MutableMapping[str, Any] = dict(name=name)
if description is not None:
diff --git a/examples/linear_growth.yml b/examples/linear_growth.yml
deleted file mode 100644
index afe66879..00000000
--- a/examples/linear_growth.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-description: A single-population model with varying size change functions in different
- epochs.
-generation_time: 1
-time_units: generations
-demes:
- - name: my_pop
- description: Multi-epoch population
- epochs:
- - start_size: 1e4
- end_time: 1000
- - start_size: 1e3
- end_size: 1e4
- end_time: 100
- size_function: linear
- - start_size: 1e4
- end_size: 1e2
- end_time: 0
| nail down semantics for `size_function`
The semantics for `size_function` are not nailed down. See https://github.com/popsim-consortium/demes-spec/issues/34. We're currently allowing any value, but treating "constant" and "exponential" specially (during consistency checks, and during `Graph.asdict_simplified()`). When not specified the defaults are currently:
```python
if size_function is None:
if start_size == end_size:
size_function = "constant"
else:
size_function = "exponential"
```
As far as I'm concerned, it would be easiest if we only supported these two options, as then basically all downstream software can do something sensible. But easiest is not necessarily best. Do we want to allow any string here? Do we want to allow other values, such as "linear"? Do we permit downstream software to ignore values it doesn't understand? We currently have a warning in the docs `Warning: Downstream applications might not understand the size_function provided.`, but I find this somewhat unsatisfying --- clearly it would be best if they did.
| > Do we want to allow any string here? Do we want to allow other values, such as "linear"?
I think we want to keep the allowable entries as flexible as possible and not impose any constraints. It's also probably too difficult to guess what the complete list of allowable strings should be (["exponential", "constant", "linear", "logistic", ...]) without forgetting something that someone might want to allow. So I think allowing any string is appropriate here.
> Do we permit downstream software to ignore values it doesn't understand?
I think we can't control how downstream software use demes, so it's really up to them how they handle the possible size functions (just as with other options such as selfing rate, which not every downstream software will be able to handle)
> I think we want to keep the allowable entries as flexible as possible and not impose any constraints. It's also probably too difficult to guess what the list of allowable strings should be.. ["exponential", "constant", "linear", "logistic", ...] so that we'd be forgetting something that someone might want to allow. So I think allowing any string is appropriate here.
I think that's essentially what we have now. So we'd want a description in the spec for both "constant" and "exponential", and describe what we do when `size_function` isn't provided but `start_size != end_size` (currently defaults to "exponential"). Right now this forces downstream software to understand both "constant" and "exponential" (which is fine).
> I think we can't control how downstream software use demes, so it's really up to them how they handle the possible size functions (just as with other options such as selfing rate, which not every downstream software will be able to handle)
What's not clear to me is if we're saying "hey simulators, feel free to use this field any way you like". Or, "we're leaving this field here for the possiblity of adding additional well-defined size_function values to the spec in the future" (such as "linear" or "logistic"). I think the former is probably a bad idea, as it could lead to fragmentation in the formats accepted by different tools, when our goal is unification.
I think it's ok to have it say "feel free to use this field any way you like" as long as we mean any way you'd like to interpolate between `start_size` and `end_size`. There are some software out there that can't handle even exponential size changes, where everything is piecewise constant. And other software where you can give any functions `N(t) = lambda t: whatever` and I think erring on the side of being permissive and allowing whatever flexible function someone wants to write down is the way to go. Luckily this is a corner case and I think most users and software will be quite happy to stick with "constant" and "exponential" (maybe "linear" and "logistic") and I don't foresee fragmentation, but I don't think we want to enforce restricting size functions to that small set.
My take was to leave this field out of the initial specification, since all the downstream software we know about are assuming exponential. I think we could spill a lot of ink debating this back and forth, but until some software is actually using a particular feature it's all a bit pointless.
So, how about we leave it out and just say we assume it's exponential in the spec, and then add it back in when we find some software that needs it?
> So, how about we leave it out and just say we assume it's exponential in the spec, and then add it back in when we find some software that needs it?
But that won't be backwards-compatible. If we remove `size_function`, then downstream software assumes "exponential" when `start_size != end_size`. So if we later add `size_function` to allow "linear", then downstream software will silently do the wrong thing.
Ok, I'll do a size_function-ectomy. If we add it back in, then we have to bump the spec version (and add a spec-version field to the data model).
I agree we can defer this to later.
Sorry, I forgot to follow up on this. What's remaining to be decided for the current size function field? Your point about compatibility is a good one.
(We should add a spec_version field regardless though)
The alternative to removing `size_function` would be to clarify its usage in the spec and the docstrings to make explicit statements about which values must be supported and what an implementation should do with values it doesn't understand. Something like the following semantics:
---
The user-provided `size_function` may be any string, or may be omitted. Two special values are recognised: "constant" and "exponential". If the size_function is not specified and `start_size==end_size`, the size_function will be set to "constant". If the size_function is not specified and `start_size!=end_size`, the size_function will be set to "exponential". It is an error to specify `size_function="constant"` when `start_size!=end_size`, but permissible to set `size_function="exponential"` when `start_size==end_size` (although the latter is not recommended). Other values of size_function are permitted, but how these are to be interpreted is deliberately not defined by the spec. The spec may choose to define clear semantics for additional size_function values in the future.
Software that works with the demes data model is required to support the "constant" and "exponential" values for the size_function, and it is recommended that software raise an error for values it does not recognise. E.g. appropriate code for using the demes-python API could look like this:
```python
def size_of_deme_at_time(deme: demes.Deme, time: float) -> float:
"""
Return the population size of the deme at the given time.
"""
for epoch in deme.epochs:
if epoch.start_time >= time >= epoch.end_time:
break
else:
raise ValueError(f"deme {deme.name} doesn't exist at time {time}")
if epoch.size_function == "constant":
N = epoch.end_size
elif epoch.size_function == "exponential":
dt = (epoch.start_time - time) / epoch.time_span
r = math.log(epoch.end_size / epoch.start_size)
N = epoch.start_size * math.exp(r * dt)
else:
raise ValueError(f"unrecognised size_function: {epoch.size_function}")
return N
```
This leaves open the ability to support additional size_function values in the future. The following code is *not* recommended, because it will break in the event that new size_function values are defined in a future version of the spec.
```python
def size_of_deme_at_time(deme: demes.Deme, time: float) -> float:
"""
Return the population size of the deme at the given time.
"""
for epoch in deme.epochs:
if epoch.start_time >= time >= epoch.end_time:
break
else:
raise ValueError(f"deme {deme.name} doesn't exist at time {time}")
if epoch.start_size == epoch.end_size:
N = epoch.end_size
else:
dt = (epoch.start_time - time) / epoch.time_span
r = math.log(epoch.end_size / epoch.start_size)
N = epoch.start_size * math.exp(r * dt)
return N
```
I think we should say that it must be "constant" or "exponential", with other values optionally added in later versions of the spec, using an explicit enum of accepted values in the spec. If the value is omitted, it's assumed to be "exponential".
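For what it's worth, that enum restriction is easy to express with the `attrs` validators the library already uses; a minimal sketch (matching the validator change in the patch above):
```python
import attr

@attr.s(auto_attribs=True, kw_only=True)
class Epoch:
    # Only spec-defined values are accepted; a later spec version can
    # extend this list without breaking existing documents.
    size_function: str = attr.ib(
        validator=attr.validators.in_(["constant", "exponential"])
    )
```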
I don't think there's much point in having recommendations on setting to "constant" when start_size == end_size, it's just complicating things I think. For example, your code above would have to guard against `start_size == end_size`, since implementing programs might think they are outputting different values, but those could get rounded to the same number (e.g. during YAML conversion).
Implementations SHOULD check this value to ensure that they support the size function.
That seems straightforward enough to specify then?
That's all we can reasonably do, I think. | 2021-04-16T12:19:13 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-274 | 6e29d31490715f90c7379506728e2b47a1c60321 | diff --git a/demes/demes.py b/demes/demes.py
index 1ab38d8f..a6cdb5d6 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -1735,10 +1735,10 @@ def in_generations(self) -> "Graph":
Return a copy of the graph with times in units of generations.
"""
graph = copy.deepcopy(self)
- graph.time_units = "generations"
generation_time = self.generation_time
- if generation_time is not None:
- graph.generation_time = None
+ graph.generation_time = None
+ if graph.time_units != "generations" and generation_time is not None:
+ graph.time_units = "generations"
for deme in graph.demes:
deme.start_time /= generation_time
for epoch in deme.epochs:
| `Graph.in_generations()` assumes `time_units` are not generations if `generation_time` is given.
This is wrong, because it means the times get converted to nonsense values for, e.g. `examples/zigzag.yml` (which has `time_units=generations; generation_time=30`).
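A minimal reproduction sketch (the file path comes from the issue; the factor of 30 follows from the model's `generation_time`):
```python
import demes

# zigzag.yml already has time_units: generations, plus generation_time: 30.
graph = demes.load("examples/zigzag.yml")
converted = graph.in_generations()
# Before the fix, every time in `converted` was divided by 30, even
# though the input times were already expressed in generations.
```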
| 2021-04-15T17:05:37 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-268 | db58a7b9742a64e51f977a98538a437c7b356a34 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8847f0d4..d918b7f3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,16 @@
## 0.1.XXX - 2021-XX-XX
+**Breaking changes**:
+
+- The interpretation has been changed for symmetric migrations when the
+ `start_time` (and/or `end_time`) is not specified. Symmetric migrations are
+ now resolved separately for each pair in the list of participating demes.
+ To accommodate this semantic change, the `SymmetricMigration` class has
+ been removed, and symmetric migrations are always resolved into pairs of
+ `AsymmetricMigration` objects.
+ ({user}`grahamgower`, {issue}`263`, {pr}`268`)
+
## 0.1.0a4 - 2021-03-22
diff --git a/demes/__init__.py b/demes/__init__.py
index 1ef84fdd..a9b68d09 100644
--- a/demes/__init__.py
+++ b/demes/__init__.py
@@ -11,8 +11,6 @@
from .demes import (
Builder,
Epoch,
- Migration,
- SymmetricMigration,
AsymmetricMigration,
Pulse,
Deme,
diff --git a/demes/convert/msprime_.py b/demes/convert/msprime_.py
index c09f9ec1..9814bbaa 100644
--- a/demes/convert/msprime_.py
+++ b/demes/convert/msprime_.py
@@ -138,17 +138,9 @@ def append_migration(dest, source, start_time, end_time, rate):
rate = migration.rate
start_time = migration.end_time
end_time = migration.start_time
- if isinstance(migration, demes.AsymmetricMigration):
- dest = pop_id[migration.source]
- source = pop_id[migration.dest]
- append_migration(dest, source, start_time, end_time, rate)
- else:
- assert isinstance(migration, demes.SymmetricMigration)
- for x, y in itertools.permutations(migration.demes, 2):
- pop_x = pop_id[x]
- pop_y = pop_id[y]
- append_migration(pop_x, pop_y, start_time, end_time, rate)
- append_migration(pop_y, pop_x, start_time, end_time, rate)
+ dest = pop_id[migration.source]
+ source = pop_id[migration.dest]
+ append_migration(dest, source, start_time, end_time, rate)
# Collapse migration rate events in the same generation.
# This is not strictly needed, but usually results in fewer events.
diff --git a/demes/demes.py b/demes/demes.py
index 6052b197..1ab38d8f 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -290,25 +290,31 @@ def isclose(
@attr.s(auto_attribs=True, kw_only=True)
-class Migration:
+class AsymmetricMigration:
"""
- Parameters for continuous migration. Migration may be symmetric, in which
- case the list of demes will be specified. Alternately, migration may be
- asymmetric from one deme to another. In the latter case,
- source and destination demes follow the forwards-in-time convention,
- of migrations born in the source deme having children in the dest deme.
+ Parameters for continuous asymmetric migration.
+ The source and destination demes follow the forwards-in-time convention,
+ where migrants are born in the source deme and (potentially) have children
+ in the dest deme.
+ :ivar str source: The source deme for asymmetric migration.
+ :ivar str dest: The destination deme for asymmetric migration.
:ivar float start_time: The time at which the migration rate is activated.
:ivar float ~.end_time: The time at which the migration rate is deactivated.
- :ivar float rate: The rate of migration. Set to zero to disable
- migrations after the given time.
+ :ivar float rate: The rate of migration per generation.
"""
+ source: Name = attr.ib(
+ validator=[attr.validators.instance_of(str), valid_deme_name]
+ )
+ dest: Name = attr.ib(validator=[attr.validators.instance_of(str), valid_deme_name])
start_time: Time = attr.ib(validator=[int_or_float, non_negative])
end_time: Time = attr.ib(validator=[int_or_float, non_negative, finite])
rate: Rate = attr.ib(validator=[int_or_float, unit_interval])
def __attrs_post_init__(self):
+ if self.source == self.dest:
+ raise ValueError("source and dest cannot be the same deme")
if not (self.start_time > self.end_time):
raise ValueError("must have start_time > end_time")
@@ -323,10 +329,9 @@ def assert_close(
Raises AssertionError if the object is not equal to ``other``,
up to a numerical tolerance.
Compares values of the following attributes:
- `start_time``, ``end_time``, ``rate``.
+ ``source``, ``dest``, ``start_time``, ``end_time``, ``rate``.
- :param other: The migration to compare against.
- :type other: :class:`.Migration`
+ :param AsymmetricMigration other: The migration to compare against.
:param ret_tol: The relative tolerance permitted for numerical
comparisons. See documentation for :func:`math.isclose`
:type ret_tol: float
@@ -337,6 +342,8 @@ def assert_close(
assert (
self.__class__ is other.__class__
), f"Failed as other migration is not instance of {self.__class__} type."
+ assert self.source == other.source
+ assert self.dest == other.dest
assert math.isclose(
self.start_time, other.start_time, rel_tol=rel_tol, abs_tol=abs_tol
), f"Failed for start_time {self.start_time} != {other.start_time} (other)."
@@ -358,8 +365,7 @@ def isclose(
Returns true if the migration is equal to the ``other`` migration.
For more information see :meth:`assert_close`.
- :param other: The migration to compare against.
- :type other: :class:`.Migration`
+ :param AsymmetricMigration other: The migration to compare against.
:param ret_tol: The relative tolerance permitted for numerical
comparisons. See documentation for :func:`math.isclose`
:type ret_tol: float
@@ -378,98 +384,6 @@ def isclose(
return False
[email protected](auto_attribs=True, kw_only=True)
-class SymmetricMigration(Migration):
- """
- :ivar list[str] demes: The list of demes for symmetric migration.
- """
-
- demes: List[Name] = attr.ib(
- validator=attr.validators.deep_iterable(
- member_validator=attr.validators.and_(
- attr.validators.instance_of(str), valid_deme_name
- ),
- iterable_validator=attr.validators.instance_of(list),
- ),
- )
-
- def __attrs_post_init__(self):
- super().__attrs_post_init__()
- if len(self.demes) < 2:
- raise ValueError("must have at least 2 demes for symmetric migration")
- if len(self.demes) != len(set(self.demes)):
- raise ValueError("demes for symmetric migration must be unique")
-
- def assert_close(
- self,
- other,
- *,
- rel_tol=_ISCLOSE_REL_TOL,
- abs_tol=_ISCLOSE_ABS_TOL,
- ):
- """
- Raises AssertionError if the object is not equal to ``other``,
- up to a numerical tolerance.
- Compares values of the following attributes:
- ``demes``, ``start_time``, ``end_time``, ``rate``.
-
- :param other: The migration to compare against.
- :type other: :class:`.Migration`
- :param ret_tol: The relative tolerance permitted for numerical
- comparisons. See documentation for :func:`math.isclose`
- :type ret_tol: float
- :param abs_tol: The absolute tolerance permitted for numerical
- comparisons. See documentation for :func:`math.isclose`.
- :type abs_tol: float
- """
- super().assert_close(other, rel_tol=rel_tol, abs_tol=abs_tol)
- assert sorted(self.demes) == sorted(other.demes)
-
-
[email protected](auto_attribs=True, kw_only=True)
-class AsymmetricMigration(Migration):
- """
- :ivar str source: The source deme for asymmetric migration.
- :ivar str dest: The destination deme for asymmetric migration.
- """
-
- source: Name = attr.ib(
- validator=[attr.validators.instance_of(str), valid_deme_name]
- )
- dest: Name = attr.ib(validator=[attr.validators.instance_of(str), valid_deme_name])
-
- def __attrs_post_init__(self):
- super().__attrs_post_init__()
- if self.source == self.dest:
- raise ValueError("source and dest cannot be the same deme")
-
- def assert_close(
- self,
- other,
- *,
- rel_tol=_ISCLOSE_REL_TOL,
- abs_tol=_ISCLOSE_ABS_TOL,
- ):
- """
- Raises AssertionError if the object is not equal to ``other``,
- up to a numerical tolerance.
- Compares values of the following attributes:
- ``source``, ``dest``, ``start_time``, ``end_time``, ``rate``.
-
- :param other: The migration to compare against.
- :type other: :class:`.Migration`
- :param ret_tol: The relative tolerance permitted for numerical
- comparisons. See documentation for :func:`math.isclose`
- :type ret_tol: float
- :param abs_tol: The absolute tolerance permitted for numerical
- comparisons. See documentation for :func:`math.isclose`.
- :type abs_tol: float
- """
- super().assert_close(other, rel_tol=rel_tol, abs_tol=abs_tol)
- assert self.source == other.source
- assert self.dest == other.dest
-
-
@attr.s(auto_attribs=True, kw_only=True)
class Pulse:
"""
@@ -1235,7 +1149,8 @@ class Graph:
:ivar list[str] doi: If the graph describes a published demography,
the DOI(s) should be be given here as a list.
:ivar list[Deme] demes: The demes in the demography.
- :ivar list[Migration] migrations: The continuous migrations for the demography.
+ :ivar list[AsymmetricMigration] migrations: The continuous migrations for
+ the demographic model.
:ivar list[Pulse] pulses: The migration pulses for the demography.
"""
@@ -1260,7 +1175,7 @@ class Graph:
),
)
demes: List[Deme] = attr.ib(factory=list, init=False)
- migrations: List[Migration] = attr.ib(factory=list, init=False)
+ migrations: List[AsymmetricMigration] = attr.ib(factory=list, init=False)
pulses: List[Pulse] = attr.ib(factory=list, init=False)
def __attrs_post_init__(self):
@@ -1331,35 +1246,12 @@ def assert_sorted_eq(aa, bb, *, rel_tol, abs_tol, name):
assert_sorted_eq(
self.demes, other.demes, rel_tol=rel_tol, abs_tol=abs_tol, name="demes"
)
- # Compare asymmetric and symmetric migrations separately.
- assert_sorted_eq(
- [m for m in self.migrations if isinstance(m, AsymmetricMigration)],
- [m for m in other.migrations if isinstance(m, AsymmetricMigration)],
- rel_tol=rel_tol,
- abs_tol=abs_tol,
- name="asymmetric migrations",
- )
- # Symmetric migrations are special, in the sense that they contain lists
- # of demes, and we must first sort the list of demes before sorting the
- # list of SymmetricMigration objects.
- self_migrations_sym = []
- other_migrations_sym = []
- for m in self.migrations:
- if isinstance(m, SymmetricMigration):
- m = copy.deepcopy(m)
- m.demes.sort()
- self_migrations_sym.append(m)
- for m in other.migrations:
- if isinstance(m, SymmetricMigration):
- m = copy.deepcopy(m)
- m.demes.sort()
- other_migrations_sym.append(m)
assert_sorted_eq(
- self_migrations_sym,
- other_migrations_sym,
+ self.migrations,
+ other.migrations,
rel_tol=rel_tol,
abs_tol=abs_tol,
- name="symmetric migrations",
+ name="migrations",
)
assert_sorted_eq(
self.pulses,
@@ -1511,7 +1403,7 @@ def _check_time_intersection(self, deme1, deme2, time):
if time is not None:
if not (time_lo <= time <= time_hi):
raise ValueError(
- f"{time} not in interval [{time_lo}, {time_hi}], "
+ f"time {time} not in interval [{time_lo}, {time_hi}], "
f"as defined by the time-intersection of {deme1.name} "
f"(start_time={deme1.start_time}, end_time={deme1.end_time}) "
f"and {deme2.name} (start_time={deme2.start_time}, "
@@ -1519,31 +1411,9 @@ def _check_time_intersection(self, deme1, deme2, time):
)
return time_lo, time_hi
- def _check_overlapping_migrations(self, *, source, dest, start_time, end_time):
- for migration in self.migrations:
- if (
- isinstance(migration, SymmetricMigration)
- and source in migration.demes
- and dest in migration.demes
- ) or (
- isinstance(migration, AsymmetricMigration)
- and source == migration.source
- and dest == migration.dest
- ):
- if (
- start_time >= migration.start_time > end_time
- or start_time > migration.end_time >= end_time
- or migration.start_time >= start_time > migration.end_time
- or migration.start_time > end_time >= migration.end_time
- ):
- raise ValueError(
- "new migration overlaps exisiting migration "
- f"between {source} and {dest}"
- )
-
def _add_symmetric_migration(
self, *, demes, rate, start_time=None, end_time=None
- ) -> SymmetricMigration:
+ ) -> List[AsymmetricMigration]:
"""
Add continuous symmetric migrations between all pairs of demes in a list.
@@ -1553,28 +1423,21 @@ def _add_symmetric_migration(
:param float start_time: The time at which the migration rate is enabled.
:param float end_time: The time at which the migration rate is disabled.
:return: List of newly created migrations.
- :rtype: list[SymmetricMigration]
+ :rtype: list[AsymmetricMigration]
"""
if not isinstance(demes, list) or len(demes) < 2:
raise ValueError("must specify a list of two or more deme names")
- if start_time is None:
- start_time = min(self[deme_name].start_time for deme_name in demes)
- if end_time is None:
- end_time = max(self[deme_name].end_time for deme_name in demes)
+ migrations = []
for source, dest in itertools.permutations(demes, 2):
- self._check_time_intersection(source, dest, start_time)
- self._check_time_intersection(source, dest, end_time)
- self._check_overlapping_migrations(
- source=source, dest=dest, start_time=start_time, end_time=end_time
+ migration = self._add_asymmetric_migration(
+ source=source,
+ dest=dest,
+ rate=rate,
+ start_time=start_time,
+ end_time=end_time,
)
- migration = SymmetricMigration(
- demes=demes,
- start_time=start_time,
- end_time=end_time,
- rate=rate,
- )
- self.migrations.append(migration)
- return migration
+ migrations.append(migration)
+ return migrations
def _add_asymmetric_migration(
self, *, source, dest, rate, start_time=None, end_time=None
@@ -1609,9 +1472,6 @@ def _add_asymmetric_migration(
end_time = time_lo
else:
self._check_time_intersection(source, dest, end_time)
- self._check_overlapping_migrations(
- source=source, dest=dest, start_time=start_time, end_time=end_time
- )
migration = AsymmetricMigration(
source=source,
dest=dest,
@@ -1712,15 +1572,15 @@ def _migration_matrices(self):
if start_time <= migration.end_time:
break
if end_time < migration.start_time:
- if isinstance(migration, AsymmetricMigration):
- source_id = deme_id[migration.source]
- dest_id = deme_id[migration.dest]
- mm_list[k][dest_id][source_id] = migration.rate
- else:
- for source, dest in itertools.permutations(migration.demes, 2):
- source_id = deme_id[source]
- dest_id = deme_id[dest]
- mm_list[k][dest_id][source_id] = migration.rate
+ source_id = deme_id[migration.source]
+ dest_id = deme_id[migration.dest]
+ if mm_list[k][dest_id][source_id] > 0:
+ raise ValueError(
+ "multiple migrations defined for "
+ f"source={migration.source}, dest={migration.dest} "
+ f"between start_time={start_time}, end_time={end_time}"
+ )
+ mm_list[k][dest_id][source_id] = migration.rate
start_time = end_time
return mm_list, end_times
@@ -2115,7 +1975,7 @@ def fromdict(cls, data: MutableMapping[str, Any]) -> "Graph":
def asdict(self) -> MutableMapping[str, Any]:
"""
- Return a dict representation of the graph.
+ Return a fully-resolved dict representation of the graph.
"""
def filt(_attrib, val):
@@ -2140,10 +2000,6 @@ def coerce_numbers(inst, attribute, value):
for deme in data["demes"]:
for epoch in deme["epochs"]:
del epoch["start_time"]
- if epoch["selfing_rate"] == 0:
- del epoch["selfing_rate"]
- if epoch["cloning_rate"] == 0:
- del epoch["cloning_rate"]
return data
def asdict_simplified(self) -> MutableMapping[str, Any]:
@@ -2162,6 +2018,10 @@ def simplify_epochs(data):
del epoch["size_function"]
if epoch["start_size"] == epoch["end_size"]:
del epoch["end_size"]
+ if epoch["selfing_rate"] == 0:
+ del epoch["selfing_rate"]
+ if epoch["cloning_rate"] == 0:
+ del epoch["cloning_rate"]
for deme in data["demes"]:
# remove implied start times
@@ -2175,22 +2035,96 @@ def simplify_epochs(data):
def simplify_migration_rates(data):
"""
- Remove redundant information
+ Collapse symmetric migration rates, and remove redundant information
about start and end times if they are implied by the time overlap
interval of the demes involved.
+
+ To collapse symmetric migrations, we collect all source/dest migration
+ pairs for each set of migration attributes (rate, start_time, end_time),
+ and then iteratively check for all-way symmetric migration between all
+ demes that are involved in migrations for the given set of migration
+ attributes.
"""
+ def collapse_demes(pairs):
+ all_demes = []
+ for pair in pairs:
+ if pair[0] not in all_demes:
+ all_demes.append(pair[0])
+ if pair[1] not in all_demes:
+ all_demes.append(pair[1])
+ return all_demes
+
+ symmetric = []
+ asymmetric = data["migrations"].copy()
+ # first remove start/end times if equal time intersections
+ rate_sets = {}
+ # keys of rate_types are (rate, start_time, end_time)
for migration in data["migrations"]:
- demes = migration.get("demes", [])
- if len(demes) == 0:
- demes = [migration["source"], migration["dest"]]
-
- time_lo = min(self[deme_name].start_time for deme_name in demes)
- time_hi = max(self[deme_name].end_time for deme_name in demes)
- if migration["start_time"] == time_lo:
- del migration["start_time"]
- if migration["end_time"] == time_hi:
+ source = migration["source"]
+ dest = migration["dest"]
+ time_hi = min(self[source].start_time, self[dest].start_time)
+            time_lo = max(self[source].end_time, self[dest].end_time)
+ if migration["end_time"] == time_lo:
del migration["end_time"]
+ if migration["start_time"] == time_hi:
+ del migration["start_time"]
+ k = tuple(
+ migration.get(key) for key in ("rate", "start_time", "end_time")
+ )
+ rate_sets.setdefault(k, [])
+ rate_sets[k].append((source, dest))
+
+ for k, pairs in rate_sets.items():
+ if len(pairs) == 1:
+ continue
+ # list of all demes that are source or dest in this rate set
+ all_demes = collapse_demes(pairs)
+
+ # we check all possible sets of n-way symmetric migration
+ i = len(all_demes)
+ while len(all_demes) >= 2 and i >= 2:
+ # loop through each possible set for a given set size i
+ compress_demes = False
+ for deme_set in itertools.combinations(all_demes, i):
+ # check if all (source, dest) pairs exist in pairs of migration
+ all_present = True
+ for deme_pair in itertools.permutations(deme_set, 2):
+ if deme_pair not in pairs:
+ all_present = False
+ break
+ # if they do all exist
+ if all_present:
+ compress_demes = True
+ # remove from asymmetric list
+ for deme_pair in itertools.permutations(deme_set, 2):
+ mig = {
+ "source": deme_pair[0],
+ "dest": deme_pair[1],
+ "rate": k[0],
+ }
+ if k[1] is not None:
+ mig["start_time"] = k[1]
+ if k[2] is not None:
+ mig["end_time"] = k[2]
+ asymmetric.remove(mig)
+ pairs.remove(deme_pair)
+ # add to symmetric list
+ sym_mig = dict(demes=[d for d in deme_set], rate=k[0])
+ if k[1] is not None:
+ sym_mig["start_time"] = k[1]
+ if k[2] is not None:
+ sym_mig["end_time"] = k[2]
+ symmetric.append(sym_mig)
+ # if we found a set of symmetric migrations, compress all_demes
+ if compress_demes:
+ all_demes = collapse_demes(pairs)
+ i = min(i, len(all_demes))
+ # otherwise, check one set size smaller
+ else:
+ i -= 1
+
+ data["migrations"] = symmetric + asymmetric
data = self.asdict()
diff --git a/docs/api.md b/docs/api.md
index a2b39718..e30d2af0 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -36,14 +36,8 @@
## Continuous demographic events
```{eval-rst}
-.. autoclass:: demes.Migration
- :members:
-
.. autoclass:: demes.AsymmetricMigration
:members:
-
-.. autoclass:: demes.SymmetricMigration
- :members:
```
## Discrete demographic events
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 02baaffe..b57dfb1c 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -1,3 +1,3 @@
-jupyter-book==0.10.1
+jupyter-book==0.10.2
sphinx==3.5.3
sphinx_issues==1.2.0
| nail down semantics for many-deme symmetric migrations
The semantics for many-deme symmetric migrations are not clear. https://github.com/popsim-consortium/demes-spec/issues/55. I'm leaning towards making this an error as Jerome suggests in the spec issue. There are probably some tricky details to be worked out here, with regard to the `Graph.asdict_simplified()` code --- I yanked out the migration simplification code again a few weeks back (sorry Aaron!), in part because it didn't fit with the splitting into two migration classes, but also because of the lack of clarity here. It would be really great to have the migration simplification code back.
| I think at this point we should err on the side of simplicity - anything we can do without in the initial version that we're not 100% happy with, just take it out. It's easy enough to update a spec later with extra things, but it's hellishly hard to change things that are slightly wrong.
(By leave out, I mean make it an error. It's OK to make something that used to be an error condition have well defined semantics, less OK to go the other way around)
Yep, I agree that this is the most reasonable step to take. So to be concrete: for symmetric migrations where the migration `start_time` (or `end_time`) isn't specified, then all participating demes *must* have the same `start_time` (or `end_time`), otherwise an error is raised.
This will, unfortunately, lead to some repetition of time values for some models. E.g. the following will be an error, as the migration should also have `start_time: 100`.
```yaml
time_units: generations
defaults:
epoch: {start_size: 1000}
demes:
- name: A
- name: B
ancestors: ["A"]
start_time: 100
migrations:
- demes: [A, B]
rate: 1e-5
```
> E.g. the following will be an error, as the migration should also have `start_time: 100`.
Hmm, I don't really like that.. Can we check that start/end times align only when _more_ than two demes are involved in the migrations? Otherwise, that's a pretty tedious condition for specifying the demography.
> Hmm, I don't really like that.. Can we check that start/end times align only when _more_ than two demes are involved in the migrations? Otherwise, that's a pretty tedious condition for specifying the demography.
I think having different rules for n-way migrations when n=2 compared to n>2 is probably a misstep. Perhaps we really do want to do pairwise resolution of n-way symmetric migrations? In this scheme, each n-way migration without a `start_time` (or `end_time`) is "resolved" by breaking it into multiple pairwise migrations with `start_time=min(deme1.start_time, deme2.start_time)` (or `end_time=max(deme1.end_time, deme2.end_time)`). `demes-python` had this behaviour last year.
The resolution process is simple enough, but only if we don't try to maintain n-way symmetric migrations after resolution---the simplest thing to do is to resolve everything into asymmetric migrations (which we were doing previously). So I guess the Python API here would really need to supply both "sparse" (list of asymmetric migrations) and "dense" versions (list of migration matrices). Having a `SymmetricMigration` class internally actually introduces more complexity (e.g. https://github.com/popsim-consortium/demes-python/blob/main/demes/demes.py#L1342-L1363, #216). Aaron's excellent migration simplification code can then be put back into `asdict_simplified()` almost verbatim.
Sounds reasonable to me @grahamgower - saying that symmetric migrations are really just a thin wrapper around the pairwise asymmetric migrations is a good way of reducing complexity. | 2021-04-05T08:50:34 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-249 | 62892f60036d67553cfa30cd37d9e795ef3e5425 | diff --git a/demes/demes.py b/demes/demes.py
index ce999605..9f7cf682 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -314,7 +314,7 @@ class Migration:
start_time: Time = attr.ib(validator=[int_or_float, non_negative])
end_time: Time = attr.ib(validator=[int_or_float, non_negative, finite])
- rate: Rate = attr.ib(validator=[int_or_float, non_negative, finite])
+ rate: Rate = attr.ib(validator=[int_or_float, unit_interval])
def __attrs_post_init__(self):
if not (self.start_time > self.end_time):
@@ -498,7 +498,7 @@ class Pulse:
validator=[attr.validators.instance_of(str), valid_deme_name]
)
dest: Name = attr.ib(validator=[attr.validators.instance_of(str), valid_deme_name])
- time: Time = attr.ib(validator=[int_or_float, non_negative, finite])
+ time: Time = attr.ib(validator=[int_or_float, positive, finite])
proportion: Proportion = attr.ib(validator=[int_or_float, unit_interval])
def __attrs_post_init__(self):
@@ -1656,6 +1656,14 @@ def _add_pulse(self, *, source, dest, proportion, time) -> Pulse:
if deme_name not in self:
raise ValueError(f"{deme_name} not in graph")
self._check_time_intersection(source, dest, time)
+ if time == self[dest].end_time:
+ raise ValueError(
+ f"invalid pulse at time={time}, which is dest={dest}'s end_time"
+ )
+ if time == self[source].start_time:
+ raise ValueError(
+ f"invalid pulse at time={time}, which is source={source}'s start_time"
+ )
# Check for models that have multiple pulses defined at the same time.
# E.g. chains of pulses like: deme0 -> deme1; deme1 -> deme2,
@@ -1688,6 +1696,53 @@ def _add_pulse(self, *, source, dest, proportion, time) -> Pulse:
self.pulses.append(pulse)
return pulse
+ def _migration_matrices(self):
+ """
+ Return a list of migration matrices, and a list of end times that
+ partition them. The start time for the first matrix is inf.
+ """
+ uniq_times = set(migration.start_time for migration in self.migrations)
+ uniq_times.update(migration.end_time for migration in self.migrations)
+ end_times = sorted(uniq_times, reverse=True)[1:]
+ n = len(self.demes)
+ mm_list = [[[0] * n for _ in range(n)] for _ in range(len(end_times))]
+ deme_id = {deme.name: j for j, deme in enumerate(self.demes)}
+ for migration in self.migrations:
+ start_time = math.inf
+ for k, end_time in enumerate(end_times):
+ if start_time <= migration.end_time:
+ break
+ if end_time < migration.start_time:
+ if isinstance(migration, AsymmetricMigration):
+ source_id = deme_id[migration.source]
+ dest_id = deme_id[migration.dest]
+ mm_list[k][dest_id][source_id] = migration.rate
+ else:
+ for source, dest in itertools.permutations(migration.demes, 2):
+ source_id = deme_id[source]
+ dest_id = deme_id[dest]
+ mm_list[k][dest_id][source_id] = migration.rate
+ start_time = end_time
+ return mm_list, end_times
+
+ def _check_migration_rates(self):
+ """
+ Check that the sum of migration ingress rates doesn't exceed 1 for any
+ deme in any interval of time.
+ """
+ start_time = math.inf
+ mm_list, end_times = self._migration_matrices()
+ for migration_matrix, end_time in zip(mm_list, end_times):
+ for j, row in enumerate(migration_matrix):
+ row_sum = sum(row)
+ if row_sum > 1 and not math.isclose(row_sum, 1):
+ name = self.demes[j].name
+ raise ValueError(
+ f"sum of migration rates into deme {name} is greater "
+ f"than 1 during interval ({start_time}, {end_time}]"
+ )
+ start_time = end_time
+
def successors(self) -> Dict[Name, List[Name]]:
"""
Returns the successors (child demes) for all demes in the graph.
@@ -1991,6 +2046,8 @@ def fromdict(cls, data: MutableMapping[str, Any]):
raise e.__class__(f"migration[{i}]: invalid migration") from e
check_empty(migration_data, f"migration[{i}]")
+ graph._check_migration_rates()
+
check_defaults(
pulse_defaults, ["source", "dest", "time", "proportion"], "defaults: pulse"
)
| enforce an upper bound for the migration rate
Once we know what units we're using, we should be able to figure out the upper bound. https://github.com/popsim-consortium/demes-spec/issues/21
| There's a long history in the theoretical literature of defining migration rates as ancestry proportions: Freddy Christiansen's papers, Malecot, etc.
Sorry, I'm still being dense. Does this mean we should use big `M` (so our upper bound is 1) or little `m` (our upper bound is the ratio of population sizes) from https://tskit.dev/msprime/docs/latest/demography.html#definitions ?
So it means they are fractions. I guess lower case `m`, where `m_ij` is the proportion of deme `j` whose ancestry is from deme `i`, meaning their parents were drawn from `i`.
I'm assuming the denominator is "per generation", which I should be explicit about.
I think what we'd want is that the sum of incoming migration rates/probabilities is bounded by one, since, as Kevin said, we want to think of these as per-generation ancestry proportions. That's a bit trickier to check, because we allow epochs of migration and would need to loop through the migrations somehow.
Does this mean we don't need to change anything except to enforce `migration.rate <= 1`? Can we just do that, and let downstream simulators enforce that `sum(matrix_row) <= 1`?
And clearly someone who isn't me needs to write the docs on this stuff!
> Does this mean we don't need to change anything except to enforce `migration.rate <= 1`? Can we just do that, and let downstream simulators enforce that `sum(matrix_row)<=1`?
>
> And clearly someone who isn't me needs to write the docs on this stuff!
Ideally, we'd somehow loop through the migration matrix over time and assert that row sums are at most 1. That check would be redundant with what a simulator does, but it'd be user-friendly to catch this at the YAML/graph stage.
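A rough sketch of that check, simplified from the `_check_migration_rates()` method in the patch above (here `mm_list` is one migration matrix per time interval, with entries indexed as `m[dest][source]`):
```python
import math

def check_row_sums(mm_list, deme_names):
    # each row collects the ingress rates into one deme for one interval
    for matrix in mm_list:
        for j, row in enumerate(matrix):
            row_sum = sum(row)
            if row_sum > 1 and not math.isclose(row_sum, 1):
                raise ValueError(
                    f"sum of migration rates into deme {deme_names[j]} "
                    "is greater than 1"
                )
```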
Ok, thanks for the input! Here's what I'm taking away from this:
1. The way we're *using* migration rates in demes is currently consistent with expected ancestry proportions per generation (inasmuch as we don't convert units when going to/from msprime and moments). And we want to keep it this way.
2. A "continuous" migration spanning one generation is equivalent to a pulse, if they have the same rate/proportion and source/dest. [EDIT: assuming a discrete-time framework]
3. We really should construct migration matrices and enforce the upper limit properly. This should be fine, as there's already code to make the migration matrices inside `demes.convert.to_msprime()`.
4. In light of 3., we might consider exporting a migration matrix generating function in the API (but this is low priority).
Yeah, this sounds right. The equivalence to a pulse is an interesting thing for the simulators to wrestle with, in terms of what they do w/a demes Graph.
Cool. I should have clarified that I'd only expect migration/pulse equivalence for discrete-time simulation. | 2021-03-19T20:07:05 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-247 | 62892f60036d67553cfa30cd37d9e795ef3e5425 | diff --git a/demes/demes.py b/demes/demes.py
index ce999605..cb7cf9cb 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -498,7 +498,7 @@ class Pulse:
validator=[attr.validators.instance_of(str), valid_deme_name]
)
dest: Name = attr.ib(validator=[attr.validators.instance_of(str), valid_deme_name])
- time: Time = attr.ib(validator=[int_or_float, non_negative, finite])
+ time: Time = attr.ib(validator=[int_or_float, positive, finite])
proportion: Proportion = attr.ib(validator=[int_or_float, unit_interval])
def __attrs_post_init__(self):
@@ -1656,6 +1656,14 @@ def _add_pulse(self, *, source, dest, proportion, time) -> Pulse:
if deme_name not in self:
raise ValueError(f"{deme_name} not in graph")
self._check_time_intersection(source, dest, time)
+ if time == self[dest].end_time:
+ raise ValueError(
+ f"invalid pulse at time={time}, which is dest={dest}'s end_time"
+ )
+ if time == self[source].start_time:
+ raise ValueError(
+ f"invalid pulse at time={time}, which is source={source}'s start_time"
+ )
# Check for models that have multiple pulses defined at the same time.
# E.g. chains of pulses like: deme0 -> deme1; deme1 -> deme2,
| check/fix pulse time edge cases
Discussion in spec repo here: https://github.com/popsim-consortium/demes-spec/issues/22
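For reference, the boundary cases settled on in the patch above: a pulse must not coincide with the instant the dest deme stops existing, nor with the instant the source deme starts existing. Pulled out as a standalone sketch (the helper name is hypothetical):
```python
def check_pulse_time(graph, source, dest, time):
    # checks excerpted from Graph._add_pulse() in the patch above
    if time == graph[dest].end_time:
        raise ValueError(
            f"invalid pulse at time={time}, which is dest={dest}'s end_time"
        )
    if time == graph[source].start_time:
        raise ValueError(
            f"invalid pulse at time={time}, which is source={source}'s start_time"
        )
```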
| 2021-03-19T15:10:57 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-225 | c31ee9aca6cd8cfe84622a2f1b12b80c5887ae86 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index c33659e2..981221e3 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,3 +1,16 @@
+********************
+0.1.0a3 - 2021-02-25
+********************
+
+**Bug fixes**:
+
+- Fix ``Graph.in_generations()`` to also convert the ``Deme.start_time`` field.
+ Thanks to :user:`apragsdale` for reporting the problem.
+ (:user:`grahamgower`, :issue:`224`, :pr:`225`).
+- Fix ``assert_close()`` and ``is_close()`` equality checks to compare the deme
+ ``start_time``.
+ (:user:`grahamgower`, :issue:`224`, :pr:`225`).
+
********************
0.1.0a2 - 2021-02-24
********************
diff --git a/demes/demes.py b/demes/demes.py
index 942c76b3..f63992d7 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -992,6 +992,7 @@ class Deme:
:ivar str id: A string identifier for the deme.
:ivar str description: A description of the deme. May be ``None``.
+ :ivar float start_time: The time at which the deme begins to exist.
:ivar ancestors: List of string identifiers for the deme's ancestors.
This may be ``None``, indicating the deme has no ancestors.
:vartype ancestors: list of str
@@ -1010,6 +1011,7 @@ class Deme:
[attr.validators.instance_of(str), nonzero_len]
)
)
+ start_time: Time = attr.ib(validator=[int_or_float, positive])
ancestors: List[ID] = attr.ib(
validator=attr.validators.deep_iterable(
member_validator=attr.validators.and_(
@@ -1030,7 +1032,6 @@ class Deme:
iterable_validator=attr.validators.instance_of(list),
)
)
- start_time: Time = attr.ib(validator=[int_or_float, positive])
@ancestors.validator
def _check_ancestors(self, _attribute, _value):
@@ -1145,6 +1146,9 @@ def assert_close(
self.__class__ is other.__class__
), f"Failed as other deme is not instance of {self.__class__} type."
assert self.id == other.id
+ assert math.isclose(
+ self.start_time, other.start_time, rel_tol=rel_tol, abs_tol=abs_tol
+ ), f"Failed for start_time {self.start_time} != {other.start_time} (other)."
assert isclose_deme_proportions(
self.ancestors,
self.proportions,
@@ -1810,6 +1814,7 @@ def in_generations(self):
if generation_time is not None:
graph.generation_time = None
for deme in graph.demes:
+ deme.start_time /= generation_time
for epoch in deme.epochs:
epoch.start_time /= generation_time
epoch.end_time /= generation_time
@@ -2024,7 +2029,6 @@ def coerce_numbers(inst, attribute, value):
data = attr.asdict(self, filter=filt, value_serializer=coerce_numbers)
# translate to spec data model
for deme in data["demes"]:
- deme["start_time"] = deme["epochs"][0]["start_time"]
for epoch in deme["epochs"]:
del epoch["start_time"]
if epoch["selfing_rate"] == 0:
diff --git a/docs/conf.py b/docs/conf.py
index 647e8e4b..f3a5c61a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -47,6 +47,7 @@
"sphinx.ext.autodoc",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
+ "sphinx_issues",
"jupyter_sphinx",
]
@@ -72,3 +73,6 @@
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
+
+# Github repo, for sphinx_issues
+issues_github_path = "popsim-consortium/demes-python"
diff --git a/requirements.txt b/requirements.txt
index 731dcbc1..ffc215ab 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,4 +11,5 @@ pytest-cov==2.11.1
ruamel.yaml==0.16.12
sphinx==3.4.3
sphinx_rtd_theme==0.5.1
+sphinx_issues==1.2.0
stdpopsim==0.1.2
| `in_generations` does not convert `deme[id].start_time`
If we have a deme graph in years, for example, and call `g_gen = g.in_generations()`, the resulting deme graph `g_gen` does not properly convert deme start times.
For example:
```Python
import demes
file_in = "examples/gutenkunst_ooa.yml"
g = demes.load(file_in)
print(g["AMH"].start_time)
print(g["AMH"].epochs[0].start_time)
g_gen = g.in_generations()
print(g_gen["AMH"].start_time)
print(g_gen["AMH"].epochs[0].start_time)
# note that there is no issue with end time, because
# of the end_time function grabbing the last epoch's end time
print(g_gen["AMH"].end_time)
print(g_gen["AMH"].epochs[0].end_time)
```
I _think_ this is just a quick fix of updating the `in_generations` function to also set `deme.start_time /= generation_time`, but @grahamgower maybe there is something more subtle going on?
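A minimal sketch of that fix, mirroring the conversion loop in the patch above (simplified: the real method operates on a copy of the graph, and other time attributes are converted similarly):
```python
def in_generations(graph):
    # convert all deme and epoch times into units of generations
    generation_time = graph.generation_time
    graph.time_units = "generations"
    if generation_time is not None:
        graph.generation_time = None
        for deme in graph.demes:
            deme.start_time /= generation_time  # the previously missing line
            for epoch in deme.epochs:
                epoch.start_time /= generation_time
                epoch.end_time /= generation_time
    return graph
```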
| Nice catch! I think this just got forgotten when `Deme.start_time` got promoted from a `@property` to an attribute. It does make me a little suspicious that the test for `in_generations()` isn't doing what it should though.
Yeah, I noticed this when converting a graph in years to generations and then getting the list of discrete demographic events: it reported everything as a branch instead of a split, which caused errors in the moments implementations that use demes. It took a bit of tracking down to find the culprit, and it's not an obvious issue, so I'm not surprised it was missed.
I guess multiple alpha releases isn't a problem, so we can quickly fix this and put out a new alpha release?
> I guess multiple alpha releases isn't a problem, so we can quickly fix this and put out a new alpha release?
Absolutely! An alpha a day keeps the stable release away?
> It does make me a little suspicious that the test for `in_generations()` isn't doing what it should though.
And it looks like `Deme.start_time` was also omitted from the `Deme.assert_close()` function, upon which the test relies. | 2021-02-25T08:20:40 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-206 | c17db87089dc3f7cd5528ce779a404f0fb2b4b33 | diff --git a/demes/demes.py b/demes/demes.py
index cfb24f2e..6fde2050 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -1397,6 +1397,10 @@ def deme(
start_time = self[ancestors[0]].end_time
else:
start_time = float("inf")
+
+ if len(ancestors) == 0 and not math.isinf(start_time):
+ raise ValueError(f"deme {id} has finite start_time, but no ancestors")
+
# the first epoch's start time is set to deme's start time
if epochs[0].start_time is None:
epochs[0].start_time = start_time
diff --git a/examples/bottleneck.yml b/examples/bottleneck.yml
index b09d10b1..c49f7f73 100644
--- a/examples/bottleneck.yml
+++ b/examples/bottleneck.yml
@@ -1,10 +1,9 @@
-description: A single-population bottleneck model. Also tests a finite start_time.
+description: A single-population bottleneck model.
generation_time: 1
time_units: generations
demes:
- id: our_population
description: Bottleneck population using epochs
- start_time: 10000
epochs:
- start_size: 1e4
end_time: 500
| Enforce root deme start times to be inf
Currently, `demes` allows the start time of a deme to be any value > 0, even when that deme has no ancestors (i.e. is a root). This implies that some set of individuals spontaneously appears at that deme's start time. We want to enforce that the first epoch of a deme with no ancestors has a start time of inf.
Edit: I don't think this is a bug (we could think of some experimental evolution scenario where maybe it "makes sense"), but it probably is more confusing than helpful to allow root demes to have finite start times.
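In sketch form, the validation this implies (mirroring the check the patch above adds inside `Graph.deme()`; the helper name is hypothetical):
```python
import math

def check_root_start_time(deme_id, ancestors, start_time):
    # a deme with no ancestors must extend infinitely far into the past
    if len(ancestors) == 0 and not math.isinf(start_time):
        raise ValueError(
            f"deme {deme_id} has finite start_time, but no ancestors"
        )
```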
| This also suggests a nice and trivial `roots()` method for the graph.
```python
def roots(self):
return [deme.id for deme in self.demes if math.isinf(deme.start_time)]
```
Well, there's also ``roots = [deme.id for deme in self.demes if len(deme.ancestors) == 0]``?
Yeah, that's probably clearer, and works already.
Depending on ``inf`` semantics always makes me nervous! | 2021-02-16T17:26:42 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-197 | 60e92f045e7014027a52f17861ec849a7e659dcc | diff --git a/demes/convert/__init__.py b/demes/convert/__init__.py
index 8e2cb19c..52278dca 100644
--- a/demes/convert/__init__.py
+++ b/demes/convert/__init__.py
@@ -1,4 +1,3 @@
# flake8: noqa: F401
from .msprime_ import to_msprime, from_msprime
from .stdpopsim_ import to_stdpopsim, from_stdpopsim
-from .moments_ import SFS
diff --git a/demes/convert/moments_.py b/demes/convert/moments_.py
deleted file mode 100644
index e48e5012..00000000
--- a/demes/convert/moments_.py
+++ /dev/null
@@ -1,883 +0,0 @@
-from collections import defaultdict
-import math
-
-import numpy as np
-import moments
-from demes import Epoch
-
-
-# This script contains functions to compute the sample SFS from a demography defined
-# using demes using moments. Moments can handle integrating up to five populations,
-# so the demography cannot have more than five populations at any given time.
-
-
-def SFS(g, sampled_demes, sample_sizes, sample_times=None, Ne=None, unsampled_n=4):
- """
- Takes a deme graph and computes the site frequency spectrum using ``moments``.
-
- :param g: A demes graph from which to compute the SFS.
- :param sampled_demes: A list of deme IDs to take samples from. We can repeat
- demes, as long as the sampling of repeated deme IDs occurs at distinct
- times.
- :param sample_sizes: A list of the same length as ``sampled_demes``,
- giving the sample sizes for each sampled deme.
- :param sample_times: If None, assumes all sampling occurs at the end of the
- existence of the sampled deme. If there are
- ancient samples, ``sample_times`` must be a list of same length as
- ``sampled_demes``, giving the sampling times for each sampled
- deme. Sampling times are given in time units of the original deme graph,
- so might not necessarily be generations (e.g. if ``g.time_units`` is years)
- :param Ne: reference population size. If none is given, we use the initial
- size of the root deme.
- :param unsampled_n: The default sample size of unsampled demes, which must be
- greater than or equal to 4.
- :return: A ``moments`` site frequency spectrum, with dimension equal to the
- length of ``sampled_demes``, and shape equal to ``sample_sizes`` plus one
- in each dimension, indexing the allele frequency in each deme from 0
- to n[i], where i is the deme index.
- :rtype: :class:`moments.Spectrum`
- """
- if len(sampled_demes) != len(sample_sizes):
- raise ValueError("sampled_demes and sample_sizes must be same length")
- if sample_times is not None and len(sampled_demes) != len(sample_times):
- raise ValueError("sample_times must have same length as sampled_demes")
- for deme in sampled_demes:
- if deme not in g:
- raise ValueError(f"deme {deme} is not in demography")
-
- if unsampled_n < 4:
- raise ValueError("unsampled_n must be greater than 3")
-
- if sample_times is None:
- sample_times = [g[d].end_time for d in sampled_demes]
-
- # for any ancient samples, we need to add frozen branches
- # with this, all "sample times" are at time 0, and ancient sampled demes are frozen
- if np.any(np.array(sample_times) != 0):
- g, sampled_demes, list_of_frozen_demes = augment_with_ancient_samples(
- g, sampled_demes, sample_times
- )
- sample_times = [0 for _ in sample_times]
- else:
- list_of_frozen_demes = []
-
- if g.time_units != "generations":
- g, sample_times = convert_to_generations(g, sample_times)
- for d, n, t in zip(sampled_demes, sample_sizes, sample_times):
- if n < 4:
- raise ValueError("moments fails with sample sizes less than 4")
- if t < g[d].end_time or t >= g[d].start_time:
- raise ValueError("sample time for {deme} must be within its time span")
-
- # get the list of demographic events from demes, which is a dictionary with
- # lists of splits, admixtures, mergers, branches, and pulses
- demes_demo_events = g.list_demographic_events()
-
- # get the dict of events and event times that partition integration epochs, in
- # descending order. events include demographic events, such as splits and
- # mergers and admixtures, as well as changes in population sizes or migration
- # rates that require instantaneous changes in the size function or migration matrix.
- # get the list of demes present in each epoch, as a dictionary with non-overlapping
- # adjoint epoch time intervals
- demo_events, demes_present = get_demographic_events(
- g, demes_demo_events, sampled_demes
- )
-
- for epoch, epoch_demes in demes_present.items():
- if len(epoch_demes) > 5:
- raise ValueError(
- f"Moments cannot integrate more than five demes at a time. "
- f"Epoch {epoch} has demes {epoch_demes}."
- )
-
- # get the list of size functions, migration matrices, and frozen attributes from
- # the deme graph and event times, matching the integration times
- nu_funcs, mig_mats, Ts, frozen_pops = get_integration_parameters(
- g, demes_present, list_of_frozen_demes, Ne=Ne
- )
-
- # get the sample sizes within each deme, given sample sizes
- deme_sample_sizes = get_deme_sample_sizes(
- g,
- demo_events,
- sampled_demes,
- sample_sizes,
- demes_present,
- unsampled_n=unsampled_n,
- )
-
- # compute the SFS
- fs = compute_sfs(
- demo_events,
- demes_present,
- deme_sample_sizes,
- nu_funcs,
- mig_mats,
- Ts,
- frozen_pops,
- )
-
- fs, pop_ids = reorder_fs(fs, fs.pop_ids, sampled_demes)
- fs.pop_ids = pop_ids
-
- return fs
-
-
-def convert_to_generations(g, sample_times):
- """
- Takes a deme graph that is not in time units of generations and converts
- times to generations, using the time units and generation times given.
- """
- if g.time_units == "generations":
- return g, sample_times
- else:
- for ii, sample_time in enumerate(sample_times):
- sample_times[ii] = sample_time / g.generation_time
- g = g.in_generations()
- return g, sample_times
-
-
-def augment_with_ancient_samples(g, sampled_demes, sample_times):
- """
- Returns a demography object and new sampled demes where we add
- a branch event for the new sampled deme that is frozen.
-
- New sampled, frozen demes are labeled "{deme}_sampled_{sample_time}".
- Note that we cannot have multiple ancient sampling events at the same
- time for the same deme (for additional samples at the same time, increase
- the sample size).
- """
- frozen_demes = []
- for ii, (sd, st) in enumerate(zip(sampled_demes, sample_times)):
- if st > 0:
- sd_frozen = sd + f"_sampled_{st}"
- frozen_demes.append(sd_frozen)
- sampled_demes[ii] = sd_frozen
- g.deme(
- id=sd_frozen,
- epochs=[Epoch(start_time=st, end_time=0, start_size=1)],
- ancestors=[sd],
- )
- return g, sampled_demes, frozen_demes
-
-
-def get_demographic_events(g, demes_demo_events, sampled_demes):
- """
- Returns demographic events and present demes over each epoch.
- Epochs are divided by any demographic event.
- """
- # first get set of all time dividers, from demographic events, migration
- # rate changes, deme epoch changes
- break_points = set()
- for deme in g.demes:
- for e in deme.epochs:
- break_points.add(e.start_time)
- break_points.add(e.end_time)
- for pulse in g.pulses:
- break_points.add(pulse.time)
- for migration in g.migrations:
- break_points.add(migration.start_time)
- break_points.add(migration.end_time)
-
- # get demes present for each integration epoch
- integration_times = [
- (start_time, end_time)
- for start_time, end_time in zip(
- sorted(list(break_points))[-1:0:-1], sorted(list(break_points))[-2::-1]
- )
- ]
-
- # find live demes in each epoch, starting with most ancient
- demes_present = defaultdict(list)
- # add demes as they appear from past to present to end of lists
- deme_start_times = defaultdict(list)
- for deme in g.demes:
- deme_start_times[deme.start_time].append(deme.id)
-
- if math.inf not in deme_start_times.keys():
- raise ValueError("Root deme must have start time as inf")
- if len(deme_start_times[math.inf]) != 1:
- raise ValueError("Deme graph can only have a single root")
-
- for start_time in sorted(deme_start_times.keys())[::-1]:
- for deme_id in deme_start_times[start_time]:
- end_time = g[deme_id].end_time
- for interval in integration_times:
- if start_time >= interval[0] and end_time <= interval[1]:
- demes_present[interval].append(deme_id)
-
- # dictionary of demographic events (pulses, splits, branches, mergers, and
- # admixtures) it's possible that the order of these events will matter
- # also noting here that there can be ambiguity about order of events, that will
- # change the demography... but there should always be a way to write the demography
- # in an unambiguous manner, using different verbs (e.g., two pulse events at the
- # same time with same dest can be converted to an admixture event, and split the
- # dest deme into two demes)
- demo_events = defaultdict(list)
- for pulse in demes_demo_events["pulses"]:
- event = ("pulse", pulse.source, pulse.dest, pulse.proportion)
- demo_events[pulse.time].append(event)
- for branch in demes_demo_events["branches"]:
- event = ("branch", branch.parent, branch.child)
- demo_events[branch.time].append(event)
- for merge in demes_demo_events["mergers"]:
- event = ("merge", merge.parents, merge.proportions, merge.child)
- demo_events[merge.time].append(event)
- for admix in demes_demo_events["admixtures"]:
- event = ("admix", admix.parents, admix.proportions, admix.child)
- demo_events[admix.time].append(event)
- for split in demes_demo_events["splits"]:
- event = ("split", split.parent, split.children)
- demo_events[split.time].append(event)
-
- # if there are any unsampled demes that end before present and do not have
- # any descendent demes, we need to add marginalization events.
- for deme_id, succs in g.successors.items():
- if deme_id not in sampled_demes and (
- len(succs) == 0
- or np.all([g[succ].start_time > g[deme_id].end_time for succ in succs])
- ):
- event = ("marginalize", deme_id)
- demo_events[g[deme_id].end_time].append(event)
-
- return demo_events, demes_present
-
-
-def get_integration_parameters(g, demes_present, frozen_list, Ne=None):
- """
- Returns a list of size functions, migration matrices, integration times,
- and frozen attributes.
- """
- nu_funcs = []
- integration_times = []
- migration_matrices = []
- frozen_demes = []
-
- if Ne is None:
- # get root population and set Ne to root size
- for deme_id, preds in g.predecessors.items():
- if len(preds) == 0:
- root_deme = deme_id
- break
- Ne = g[root_deme].epochs[0].start_size
-
- for interval, live_demes in sorted(demes_present.items())[::-1]:
- # get intergration time for interval
- T = (interval[0] - interval[1]) / 2 / Ne
- if T == math.inf:
- T = 0
- integration_times.append(T)
- # get frozen attributes
- freeze = [d in frozen_list for d in live_demes]
- frozen_demes.append(freeze)
- # get nu_function or list of sizes (if all constant)
- sizes = []
- for d in live_demes:
- sizes.append(sizes_at_time(g, d, interval))
- nu_func = make_nu_func(sizes, T, Ne)
- nu_funcs.append(nu_func)
- # get migration matrix for interval
- mig_mat = np.zeros((len(live_demes), len(live_demes)))
- for ii, d_from in enumerate(live_demes):
- for jj, d_to in enumerate(live_demes):
- if d_from != d_to:
- m = migration_rate_in_interval(g, d_from, d_to, interval)
- mig_mat[jj, ii] = 2 * Ne * m
- migration_matrices.append(mig_mat)
-
- return nu_funcs, migration_matrices, integration_times, frozen_demes
-
-
-def make_nu_func(sizes, T, Ne):
- """
- Given the sizes at start and end of time interval, and the size function for
- each deme, along with the integration time and reference Ne, return the
- size function that gets passed to the moments integration routines.
- """
- if np.all([s[-1] == "constant" for s in sizes]):
- # all constant
- nu_func = [s[0] / Ne for s in sizes]
- else:
- nu_funcs_separated = []
- for s in sizes:
- if s[-1] == "constant":
- assert s[0] == s[1]
- nu_funcs_separated.append(lambda t, N0=s[0]: N0 / Ne)
- elif s[-1] == "linear":
- nu_funcs_separated.append(
- lambda t, N0=s[0], NF=s[1]: N0 / Ne + t / T * (NF - N0) / Ne
- )
- elif s[-1] == "exponential":
- nu_funcs_separated.append(
- lambda t, N0=s[0], NF=s[1]: N0
- / Ne
- * np.exp(np.log(NF / N0) * t / T)
- )
- else:
- raise ValueError(f"{s[-1]} not a valid size function")
-
- def nu_func(t):
- return [nu(t) for nu in nu_funcs_separated]
-
- # check that this is correct, or if we have to "pin" parameters
- return nu_func
-
-
-def sizes_at_time(g, deme_id, time_interval):
- """
- Returns the start size, end size, and size function for given deme over the
- given time interval.
- """
- for epoch in g[deme_id].epochs:
- if epoch.start_time >= time_interval[0] and epoch.end_time <= time_interval[1]:
- break
- if epoch.size_function not in ["constant", "exponential", "linear"]:
- raise ValueError(
- "Can only intergrate constant, exponential, or linear size functions"
- )
- size_function = epoch.size_function
-
- if size_function == "constant":
- start_size = end_size = epoch.start_size
-
- if epoch.start_time == time_interval[0]:
- start_size = epoch.start_size
- else:
- if size_function == "exponential":
- start_size = epoch.start_size * np.exp(
- np.log(epoch.end_size / epoch.start_size)
- * (epoch.start_time - time_interval[0])
- / epoch.time_span
- )
- elif size_function == "linear":
- frac = (epoch.start_time - time_interval[0]) / epoch.time_span
- start_size = epoch.start_size + frac * (epoch.end_size - epoch.start_size)
-
- if epoch.end_time == time_interval[1]:
- end_size = epoch.end_size
- else:
- if size_function == "exponential":
- end_size = epoch.start_size * np.exp(
- np.log(epoch.end_size / epoch.start_size)
- * (epoch.start_time - time_interval[1])
- / epoch.time_span
- )
- elif size_function == "linear":
- frac = (epoch.start_time - time_interval[1]) / epoch.time_span
- end_size = epoch.start_size + frac * (epoch.end_size - epoch.start_size)
-
- return start_size, end_size, size_function
-
-
-def migration_rate_in_interval(g, source, dest, time_interval):
- """
- Get the migration rate from source to dest over the given time interval.
- """
- rate = 0
- for mig in g.migrations:
- if mig.source == source and mig.dest == dest:
- if mig.start_time >= time_interval[0] and mig.end_time <= time_interval[1]:
- rate = mig.rate
- return rate
-
-
-def get_deme_sample_sizes(
- g, demo_events, sampled_demes, sample_sizes, demes_present, unsampled_n=4
-):
- """
- Returns sample sizes within each deme that is present within each interval.
- Deme samples sizes can change if there are pulse or branching events, e.g.,
- but will be constant over the integration epochs.
- This works by climbing up the demography from most recent integration epoch to
- most distant. Unsampled leaf demes get size unsampled_ns, and others have size
- given by sample_sizes.
- """
- ns = {}
- for interval, deme_ids in demes_present.items():
- ns[interval] = [0 for _ in deme_ids]
-
- # initialize with sampled demes and unsampled, marginalized demes
- for deme_id, n in zip(sampled_demes, sample_sizes):
- for interval in ns.keys():
- if interval[0] <= g[deme_id].start_time:
- ns[interval][demes_present[interval].index(deme_id)] += n
-
- # Climb up the demographic events, taking into account pulses, branches, etc
- # when we add a new deme, determine base n from its successors (split, merge,
- # admixture), and propagate up. Similarly, propagate up other events that add
- # lineages to a branch (branches, pulses). Marginalize events add the deme
- # sample size with unsampled_n.
- for t, events in sorted(demo_events.items()):
- for event in events:
- if event[0] == "marginalize":
- deme_id = event[1]
- # add unsampled deme
- for interval in ns.keys():
- if (
- interval[0] <= g[deme_id].start_time
- and interval[1] >= g[deme_id].end_time
- ):
- ns[interval][
- demes_present[interval].index(deme_id)
- ] += unsampled_n
- elif event[0] == "split":
- # add the parental deme
- deme_id = event[1]
- children = event[2]
- for interval in sorted(ns.keys()):
- if interval[0] == g[deme_id].end_time:
- # get child sizes at time of split
- children_ns = {
- child: ns[interval][demes_present[interval].index(child)]
- for child in children
- }
- if (
- interval[0] <= g[deme_id].start_time
- and interval[1] >= g[deme_id].end_time
- ):
- for child in children:
- ns[interval][
- demes_present[interval].index(deme_id)
- ] += children_ns[child]
- elif event[0] == "branch":
- # add child n to parent n for integration epochs above t
- deme_id = event[1]
- child = event[2]
- for interval in sorted(ns.keys()):
- if interval[0] == t:
- # get child sizes at time of split
- child_ns = ns[interval][demes_present[interval].index(child)]
- if (
- interval[0] <= g[deme_id].start_time
- and interval[1] >= g[deme_id].end_time
- and interval[1] >= t
- ):
- ns[interval][demes_present[interval].index(deme_id)] += child_ns
- elif event[0] == "pulse":
- # figure out how much the admix_in_place needs from child to parent
- source = event[1]
- dest = event[2]
- for interval in sorted(ns.keys()):
- if interval[0] == t:
- dest_size = ns[interval][demes_present[interval].index(dest)]
- if (
- interval[0] <= g[source].start_time
- and interval[1] >= g[source].end_time
- and interval[1] >= t
- ):
- ns[interval][demes_present[interval].index(source)] += dest_size
- elif event[0] == "merge":
- # each parent gets number of lineages in child
- parents = event[1]
- child = event[3]
- for interval in sorted(ns.keys()):
- if interval[0] == t:
- child_size = ns[interval][demes_present[interval].index(child)]
- for parent in parents:
- if (
- interval[0] <= g[parent].start_time
- and interval[1] >= g[parent].end_time
- ):
- ns[interval][
- demes_present[interval].index(parent)
- ] += child_size
- elif event[0] == "admix":
- # each parent gets num child lineages for all epochs above t
- parents = event[1]
- child = event[3]
- for interval in sorted(ns.keys()):
- if interval[0] == t:
- child_size = ns[interval][demes_present[interval].index(child)]
- for parent in parents:
- if (
- interval[0] <= g[parent].start_time
- and interval[1] >= g[parent].end_time
- and interval[1] >= t
- ):
- ns[interval][
- demes_present[interval].index(parent)
- ] += child_size
- return ns
-
-
-def compute_sfs(
- demo_events,
- demes_present,
- deme_sample_sizes,
- nu_funcs,
- migration_matrices,
- integration_times,
- frozen_demes,
- theta=1.0,
- gamma=None,
- h=None,
- reversible=False,
-):
- """
- Integrates using moments to find the SFS for given demo events, etc
- """
- if gamma is not None and h is None:
- h = 0.5
-
- if reversible is True:
- assert type(theta) is list
- assert len(theta) == 2
- # theta is forward and backward rates, as list of length 2
- theta_fd = theta[0]
- theta_bd = theta[1]
- assert theta_fd < 1 and theta_bd < 1
- else:
- # theta is a scalar
- assert type(theta) in [int, float]
-
- integration_intervals = sorted(list(demes_present.keys()))[::-1]
-
- # set up initial steady-state 1D SFS for ancestral deme
- n0 = deme_sample_sizes[integration_intervals[0]][0]
- if gamma is None:
- gamma0 = 0.0
- if h is None:
- h0 = 0.5
- if reversible is False:
- fs = theta * moments.LinearSystem_1D.steady_state_1D(n0, gamma=gamma0, h=h0)
- else:
- fs = moments.LinearSystem_1D.steady_state_1D_reversible(
- n0, gamma=gamma0, theta_fd=theta_fd, theta_bd=theta_bd
- )
- if h0 != 0.5:
- raise ValueError("only use h=0.5 for reversible model for now...")
- fs = moments.Spectrum(fs)
- pop_ids = [demes_present[integration_intervals[0]][0]]
-
- # for each set of demographic events and integration epochs, step through
- # integration, apply events, and then reorder populations to align with demes
- # present in the next integration epoch
- for (T, nu, M, frozen, interval) in zip(
- integration_times,
- nu_funcs,
- migration_matrices,
- frozen_demes,
- integration_intervals,
- ):
- if T > 0:
- if gamma is not None:
- gamma_int = [gamma for _ in frozen]
- h_int = [h for _ in frozen]
- else:
- gamma_int = None
- h_int = None
- if reversible:
- fs.integrate(
- nu,
- T,
- m=M,
- frozen=frozen,
- gamma=gamma_int,
- h=h_int,
- finite_genome=True,
- theta_fd=theta_fd,
- theta_bd=theta_bd,
- )
- else:
- fs.integrate(
- nu, T, m=M, frozen=frozen, gamma=gamma_int, h=h_int, theta=theta
- )
-
- events = demo_events[interval[1]]
- for event in events:
- fs, pop_ids = apply_event(
- fs, pop_ids, event, interval[1], deme_sample_sizes, demes_present
- )
-
- if interval[1] > 0:
- # rearrange to next order of demes
- next_interval = integration_intervals[
- [x[0] for x in integration_intervals].index(interval[1])
- ]
- next_deme_order = demes_present[next_interval]
- assert fs.ndim == len(next_deme_order)
- assert np.all([d in next_deme_order for d in pop_ids])
- fs, pop_ids = reorder_fs(fs, pop_ids, next_deme_order)
-
- fs.pop_ids = pop_ids
- return fs
-
-
-def apply_event(fs, pop_ids, event, t, deme_sample_sizes, demes_present):
- e = event[0]
- if e == "marginalize":
- marg_idx = pop_ids.index(event[1])
- fs = fs.marginalize([marg_idx])
- pop_ids.pop(marg_idx)
- elif e == "split":
- children = event[2]
- if len(children) == 1:
- # "split" into just one population (name change)
- deme_idx = pop_ids.index(event[1])
- pop_ids[deme_idx] = children[0]
- else:
- # split into multiple children demes
- if len(children) + len(pop_ids) > 5:
- raise ValueError("Cannot apply split that creates more than 5 demes")
- # get children deme sizes at time t
- for i, ns in deme_sample_sizes.items():
- if i[0] == t:
- split_sizes = [
- deme_sample_sizes[i][demes_present[i].index(c)]
- for c in children
- ]
- break
- split_idx = pop_ids.index(event[1])
- # children[0] is in split idx, the rest are at the end
- fs, pop_ids = split_fs(fs, pop_ids, split_idx, children, split_sizes)
- elif e == "branch":
- # branch is a split, but keep the pop_id of parent
- parent = event[1]
- child = event[2]
- children = [parent, child]
- for i, ns in deme_sample_sizes.items():
- if i[0] == t:
- split_sizes = [
- deme_sample_sizes[i][demes_present[i].index(c)] for c in children
- ]
- break
- split_idx = pop_ids.index(parent)
- fs, pop_ids = split_fs(fs, pop_ids, split_idx, children, split_sizes)
- elif e == "merge":
- # two or more populations merge, based on given proportions
- parents = event[1]
- proportions = event[2]
- child = event[3]
- for i, ns in deme_sample_sizes.items():
- if i[0] == t:
- child_size = deme_sample_sizes[i][demes_present[i].index(child)]
- fs, pop_ids = admix_fs(
- fs, pop_ids, parents, proportions, child, child_size, marginalize=True
- )
- elif e == "admix":
- # two or more populations merge, based on given proportions
- parents = event[1]
- proportions = event[2]
- child = event[3]
- for i, ns in deme_sample_sizes.items():
- if i[0] == t:
- child_size = deme_sample_sizes[i][demes_present[i].index(child)]
- fs, pop_ids = admix_fs(
- fs, pop_ids, parents, proportions, child, child_size, marginalize=False
- )
- elif e == "pulse":
- # admixture from one population to another, with some proportion
- source = event[1]
- dest = event[2]
- proportion = event[3]
- for i, ns in deme_sample_sizes.items():
- if i[0] == t:
- target_sizes = [
- deme_sample_sizes[i][demes_present[i].index(source)],
- deme_sample_sizes[i][demes_present[i].index(dest)],
- ]
- fs, pop_ids = pulse_fs(fs, pop_ids, source, dest, proportion, target_sizes)
- else:
- raise ValueError(f"Haven't implemented methods for event type {e}")
- return fs, pop_ids
-
-
-def split_fs(fs, pop_ids, split_idx, children, split_sizes):
- """
- Split the SFS into children with split_sizes, from the deme at split_idx.
- """
- if fs.ndim == 1:
- assert len(split_sizes) <= 5
- assert split_idx == 0
- fs = moments.Manips.split_1D_to_2D(
- fs, split_sizes[0] + sum(split_sizes[2:]), split_sizes[1]
- )
- if len(split_sizes) >= 3:
- fs = moments.Manips.split_2D_to_3D_1(
- fs, split_sizes[0] + sum(split_sizes[3:]), split_sizes[2]
- )
- if len(split_sizes) >= 4:
- fs = moments.Manips.split_3D_to_4D_1(
- fs, split_sizes[0] + sum(split_sizes[4:]), split_sizes[3]
- )
- if len(split_sizes) == 5:
- fs = moments.Manips.split_4D_to_5D_1(fs, split_sizes[0], split_sizes[4])
- elif fs.ndim == 2:
- assert len(split_sizes) <= 4
- assert split_idx in [0, 1]
- if split_idx == 0:
- fs = moments.Manips.split_2D_to_3D_1(
- fs, split_sizes[0] + sum(split_sizes[2:]), split_sizes[1]
- )
- if len(split_sizes) >= 3:
- fs = moments.Manips.split_3D_to_4D_1(
- fs, split_sizes[0] + sum(split_sizes[3:]), split_sizes[2]
- )
- if len(split_sizes) == 4:
- fs = moments.Manips.split_4D_to_5D_1(fs, split_sizes[0], split_sizes[3])
- elif split_idx == 1:
- fs = moments.Manips.split_2D_to_3D_2(
- fs, split_sizes[0] + sum(split_sizes[2:]), split_sizes[1]
- )
- if len(split_sizes) >= 3:
- fs = moments.Manips.split_3D_to_4D_2(
- fs, split_sizes[0] + sum(split_sizes[3:]), split_sizes[2]
- )
- if len(split_sizes) == 4:
- fs = moments.Manips.split_4D_to_5D_2(fs, split_sizes[0], split_sizes[3])
- elif fs.ndim == 3:
- if split_idx == 0:
- fs = moments.Manips.split_3D_to_4D_1(
- fs, split_sizes[0] + sum(split_sizes[2:]), split_sizes[1]
- )
- if len(split_sizes) == 3:
- fs = moments.Manips.split_4D_to_5D_1(fs, split_sizes[0], split_sizes[2])
- elif split_idx == 1:
- fs = moments.Manips.split_3D_to_4D_2(
- fs, split_sizes[0] + sum(split_sizes[2:]), split_sizes[1]
- )
- if len(split_sizes) == 3:
- fs = moments.Manips.split_4D_to_5D_2(fs, split_sizes[0], split_sizes[2])
- elif split_idx == 2:
- fs = moments.Manips.split_3D_to_4D_3(
- fs, split_sizes[0] + sum(split_sizes[2:]), split_sizes[1]
- )
- if len(split_sizes) == 3:
- fs = moments.Manips.split_4D_to_5D_3(fs, split_sizes[0], split_sizes[2])
- elif fs.ndim == 4:
- if split_idx == 0:
- fs = moments.Manips.split_4D_to_5D_1(fs, split_sizes[0], split_sizes[1])
- elif split_idx == 1:
- fs = moments.Manips.split_4D_to_5D_2(fs, split_sizes[0], split_sizes[1])
- elif split_idx == 2:
- fs = moments.Manips.split_4D_to_5D_3(fs, split_sizes[0], split_sizes[1])
- elif split_idx == 3:
- fs = moments.Manips.split_4D_to_5D_4(fs, split_sizes[0], split_sizes[1])
- else:
- raise ValueError("Cannot split SFS with ndim > 4")
-
- pop_ids[split_idx] = children[0]
- for child in children[1:]:
- pop_ids.append(child)
- return fs, pop_ids
-
-
-def admix_fs(fs, pop_ids, parents, proportions, child, child_size, marginalize=False):
- """
- Both merge and admixture events use this function, with the only difference that
- merge events remove the parental demes (martinalize = True), while admixture events
- do not.
- """
- # get which parents get "marginalized" because parent size == child size
- # need to know this to know how to update pop_ids along the way
- if marginalize:
- # though should all have parent size == child size, so no marginalization
- # of the fs should need to take place, only updating pop_ids
- marged_parents = [parent for parent in parents]
- else:
- marged_parents = []
- for parent in parents:
- if fs.sample_sizes[pop_ids.index(parent)] == child_size:
- marged_parents.append(parent)
-
- # if len(parents) == 2:
- # # use admix_into_new to add the child deme to the end
- # fs = moments.Manips.admix_into_new(
- # fs,
- # pop_ids.index(parents[0]),
- # pop_ids.index(parents[1]),
- # child_size,
- # proportions[0],
- # )
- # to_delete = [pop_ids.index(parent) for parent in marged_parents]
- # for idx in sorted(to_delete, reverse=True):
- # del pop_ids[idx]
- # pop_ids.append(child)
- if len(parents) >= 2:
- fA = proportions[0] / (proportions[0] + proportions[1])
- fB = proportions[1] / (proportions[0] + proportions[1])
- assert np.isclose(fA, 1 - fB)
- idxA = pop_ids.index(parents[0])
- idxB = pop_ids.index(parents[1])
- # admix first two
- fs = moments.Manips.admix_into_new(fs, idxA, idxB, child_size, fA)
- # adjust pop_ids
- for parent in parents[:2]:
- if parent in marged_parents:
- del pop_ids[pop_ids.index(parent)]
- pop_ids.append(child)
- if len(parents) >= 3:
- # admix third pop
- fAB = (proportions[0] + proportions[1]) / (
- proportions[0] + proportions[1] + proportions[2]
- )
- fC = proportions[2] / (proportions[0] + proportions[1] + proportions[2])
- assert np.isclose(fAB, 1 - fC)
- idxAB = pop_ids.index(child) # last pop, was added to end
- idxC = pop_ids.index(parents[2])
- fs = moments.Manips.admix_into_new(fs, idxAB, idxC, child_size, fAB)
- if parents[2] in marged_parents:
- del pop_ids[pop_ids.index(parents[2])]
- # child still on end
- if len(parents) >= 4:
- # admix 4th pop
- fABC = (proportions[0] + proportions[1] + proportions[2]) / (
- proportions[0] + proportions[1] + proportions[2] + proportions[3]
- )
- fD = proportions[3] / (
- proportions[0] + proportions[1] + proportions[2] + proportions[3]
- )
- assert np.isclose(fABC, 1 - fD)
- idxABC = pop_ids.index(child)
- idxD = pop_ids.index(parents[3])
- fs = moments.Manips.admix_into_new(fs, idxABC, idxD, child_size, fABC)
- if parents[3] in marged_parents:
- del pop_ids[pop_ids.index(parents[3])]
- if len(parents) == 5:
- # admix 5th pop
- fABCD = (proportions[0] + proportions[1] + proportions[2] + proportions[3]) / (
- proportions[0]
- + proportions[1]
- + proportions[2]
- + proportions[3]
- + proportions[4]
- )
- fE = proportions[4] / (
- proportions[0]
- + proportions[1]
- + proportions[2]
- + proportions[3]
- + proportions[4]
- )
- assert np.isclose(fABCD, 1 - fE)
- idxABCD = pop_ids.index(child)
- idxE = pop_ids.index(parents[4])
- fs = moments.Manips.admix_into_new(fs, idxABCD, idxE, child_size, fABCD)
- if parents[4] in marged_parents:
- del pop_ids[pop_ids.index(parents[4])]
-
- return fs, pop_ids
-
-
-def pulse_fs(fs, pop_ids, source, dest, proportion, target_sizes):
- # uses admix in place
- source_idx = pop_ids.index(source)
- dest_idx = pop_ids.index(dest)
- fs = moments.Manips.admix_inplace(
- fs, source_idx, dest_idx, target_sizes[0], proportion
- )
-
- assert fs.sample_sizes[source_idx] == target_sizes[0]
- assert fs.sample_sizes[dest_idx] == target_sizes[1]
-
- return fs, pop_ids
-
-
-def reorder_fs(fs, pop_ids, next_deme_order):
- for ii, swap_id in enumerate(next_deme_order):
- pop_id = pop_ids[ii]
- if pop_id != swap_id:
- swap_index = pop_ids.index(swap_id)
- fs = fs.swapaxes(ii, swap_index)
- pop_ids[ii], pop_ids[swap_index] = pop_ids[swap_index], pop_ids[ii]
- return fs, pop_ids
diff --git a/docs/convert.rst b/docs/convert.rst
index 1e31ff09..6fc8f59b 100644
--- a/docs/convert.rst
+++ b/docs/convert.rst
@@ -11,4 +11,3 @@ Conversion functions
.. autofunction:: demes.convert.from_msprime
.. autofunction:: demes.convert.to_stdpopsim
.. autofunction:: demes.convert.from_stdpopsim
-.. autofunction:: demes.convert.SFS
diff --git a/requirements.txt b/requirements.txt
index a07aca0e..7de406a7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,4 +13,3 @@ ruamel.yaml==0.16.12
sphinx==3.4.3
sphinx_rtd_theme==0.5.0
stdpopsim==0.1.2
-git+https://bitbucket.org/simongravel/[email protected]#egg=moments
diff --git a/setup.cfg b/setup.cfg
index 07e01ce1..7e6f735b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -43,9 +43,6 @@ ignore_missing_imports = True
[mypy-msprime.*]
ignore_missing_imports = True
-[mypy-moments.*]
-ignore_missing_imports = True
-
[mypy-numpy.*]
ignore_missing_imports = True
| recent tests workflow failures with moments on python 3.9
Not sure what's going on here, but there are failures evident in today's dependabot PRs. I thought they were due to having multiple numpy versions installed, so opened #195 to fix that. But there's something else going on which is not at all obvious.
I can reproduce the test failures locally, after installing moments via
```pip install git+https://bitbucket.org/simongravel/[email protected]#egg=moments```.
However, the failures don't occur after installing moments from a locally cloned repository with `python setup.py install --user`.
The problem here is related to something called "build isolation" during `pip install`. Essentially, when you call `pip install foo`, pip will create an isolated environment containing foo's build requirements (PEP 518 `build-system.requires`). This ignores whatever the user already has installed. So in the case of pip-installing moments, pip installs numpy into the isolated environment, and resolves this to the *latest* numpy version, ignoring the currently installed numpy. Now, numpy 1.20.0 was recently released, with a backwards-incompatible binary API change. So pip builds moments against numpy-1.20.0 in the isolated environment, and then we try to use moments in our target environment with numpy-1.19.x.
I expect that reports of the following error will be widespread over the next few days, because there are going to be a *lot* of projects encountering this problem.
```
ValueError: numpy.ndarray size changed, may indicate binary incompatibility. Expected 88 from C header, got 80 from PyObject
```
There's a bunch of discussion here about the general problem. I'm not convinced the suggestions are foolproof, so I guess we'll see how other projects deal with this in the coming days.
https://discuss.python.org/t/pep-517-how-to-pick-compatible-build-dependencies/2460/2
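For concreteness, here's a minimal Python sketch of the failure mode, with one possible workaround in the comments (the `--no-build-isolation` pip flag is real, but treating it as the fix here is my assumption, not something verified in this thread):
```python
# Sketch of the failure mode: a compiled extension (moments) built against a
# newer numpy ABI than the numpy that is installed at runtime.
import numpy

print(numpy.__version__)  # e.g. "1.19.5" in the target environment

try:
    import moments  # compiled against numpy 1.20.0 in pip's isolated build env
except ValueError as e:
    # ValueError: numpy.ndarray size changed, may indicate binary
    # incompatibility. Expected 88 from C header, got 80 from PyObject
    print(e)

# Possible workaround (an assumption, untested here): disable build isolation
# so moments is compiled against the numpy already in the environment:
#   pip install --no-build-isolation \
#       git+https://bitbucket.org/simongravel/[email protected]#egg=moments
```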
Hi @grahamgower, thanks for catching this. It's definitely not something I had run into before, but it's something that does need to be fixed in the `moments` distribution.
On another note, I wonder if it's time to remove the moments conversion from demes itself. I've copied it to a branch for a future release within `moments`, and I'll update and test it there as well. Dropping it here will simplify things as we gear up for our alpha release, when we'll remove the built-in conversion methods (except maybe for the old `ms` command line code).
> On another note, I wonder if it's time to remove the moments conversion from demes itself. I've copied it to a branch for a future release within `moments`, and I'll update and test it there as well. Dropping it here will simplify things as we gear up for our alpha release, when we'll remove the built-in conversion methods (except maybe for the old `ms` command line code).
I suspect there are still going to be API changes, unfortunately. E.g. #188 already touched the moments conversion code and its tests. If you're willing to forgo those fixes as the API changes, then by all means, we can remove the moments code from Demes.
Yeah, let's go ahead and just remove the moments conversion. I already have it integrated within the next moments release branch, so it doesn't make sense to maintain that code both there and here, and it's going to be living there eventually. I don't mind maintaining it as the API changes in the coming weeks.
The installation from bitbucket is kind of a pain anyway. This will remove a lot of that headache. | 2021-02-10T08:58:44 | 0.0 | [] | [] |
||
popsim-consortium/demes-python | popsim-consortium__demes-python-188 | aca41bb3a5d2d9a2f73074eb13349e17a07f0e0e | diff --git a/demes/convert/msprime_.py b/demes/convert/msprime_.py
index f755dafb..3f2e9a90 100644
--- a/demes/convert/msprime_.py
+++ b/demes/convert/msprime_.py
@@ -353,9 +353,9 @@ def from_msprime(
deme_id,
ancestors=deme_dict["ancestors"],
proportions=deme_dict["proportions"],
+ start_time=epochs[deme_id][0].start_time,
epochs=[
demes.Epoch(
- start_time=epoch.start_time,
end_time=epoch.end_time,
start_size=epoch.start_size,
end_size=epoch.end_size,
diff --git a/demes/demes.py b/demes/demes.py
index 13b61505..cfb24f2e 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -1333,13 +1333,10 @@ def deme(
proportions=None,
epochs=None,
start_time=None,
- end_time=None,
- start_size=None,
- end_size=None,
defaults={},
) -> Deme:
"""
- Add a deme to the graph, with lifetime ``(start_time, end_time]``.
+ Add a deme to the graph.
:param str id: A string identifier for the deme.
:param ancestors: List of string identifiers for the deme's ancestors.
@@ -1361,18 +1358,9 @@ def deme(
- If the deme has multiple ancestors, the ``start_time`` must be
provided.
- :param float end_time: The time at which this deme stops existing,
- in units of ``time_units`` before the present.
- If not specified, defaults to ``0.0`` (the present).
- :param start_size: The initial population size of the deme.
- This must be provided.
- :param end_size: The final population size of the deme. If ``None``,
- the deme has a constant ``start_size`` population size.
:param epochs: Epochs that define population sizes, selfing rates, and
cloning rates, for the deme over various time periods.
- If not specified, a single epoch will be created for the deme that
- spans from ``start_time`` to ``end_time``, using the ``start_size``,
- ``end_size``, ``selfing_rate`` and ``cloning_rate`` provided.
+ If the final epoch's ``end_time`` is ``None``, it will be set to ``0``.
:param defaults: Default attributes for epochs, including cloning_rate
and selfing_rate.
:return: Newly created deme.
@@ -1383,10 +1371,6 @@ def deme(
raise ValueError(f"deme {id} already exists in this graph")
if epochs is None:
raise ValueError(f"deme {id} must have at least one specified epoch")
- if start_size is None and epochs is not None:
- start_size = epochs[0].start_size
- if start_size is None:
- raise ValueError(f"must set start_size for deme {id}")
if ancestors is None:
ancestors = []
if not isinstance(ancestors, list):
@@ -1404,9 +1388,7 @@ def deme(
# if first epoch does not have a start time, set to inf or to
# the ancestor's end time
if start_time is None:
- if epochs[0].start_time is not None:
- start_time = epochs[0].start_time
- elif len(ancestors) > 0:
+ if len(ancestors) > 0:
if len(ancestors) > 1:
raise ValueError(
"with multiple ancestors, start_time must be specified "
@@ -1422,14 +1404,9 @@ def deme(
raise ValueError(
f"deme and first epoch start times do not align for deme {id}"
)
-
- # set the end time to the last epoch's end time
- if end_time is None:
- end_time = epochs[-1].end_time
- if epochs[-1].end_time is not None and epochs[-1].end_time != end_time:
- raise ValueError(
- f"deme and final epoch end times do not align for deme {id}"
- )
+ # fix deme's end time to 0 if not set
+ if epochs[-1].end_time is None:
+ epochs[-1].end_time = 0
# check start time is valid wrt ancestor time intervals
for ancestor in ancestors:
@@ -1452,6 +1429,8 @@ def deme(
epochs[i].start_time = epochs[i - 1].end_time
if epochs[i].end_time is None:
raise ValueError("all epochs must specify the end time")
+ if epochs[i].start_time <= epochs[i].end_time:
+ raise ValueError(f"epoch {i} has start_time <= end_time for deme {id}")
if i > 0 and epochs[i].start_time != epochs[i - 1].end_time:
raise ValueError(
"epoch start and end times do not align for deme {id}, "
@@ -1811,7 +1790,8 @@ def coerce_numbers(inst, attribute, value):
# translate to spec data model
for deme in data["demes"]:
deme["start_time"] = deme["epochs"][0]["start_time"]
- deme["end_time"] = deme["epochs"][-1]["end_time"]
+ for epoch in deme["epochs"]:
+ del epoch["start_time"]
migrations = data.pop("migrations", None)
if migrations is not None:
data["migrations"] = {"asymmetric": migrations}
@@ -1829,34 +1809,25 @@ def asdict_simplified(self, custom_attributes=[]):
def simplify_epochs(data):
"""
- Remove epoch start times if implied by previous epoch's end time
- or if implied by the deme ancestor(s)'s end time(s).
+ Remove epoch start times. Also remove deme start time
+ if implied by the deme ancestor(s)'s end time(s).
"""
for deme in data["demes"]:
for j, epoch in enumerate(deme["epochs"]):
- # remove implied start times
- if j == 0:
- if math.isinf(epoch["start_time"]):
- del epoch["start_time"]
- if "ancestors" in deme and len(deme["ancestors"]) == 1:
- # start time needed for more than 1 ancestor
- if (
- self[deme["ancestors"][0]].end_time
- == epoch["start_time"]
- ):
- del epoch["start_time"]
- else:
- del epoch["start_time"]
if epoch["size_function"] in ("constant", "exponential"):
del epoch["size_function"]
if epoch["start_size"] == epoch["end_size"]:
del epoch["end_size"]
for deme in data["demes"]:
- del deme["start_time"]
- del deme["end_time"]
+ # remove implied start times
+ if math.isinf(deme["start_time"]):
+ del deme["start_time"]
if "ancestors" in deme and len(deme["ancestors"]) == 1:
del deme["proportions"]
+ # start time needed for more than 1 ancestor
+ if self[deme["ancestors"][0]].end_time == deme["start_time"]:
+ del deme["start_time"]
def simplify_migration_rates(data):
"""
diff --git a/examples/bottleneck.yml b/examples/bottleneck.yml
index 6985cc3e..b09d10b1 100644
--- a/examples/bottleneck.yml
+++ b/examples/bottleneck.yml
@@ -4,9 +4,9 @@ time_units: generations
demes:
- id: our_population
description: Bottleneck population using epochs
+ start_time: 10000
epochs:
- start_size: 1e4
- start_time: 10000
end_time: 500
- start_size: 1e2
end_time: 100
diff --git a/examples/browning_america.yml b/examples/browning_america.yml
index a8e20f12..f8392013 100644
--- a/examples/browning_america.yml
+++ b/examples/browning_america.yml
@@ -45,9 +45,9 @@ demes:
description: Admixed America
ancestors: [AFR, EUR, EAS]
proportions: [0.167, 0.333, 0.5]
+ start_time: 12
epochs:
- - start_time: 12
- start_size: 30000
+ - start_size: 30000
end_size: 54664
end_time: 0
migrations:
diff --git a/examples/jacobs_papuans.yml b/examples/jacobs_papuans.yml
index 347977b0..fc5602a8 100644
--- a/examples/jacobs_papuans.yml
+++ b/examples/jacobs_papuans.yml
@@ -40,21 +40,21 @@ demes:
start_size: 826.0
- id: Den2
ancestors: [DenA]
+ start_time: 12500.0
epochs:
- - start_time: 12500.0
- start_size: 13249.0
+ - start_size: 13249.0
end_time: 0
- id: Den1
ancestors: [DenA]
+ start_time: 9750.0
epochs:
- - start_time: 9750.0
- start_size: 13249.0
+ - start_size: 13249.0
end_time: 0
- id: Nea1
ancestors: [NeaA]
+ start_time: 3375.0
epochs:
- - start_time: 3375.0
- start_size: 13249.0
+ - start_size: 13249.0
end_time: 0
- id: Ghost
ancestors: [YRI]
@@ -84,9 +84,9 @@ demes:
start_size: 9025.0
- id: CEU
ancestors: [CHB]
+ start_time: 1293.0
epochs:
- - start_time: 1293.0
- start_size: 6962.0
+ - start_size: 6962.0
end_time: 0
migrations:
symmetric:
diff --git a/examples/offshoots.yml b/examples/offshoots.yml
index 4f8d6f3d..a145efaf 100644
--- a/examples/offshoots.yml
+++ b/examples/offshoots.yml
@@ -11,16 +11,16 @@ demes:
- id: offshoot1
description: More recent offshoot population
ancestors: [ancestral]
+ start_time: 500
epochs:
- - start_time: 500
- start_size: 100
+ - start_size: 100
end_time: 0
- id: offshoot2
description: More ancient offshoot population
ancestors: [ancestral]
+ start_time: 1000
epochs:
- - start_time: 1000
- start_size: 200
+ - start_size: 200
end_time: 0
migrations:
asymmetric:
diff --git a/examples/zigzag.yml b/examples/zigzag.yml
index 68396f7c..969c63b5 100644
--- a/examples/zigzag.yml
+++ b/examples/zigzag.yml
@@ -11,21 +11,15 @@ demes:
epochs:
- end_time: 34133.31
start_size: 7156
- - start_time: 34133.31
- end_time: 8533.33
+ - end_time: 8533.33
end_size: 71560
- - start_time: 8533.33
- end_time: 2133.33
+ - end_time: 2133.33
end_size: 7156
- - start_time: 2133.33
- end_time: 533.33
+ - end_time: 533.33
end_size: 71560
- - start_time: 533.33
- end_time: 133.33
+ - end_time: 133.33
end_size: 7156
- - start_time: 133.33
- end_time: 33.333
+ - end_time: 33.333
end_size: 71560
- - start_time: 33.333
- end_time: 0
+ - end_time: 0
end_size: 71560
diff --git a/requirements.txt b/requirements.txt
index 73e68e5f..731dcbc1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,6 @@ attrs==20.3.0
black==20.8b1
flake8==3.8.4
hypothesis==6.1.1
-jsonschema==3.2.0
jupyter-sphinx==0.3.2
msprime==0.7.4
mypy==0.800
diff --git a/schema/graph.json b/schema/graph.json
deleted file mode 100644
index 7feec3f9..00000000
--- a/schema/graph.json
+++ /dev/null
@@ -1,183 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "definitions": {
- "id": {
- "type": "string"
- },
- "rate": {
- "type": "number",
- "minumum": 0,
- "maximum": 1
- },
- "proportion": {
- "type": "number",
- "exclusiveMinumum": 0,
- "maximum": 1
- },
- "size": {
- "type": "number",
- "exclusiveMinimum": 0,
- "exclusiveMaximum": Infinity
- },
- "start_time": {
- "type": "number",
- "exclusiveMinimum": 0,
- "maximum": Infinity
- },
- "end_time": {
- "type": "number",
- "minimum": 0,
- "exclusiveMaximum": Infinity
- },
- "epoch": {
- "type": "object",
- "properties": {
- "start_size": {
- "$ref": "#/definitions/size"
- },
- "end_size": {
- "$ref": "#/definitions/size"
- },
- "size_function": {
- "type": "string"
- },
- "start_time": {
- "$ref": "#/definitions/start_time"
- },
- "end_time": {
- "$ref": "#/definitions/end_time"
- },
- "cloning_rate": {
- "$ref": "#/definitions/rate",
- "default": 0
- },
- "selfing_rate": {
- "$ref": "#/definitions/rate",
- "default": 0
- }
- },
- "required": ["start_size", "end_size", "size_function", "start_time", "end_time"]
- },
- "deme": {
- "type": "object",
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "description": {
- "type": "string"
- },
- "ancestors": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/id"
- },
- "default": []
- },
- "proportions": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/proportion"
- },
- "default": []
- },
- "epochs": {
- "type": "array",
- "minItems": 1,
- "items": {
- "$ref": "#/definitions/epoch"
- }
- }
- },
- "required" : ["id", "epochs"]
- },
- "pulse": {
- "type": "object",
- "properties": {
- "source": {
- "$ref": "#/definitions/id"
- },
- "dest": {
- "$ref": "#/definitions/id"
- },
- "time": {
- "type": "number",
- "exclusiveMinimum": 0,
- "exclusiveMaximum": Infinity
- },
- "proportion": {
- "$ref": "#/definitions/proportion"
- }
- },
- "required" : ["source", "dest", "time", "proportion"]
- },
- "asymmetric_migration": {
- "type": "object",
- "properties": {
- "source": {
- "$ref": "#/definitions/id"
- },
- "dest": {
- "$ref": "#/definitions/id"
- },
- "start_time": {
- "$ref": "#/definitions/start_time"
- },
- "end_time": {
- "$ref": "#/definitions/end_time"
- },
- "rate": {
- "$ref": "#/definitions/rate"
- }
- },
- "required" : ["source", "dest", "start_time", "end_time", "rate"]
- }
- },
- "title": "Fully-qualified demes graph",
- "type": "object",
- "properties": {
- "description": {
- "type": "string"
- },
- "doi": {
- "type": "array",
- "items": {
- "type" : "string"
- }
- },
- "time_units": {
- "type": "string"
- },
- "generation_time": {
- "type": "number",
- "exclusiveMinimum": 0,
- "exclusiveMaximum": Infinity
- },
- "demes": {
- "type": "array",
- "minItems": 1,
- "items": {
- "$ref": "#/definitions/deme"
- }
- },
- "pulses": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/pulse"
- }
- },
- "migrations": {
- "type": "object",
- "properties": {
- "asymmetric": {
- "type": "array",
- "items": {
- "$ref" : "#/definitions/asymmetric_migration"
- }
- }
- },
- "required": ["asymmetric"]
- }
- },
- "required": ["description", "time_units", "demes"]
-}
| require deme.start_time and epoch.end_time in input files
This change is to follow the reference implementation. See https://github.com/popsim-consortium/demes-spec/issues/32
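For illustration, here is a sketch of the post-change conventions using the builder API from the patch above (deme names and sizes are made up): `start_time` now lives on the deme, every epoch spells out its `end_time`, and only the final epoch's `end_time` may be omitted (it defaults to 0).
```python
import demes

g = demes.Graph(description="example", time_units="generations")
# Root deme: start_time defaults to infinity, end_time comes from its epoch.
g.deme("ancestral", epochs=[demes.Epoch(start_size=1000, end_time=500)])
g.deme(
    "offshoot",
    ancestors=["ancestral"],
    start_time=500,  # set on the deme itself, not on its first epoch
    epochs=[demes.Epoch(start_size=100, end_time=0)],
)
```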
| 2021-01-29T05:23:46 | 0.0 | [] | [] |
|||
popsim-consortium/demes-python | popsim-consortium__demes-python-186 | 3b23538845b78275ecd1db25c4d49425f4b84d40 | diff --git a/demes/convert/moments_.py b/demes/convert/moments_.py
index 17a5b207..e48e5012 100644
--- a/demes/convert/moments_.py
+++ b/demes/convert/moments_.py
@@ -155,7 +155,7 @@ def augment_with_ancient_samples(g, sampled_demes, sample_times):
sampled_demes[ii] = sd_frozen
g.deme(
id=sd_frozen,
- epochs=[Epoch(start_time=st, end_time=0, initial_size=1)],
+ epochs=[Epoch(start_time=st, end_time=0, start_size=1)],
ancestors=[sd],
)
return g, sampled_demes, frozen_demes
@@ -259,7 +259,7 @@ def get_integration_parameters(g, demes_present, frozen_list, Ne=None):
if len(preds) == 0:
root_deme = deme_id
break
- Ne = g[root_deme].epochs[0].initial_size
+ Ne = g[root_deme].epochs[0].start_size
for interval, live_demes in sorted(demes_present.items())[::-1]:
# get intergration time for interval
@@ -338,37 +338,33 @@ def sizes_at_time(g, deme_id, time_interval):
size_function = epoch.size_function
if size_function == "constant":
- start_size = end_size = epoch.initial_size
+ start_size = end_size = epoch.start_size
if epoch.start_time == time_interval[0]:
- start_size = epoch.initial_size
+ start_size = epoch.start_size
else:
if size_function == "exponential":
- start_size = epoch.initial_size * np.exp(
- np.log(epoch.final_size / epoch.initial_size)
+ start_size = epoch.start_size * np.exp(
+ np.log(epoch.end_size / epoch.start_size)
* (epoch.start_time - time_interval[0])
/ epoch.time_span
)
elif size_function == "linear":
frac = (epoch.start_time - time_interval[0]) / epoch.time_span
- start_size = epoch.initial_size + frac * (
- epoch.final_size - epoch.initial_size
- )
+ start_size = epoch.start_size + frac * (epoch.end_size - epoch.start_size)
if epoch.end_time == time_interval[1]:
- end_size = epoch.final_size
+ end_size = epoch.end_size
else:
if size_function == "exponential":
- end_size = epoch.initial_size * np.exp(
- np.log(epoch.final_size / epoch.initial_size)
+ end_size = epoch.start_size * np.exp(
+ np.log(epoch.end_size / epoch.start_size)
* (epoch.start_time - time_interval[1])
/ epoch.time_span
)
elif size_function == "linear":
frac = (epoch.start_time - time_interval[1]) / epoch.time_span
- end_size = epoch.initial_size + frac * (
- epoch.final_size - epoch.initial_size
- )
+ end_size = epoch.start_size + frac * (epoch.end_size - epoch.start_size)
return start_size, end_size, size_function
diff --git a/demes/convert/msprime_.py b/demes/convert/msprime_.py
index 7e730d06..f755dafb 100644
--- a/demes/convert/msprime_.py
+++ b/demes/convert/msprime_.py
@@ -33,8 +33,8 @@ def to_msprime(graph: demes.Graph):
pop_id = {deme.id: j for j, deme in enumerate(graph.demes)}
def growth_rate(epoch: demes.Epoch) -> float:
- initial_size = typing.cast(float, epoch.final_size)
- final_size = typing.cast(float, epoch.initial_size)
+ initial_size = typing.cast(float, epoch.end_size)
+ final_size = typing.cast(float, epoch.start_size)
if initial_size == final_size:
growth_rate = 0.0
else:
@@ -61,7 +61,7 @@ def growth_rate(epoch: demes.Epoch) -> float:
initial_size = Ne_invalid
_growth_rate = 0.0
else:
- initial_size = typing.cast(float, deme.epochs[-1].final_size)
+ initial_size = typing.cast(float, deme.epochs[-1].end_size)
_growth_rate = growth_rate(deme.epochs[-1])
population_configurations.append(
msprime.PopulationConfiguration(
@@ -89,7 +89,7 @@ def growth_rate(epoch: demes.Epoch) -> float:
demographic_events.append(
msprime.PopulationParametersChange(
time=epoch.end_time,
- initial_size=epoch.final_size,
+ initial_size=epoch.end_size,
growth_rate=growth_rate(epoch),
population_id=pop_id[deme.id],
)
@@ -240,7 +240,7 @@ def from_msprime(
gtmp["demes"][pop_name] = {
"ancestors": [],
"proportions": [],
- "epochs": [demes.Epoch(initial_size=1)],
+ "epochs": [demes.Epoch(start_size=1)],
}
pop_param_changes.add(pop_name)
@@ -257,7 +257,7 @@ def from_msprime(
gtmp["demes"][parent] = {
"ancestors": [],
"proportions": [],
- "epochs": [demes.Epoch(initial_size=1)],
+ "epochs": [demes.Epoch(start_size=1)],
}
if math.isclose(sum(proportions), 1):
@@ -266,7 +266,7 @@ def from_msprime(
# Set attributes after deme creation, to avoid internal
# checks about the ancestors' existence time intervals.
gtmp["demes"][child]["epochs"] = [
- demes.Epoch(start_time=ddb_epoch.start_time, initial_size=1)
+ demes.Epoch(start_time=ddb_epoch.start_time, start_size=1)
]
gtmp["demes"][child]["ancestors"] = ancestors
gtmp["demes"][child]["proportions"] = proportions
@@ -296,14 +296,14 @@ def from_msprime(
last_epoch = epochs[deme_id][-1]
last_epoch.end_time = ddb_epoch.start_time
if last_epoch.start_time == ddb_epoch.end_time:
- last_epoch.initial_size = pop.end_size
- last_epoch.final_size = pop.start_size
+ last_epoch.start_size = pop.end_size
+ last_epoch.end_size = pop.start_size
if name[j] in pop_param_changes:
# Add new epoch, to be fixed in the next ddb_epoch iteration.
epochs[deme_id].append(
demes.Epoch(
- start_time=ddb_epoch.start_time, end_time=0, initial_size=1
+ start_time=ddb_epoch.start_time, end_time=0, start_size=1
)
)
@@ -337,8 +337,8 @@ def from_msprime(
epochs[deme_id][0] = demes.Epoch(
start_time=epoch.start_time,
end_time=epoch.end_time,
- initial_size=epoch.final_size,
- final_size=epoch.final_size,
+ start_size=epoch.end_size,
+ end_size=epoch.end_size,
)
# Create a fresh demes graph, now that we have complete epoch information
@@ -357,8 +357,8 @@ def from_msprime(
demes.Epoch(
start_time=epoch.start_time,
end_time=epoch.end_time,
- initial_size=epoch.initial_size,
- final_size=epoch.final_size,
+ start_size=epoch.start_size,
+ end_size=epoch.end_size,
)
for epoch in epochs[deme_id]
],
diff --git a/demes/demes.py b/demes/demes.py
index 266d841e..13b61505 100644
--- a/demes/demes.py
+++ b/demes/demes.py
@@ -125,9 +125,9 @@ class Epoch:
:ivar start_time: The start time of the epoch.
:ivar end_time: The end time of the epoch (must be specified).
- :ivar initial_size: Population size at ``start_time``.
- :ivar final_size: Population size at ``end_time``.
- If ``initial_size != final_size``, the population size changes
+ :ivar start_size: Population size at ``start_time``.
+ :ivar end_size: Population size at ``end_time``.
+ If ``start_size != end_size``, the population size changes
monotonically between the start and end times.
:ivar size_function: The size change function. Common options are constant,
exponential, or linear, though any string is valid. Warning: downstream
@@ -144,11 +144,11 @@ class Epoch:
default=None,
validator=attr.validators.optional([int_or_float, non_negative, finite]),
)
- initial_size: Optional[Size] = attr.ib(
+ start_size: Optional[Size] = attr.ib(
default=None,
validator=attr.validators.optional([int_or_float, positive, finite]),
)
- final_size: Optional[Size] = attr.ib(
+ end_size: Optional[Size] = attr.ib(
default=None,
validator=attr.validators.optional([int_or_float, positive, finite]),
)
@@ -168,8 +168,8 @@ class Epoch:
)
def __attrs_post_init__(self):
- if self.initial_size is None and self.final_size is None:
- raise ValueError("must set either initial_size or final_size")
+ if self.start_size is None and self.end_size is None:
+ raise ValueError("must set either start_size or end_size")
if (
self.start_time is not None
and self.end_time is not None
@@ -178,20 +178,18 @@ def __attrs_post_init__(self):
raise ValueError("must have start_time > end_time")
if (
self.start_time is not None
- and self.initial_size is not None
- and self.final_size is not None
+ and self.start_size is not None
+ and self.end_size is not None
):
- if math.isinf(self.start_time) and self.initial_size != self.final_size:
+ if math.isinf(self.start_time) and self.start_size != self.end_size:
raise ValueError("if start time is inf, must be a constant size epoch")
if (
self.size_function == "constant"
- and self.initial_size is not None
- and self.final_size is not None
- and self.initial_size != self.final_size
+ and self.start_size is not None
+ and self.end_size is not None
+ and self.start_size != self.end_size
):
- raise ValueError(
- "initial_size != final_size, but size_function is constant"
- )
+ raise ValueError("start_size != end_size, but size_function is constant")
@property
def time_span(self):
@@ -211,7 +209,7 @@ def assert_close(
Returns true if the epoch and ``other`` epoch implement essentially
the same epoch and raises AssertionError otherwise.
Compares values of the following attributes:
- ``start_time``, ``end_time``, ``initial_size``, ``final_size``,
+ ``start_time``, ``end_time``, ``start_size``, ``end_size``,
``size_function``, ``selfing_rate``, ``cloning_rate``.
:param other: The epoch to compare against.
@@ -237,14 +235,14 @@ def assert_close(
self.end_time, other.end_time, rel_tol=rel_tol, abs_tol=abs_tol
), f"Failed for end_time {self.end_time} != {other.end_time} (other)."
assert isclose(
- self.initial_size, other.initial_size, rel_tol=rel_tol, abs_tol=abs_tol
+ self.start_size, other.start_size, rel_tol=rel_tol, abs_tol=abs_tol
), (
- f"Failed for initial_size "
- f"{self.initial_size} != {other.initial_size} (other)."
+ f"Failed for start_size "
+ f"{self.start_size} != {other.start_size} (other)."
)
assert isclose(
- self.final_size, other.final_size, rel_tol=rel_tol, abs_tol=abs_tol
- ), f"Failed for final_size {self.final_size} != {other.final_size} (other)."
+ self.end_size, other.end_size, rel_tol=rel_tol, abs_tol=abs_tol
+ ), f"Failed for end_size {self.end_size} != {other.end_size} (other)."
assert self.size_function == other.size_function
assert isclose(
self.selfing_rate, other.selfing_rate, rel_tol=rel_tol, abs_tol=abs_tol
@@ -1138,6 +1136,7 @@ class Graph:
function uses this API internally.
:ivar str description: A human readable description of the demography.
+ May be ``None``.
:ivar str time_units: The units of time used for the demography. This is
commonly ``years`` or ``generations``, but can be any string.
This field is intended to be useful for documenting a demography,
@@ -1164,8 +1163,11 @@ class Graph:
:vartype defaults: dict
"""
- description: str = attr.ib(
- validator=[attr.validators.instance_of(str), nonzero_len]
+ description: Optional[str] = attr.ib(
+ default=None,
+ validator=attr.validators.optional(
+ [attr.validators.instance_of(str), nonzero_len]
+ ),
)
time_units: str = attr.ib(validator=[attr.validators.instance_of(str), nonzero_len])
generation_time: Optional[Time] = attr.ib(
@@ -1332,8 +1334,8 @@ def deme(
epochs=None,
start_time=None,
end_time=None,
- initial_size=None,
- final_size=None,
+ start_size=None,
+ end_size=None,
defaults={},
) -> Deme:
"""
@@ -1362,15 +1364,15 @@ def deme(
:param float end_time: The time at which this deme stops existing,
in units of ``time_units`` before the present.
If not specified, defaults to ``0.0`` (the present).
- :param initial_size: The initial population size of the deme.
+ :param start_size: The initial population size of the deme.
This must be provided.
- :param final_size: The final population size of the deme. If ``None``,
- the deme has a constant ``initial_size`` population size.
+ :param end_size: The final population size of the deme. If ``None``,
+ the deme has a constant ``start_size`` population size.
:param epochs: Epochs that define population sizes, selfing rates, and
cloning rates, for the deme over various time periods.
If not specified, a single epoch will be created for the deme that
- spans from ``start_time`` to ``end_time``, using the ``initial_size``,
- ``final_size``, ``selfing_rate`` and ``cloning_rate`` provided.
+ spans from ``start_time`` to ``end_time``, using the ``start_size``,
+ ``end_size``, ``selfing_rate`` and ``cloning_rate`` provided.
:param defaults: Default attributes for epochs, including cloning_rate
and selfing_rate.
:return: Newly created deme.
@@ -1381,10 +1383,10 @@ def deme(
raise ValueError(f"deme {id} already exists in this graph")
if epochs is None:
raise ValueError(f"deme {id} must have at least one specified epoch")
- if initial_size is None and epochs is not None:
- initial_size = epochs[0].initial_size
- if initial_size is None:
- raise ValueError(f"must set initial_size for deme {id}")
+ if start_size is None and epochs is not None:
+ start_size = epochs[0].start_size
+ if start_size is None:
+ raise ValueError(f"must set start_size for deme {id}")
if ancestors is None:
ancestors = []
if not isinstance(ancestors, list):
@@ -1457,12 +1459,12 @@ def deme(
)
# for each subsequent epoch, fill in start size, final size,
# and size function as necessary based on last epoch
- if epochs[i].initial_size is None:
- epochs[i].initial_size = epochs[i - 1].final_size
- if epochs[i].final_size is None:
- epochs[i].final_size = epochs[i].initial_size
+ if epochs[i].start_size is None:
+ epochs[i].start_size = epochs[i - 1].end_size
+ if epochs[i].end_size is None:
+ epochs[i].end_size = epochs[i].start_size
if epochs[i].size_function is None:
- if epochs[i].initial_size == epochs[i].final_size:
+ if epochs[i].start_size == epochs[i].end_size:
epochs[i].size_function = "constant"
else:
epochs[i].size_function = "exponential"
@@ -1847,8 +1849,8 @@ def simplify_epochs(data):
del epoch["start_time"]
if epoch["size_function"] in ("constant", "exponential"):
del epoch["size_function"]
- if epoch["initial_size"] == epoch["final_size"]:
- del epoch["final_size"]
+ if epoch["start_size"] == epoch["end_size"]:
+ del epoch["end_size"]
for deme in data["demes"]:
del deme["start_time"]
diff --git a/docs/api.rst b/docs/api.rst
index 5ffb74da..1c05df7b 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -32,12 +32,12 @@ pulses via the methods available on this class.
time_units="years",
generation_time=25,
)
- g.deme("ancestral", epochs=[demes.Epoch(end_time=220e3, initial_size=7300)])
- g.deme("AMH", ancestors=["ancestral"], epochs=[demes.Epoch(end_time=140e3, initial_size=12300)])
- g.deme("OOA", ancestors=["AMH"], epochs=[demes.Epoch(end_time=21.2e3, initial_size=2100)])
- g.deme("YRI", ancestors=["AMH"], epochs=[demes.Epoch(initial_size=12300, end_time=0)])
- g.deme("CEU", ancestors=["OOA"], epochs=[demes.Epoch(initial_size=1000, final_size=29725, end_time=0)])
- g.deme("CHB", ancestors=["OOA"], epochs=[demes.Epoch(initial_size=510, final_size=54090, end_time=0)])
+ g.deme("ancestral", epochs=[demes.Epoch(end_time=220e3, start_size=7300)])
+ g.deme("AMH", ancestors=["ancestral"], epochs=[demes.Epoch(end_time=140e3, start_size=12300)])
+ g.deme("OOA", ancestors=["AMH"], epochs=[demes.Epoch(end_time=21.2e3, start_size=2100)])
+ g.deme("YRI", ancestors=["AMH"], epochs=[demes.Epoch(start_size=12300, end_time=0)])
+ g.deme("CEU", ancestors=["OOA"], epochs=[demes.Epoch(start_size=1000, end_size=29725, end_time=0)])
+ g.deme("CHB", ancestors=["OOA"], epochs=[demes.Epoch(start_size=510, end_size=54090, end_time=0)])
g.symmetric_migration(demes=["YRI", "OOA"], rate=25e-5)
g.symmetric_migration(demes=["YRI", "CEU"], rate=3e-5)
g.symmetric_migration(demes=["YRI", "CHB"], rate=1.9e-5)
@@ -65,7 +65,7 @@ with migration between adjacent demes, can be done with the following code.
)
for j in range(M):
- g.deme(f"deme{j}", epochs=[demes.Epoch(initial_size=1000, end_time=0)])
+ g.deme(f"deme{j}", epochs=[demes.Epoch(start_size=1000, end_time=0)])
if j > 0:
g.symmetric_migration(demes=[f"deme{j - 1}", f"deme{j}"], rate=1e-5)
g.symmetric_migration(demes=[f"deme{M - 1}", "deme0"], rate=1e-5)
diff --git a/docs/tutorial.rst b/docs/tutorial.rst
index 68ff50f5..687d5d50 100644
--- a/docs/tutorial.rst
+++ b/docs/tutorial.rst
@@ -78,8 +78,8 @@ Defining epochs
A ``deme`` can be made to have more interesting demographic history, such as
size changes or non-constant size functions. This is done by defining
``epochs`` that span the time that a deme exists. When defining an epoch,
-we specify its ``start_time`` and ``end_time``, along with its ``initial_size``
-and ``final_size`` (or just ``initial_size`` if the population size is constant
+we specify its ``start_time`` and ``end_time``, along with its ``start_size``
+and ``end_size`` (or just ``start_size`` if the population size is constant
over that epoch).
For example, the same minimal demography :ref:`above <minimal-demography>`
@@ -91,7 +91,7 @@ the deme:
demes:
constant_size_deme:
epochs:
- - initial_size: 1000
+ - start_size: 1000
end_time: 0
By default, the first listed ``epoch`` has a ``start_time`` of :math:`\infty`
@@ -110,8 +110,8 @@ in order, from most ancient to most recent.
:linenos:
We also see, again, that for constant size epochs we only need to specify
-the ``initial_size``, and if no ``initial_size`` is given, the ``epoch`` inherits
-the ``final_size`` of the previous epoch.
+the ``start_size``, and if no ``start_size`` is given, the ``epoch`` inherits
+the ``end_size`` of the previous epoch.
In the previous example, we have a ``deme`` that expands from an effective population
size of 10,000 to 20,000 250 thousand years ago, goes through a bottleneck
@@ -120,7 +120,7 @@ bottleneck size of 1,500 to 40,000 from 30 thousand years ago until present
time.
.. note::
- If no ``size_function`` is given, when the ``initial_size`` and ``final_size``
+ If no ``size_function`` is given, when the ``start_size`` and ``end_size``
are different, the default size function is exponential. However, other
size functions, such as linear, are permitted and can be specified.
@@ -135,9 +135,9 @@ interact (for illustration, not realism), we could write:
demes:
deme1:
- initial_size: 1000
+ start_size: 1000
deme2:
- initial_size: 2000
+ start_size: 2000
Population branches and splits
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -151,11 +151,11 @@ deme, we would specify that its ``ancestor`` is its parental deme. For example,
demes:
parental_deme:
description: The parental deme, that exists for all time.
- initial_size: 1000
+ start_size: 1000
child_deme:
description: The child deme, that exists from the branching time.
ancestors: parental_deme
- initial_size: 500
+ start_size: 500
start_time: 100
Here, the child deme split off from the parental deme 100 time units ago,
@@ -172,20 +172,20 @@ is written as
demes:
parental_deme:
description: The parental deme, that splits into three demes.
- initial_size: 1000
+ start_size: 1000
end_time: 200
child1:
description: The first child deme.
ancestors: parental_deme
- initial_size: 500
+ start_size: 500
child2:
description: The second child deme.
ancestors: parental_deme
- initial_size: 800
+ start_size: 800
child3:
description: The third child deme.
ancestors: parental_deme
- initial_size: 600
+ start_size: 600
Here, the parental deme exists until time 200, and each child deme exists
from time 200 to present time.
@@ -225,9 +225,9 @@ over time:
demes:
deme1:
- initial_size: 1000
+ start_size: 1000
deme2:
- initial_size: 1000
+ start_size: 1000
migrations:
symmetric:
- demes: deme1, deme2
@@ -291,16 +291,16 @@ demes listed in ``ancestors``.
demes:
parental1:
- initial_size: 1000
+ start_size: 1000
end_time: 100
parental2:
- initial_size: 2000
+ start_size: 2000
end_time: 100
merged_deme:
ancestors: parental1, parental2
proportions: 0.7, 0.3
- initial_size: 3000
- final_size: 5000
+ start_size: 3000
+ end_size: 5000
Here, two demes merge to form a single deme 100 time units ago, which
then grows exponentially from 3,000 to 5,000 at present time.
diff --git a/docs/tutorial_examples/browning_america.yml b/docs/tutorial_examples/browning_america.yml
index 8912d448..a8e20f12 100644
--- a/docs/tutorial_examples/browning_america.yml
+++ b/docs/tutorial_examples/browning_america.yml
@@ -8,38 +8,38 @@ demes:
description: Equilibrium/root population
epochs:
- end_time: 5920
- initial_size: 7310
+ start_size: 7310
- id: AMH
description: Anatomically modern humans
ancestors: [ancestral]
epochs:
- end_time: 2040
- initial_size: 14474
+ start_size: 14474
- id: OOA
description: Bottleneck out-of-Africa population
ancestors: [AMH]
epochs:
- end_time: 920
- initial_size: 1861
+ start_size: 1861
- id: AFR
description: African population
ancestors: [AMH]
epochs:
- - initial_size: 14474
+ - start_size: 14474
end_time: 0
- id: EUR
description: European population
ancestors: [OOA]
epochs:
- - initial_size: 1000
- final_size: 34039
+ - start_size: 1000
+ end_size: 34039
end_time: 0
- id: EAS
description: East Asian population
ancestors: [OOA]
epochs:
- - initial_size: 510
- final_size: 45852
+ - start_size: 510
+ end_size: 45852
end_time: 0
- id: ADMIX
description: Admixed America
@@ -47,8 +47,8 @@ demes:
proportions: [0.167, 0.333, 0.5]
epochs:
- start_time: 12
- initial_size: 30000
- final_size: 54664
+ start_size: 30000
+ end_size: 54664
end_time: 0
migrations:
symmetric:
diff --git a/docs/tutorial_examples/isolation_with_migration.yml b/docs/tutorial_examples/isolation_with_migration.yml
index 3ce3f842..263ac913 100644
--- a/docs/tutorial_examples/isolation_with_migration.yml
+++ b/docs/tutorial_examples/isolation_with_migration.yml
@@ -5,21 +5,21 @@ demes:
root:
description: Parental deme
epochs:
- - initial_size: 10000
+ - start_size: 10000
end_time: 1200
deme1:
description: First child deme
ancestors: root
epochs:
- - initial_size: 2000
- final_size: 30000
+ - start_size: 2000
+ end_size: 30000
end_time: 0
deme2:
description: Second child deme
ancestors: root
epochs:
- - initial_size: 8000
- final_size: 15000
+ - start_size: 8000
+ end_size: 15000
end_time: 0
migrations:
symmetric:
diff --git a/docs/tutorial_examples/minimal.yml b/docs/tutorial_examples/minimal.yml
index d63a5609..8b262425 100644
--- a/docs/tutorial_examples/minimal.yml
+++ b/docs/tutorial_examples/minimal.yml
@@ -3,5 +3,5 @@ time_units: generations
demes:
constant_size_deme:
epochs:
- - initial_size: 1000
+ - start_size: 1000
end_time: 0
diff --git a/docs/tutorial_examples/one_pop_epochs.yml b/docs/tutorial_examples/one_pop_epochs.yml
index 4d872ff2..9a03f774 100644
--- a/docs/tutorial_examples/one_pop_epochs.yml
+++ b/docs/tutorial_examples/one_pop_epochs.yml
@@ -5,13 +5,13 @@ generation_time: 25
demes:
our_population:
description: A deme with multiple epochs.
- initial_size: 10000
+ start_size: 10000
epochs:
- - initial_size: 10000
+ - start_size: 10000
end_time: 250000
- - initial_size: 20000
+ - start_size: 20000
end_time: 60000
- - initial_size: 1500
+ - start_size: 1500
end_time: 30000
- - final_size: 40000
+ - end_size: 40000
end_time: 0
diff --git a/examples/bottleneck.yml b/examples/bottleneck.yml
index 1c3b4f4f..6985cc3e 100644
--- a/examples/bottleneck.yml
+++ b/examples/bottleneck.yml
@@ -5,10 +5,10 @@ demes:
- id: our_population
description: Bottleneck population using epochs
epochs:
- - initial_size: 1e4
+ - start_size: 1e4
start_time: 10000
end_time: 500
- - initial_size: 1e2
+ - start_size: 1e2
end_time: 100
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 0
diff --git a/examples/browning_america.yml b/examples/browning_america.yml
index 8912d448..a8e20f12 100644
--- a/examples/browning_america.yml
+++ b/examples/browning_america.yml
@@ -8,38 +8,38 @@ demes:
description: Equilibrium/root population
epochs:
- end_time: 5920
- initial_size: 7310
+ start_size: 7310
- id: AMH
description: Anatomically modern humans
ancestors: [ancestral]
epochs:
- end_time: 2040
- initial_size: 14474
+ start_size: 14474
- id: OOA
description: Bottleneck out-of-Africa population
ancestors: [AMH]
epochs:
- end_time: 920
- initial_size: 1861
+ start_size: 1861
- id: AFR
description: African population
ancestors: [AMH]
epochs:
- - initial_size: 14474
+ - start_size: 14474
end_time: 0
- id: EUR
description: European population
ancestors: [OOA]
epochs:
- - initial_size: 1000
- final_size: 34039
+ - start_size: 1000
+ end_size: 34039
end_time: 0
- id: EAS
description: East Asian population
ancestors: [OOA]
epochs:
- - initial_size: 510
- final_size: 45852
+ - start_size: 510
+ end_size: 45852
end_time: 0
- id: ADMIX
description: Admixed America
@@ -47,8 +47,8 @@ demes:
proportions: [0.167, 0.333, 0.5]
epochs:
- start_time: 12
- initial_size: 30000
- final_size: 54664
+ start_size: 30000
+ end_size: 54664
end_time: 0
migrations:
symmetric:
diff --git a/examples/cloning_example.yml b/examples/cloning_example.yml
index 17795b3e..3612a7cc 100644
--- a/examples/cloning_example.yml
+++ b/examples/cloning_example.yml
@@ -8,26 +8,26 @@ demes:
description: Root population
epochs:
- end_time: 1000
- initial_size: 1e3
+ start_size: 1e3
- id: pop1
description: Population with epochs and changing cloning rates
ancestors: [root]
defaults:
cloning_rate: 0.2
epochs:
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 500
- - initial_size: 1e2
+ - start_size: 1e2
end_time: 100
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 0
cloning_rate: 0.5
- id: pop2
description: Population with epochs and changing cloning rates
ancestors: [root]
epochs:
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 500
- - initial_size: 1e2
+ - start_size: 1e2
end_time: 0
cloning_rate: 1.0
diff --git a/examples/gutenkunst_ooa.yml b/examples/gutenkunst_ooa.yml
index dd4eecae..2988360b 100644
--- a/examples/gutenkunst_ooa.yml
+++ b/examples/gutenkunst_ooa.yml
@@ -13,38 +13,38 @@ demes:
description: Equilibrium/root population
epochs:
- end_time: 220e3
- initial_size: 7300
+ start_size: 7300
- id: AMH
description: Anatomically modern humans
ancestors: [ancestral]
epochs:
- end_time: 140e3
- initial_size: 12300
+ start_size: 12300
- id: OOA
description: Bottleneck out-of-Africa population
ancestors: [AMH]
epochs:
- end_time: 21.2e3
- initial_size: 2100
+ start_size: 2100
- id: YRI
description: Yoruba in Ibadan, Nigeria
ancestors: [AMH]
epochs:
- - initial_size: 12300
+ - start_size: 12300
end_time: 0
- id: CEU
description: Utah Residents (CEPH) with Northern and Western European Ancestry
ancestors: [OOA]
epochs:
- - initial_size: 1000
- final_size: 29725
+ - start_size: 1000
+ end_size: 29725
end_time: 0
- id: CHB
description: Han Chinese in Beijing, China
ancestors: [OOA]
epochs:
- - initial_size: 510
- final_size: 54090
+ - start_size: 510
+ end_size: 54090
end_time: 0
migrations:
symmetric:
diff --git a/examples/jacobs_papuans.yml b/examples/jacobs_papuans.yml
index 4341f673..347977b0 100644
--- a/examples/jacobs_papuans.yml
+++ b/examples/jacobs_papuans.yml
@@ -13,80 +13,80 @@ demes:
- id: YRI
epochs:
- end_time: 20225.0
- initial_size: 32671.0
+ start_size: 32671.0
- end_time: 2218.0
- initial_size: 41563.0
+ start_size: 41563.0
- end_time: 0
- initial_size: 48433.0
+ start_size: 48433.0
- id: DenA
ancestors: [YRI]
start_time: 20225.0
epochs:
- end_time: 15090.0
- initial_size: 13249.0
+ start_size: 13249.0
- end_time: 12500.0
- initial_size: 100.0
+ start_size: 100.0
- end_time: 9750.0
- initial_size: 100.0
+ start_size: 100.0
- end_time: 0
- initial_size: 5083.0
+ start_size: 5083.0
- id: NeaA
ancestors: [DenA]
start_time: 15090.0
epochs:
- end_time: 3375.0
- initial_size: 13249.0
+ start_size: 13249.0
- end_time: 0
- initial_size: 826.0
+ start_size: 826.0
- id: Den2
ancestors: [DenA]
epochs:
- start_time: 12500.0
- initial_size: 13249.0
+ start_size: 13249.0
end_time: 0
- id: Den1
ancestors: [DenA]
epochs:
- start_time: 9750.0
- initial_size: 13249.0
+ start_size: 13249.0
end_time: 0
- id: Nea1
ancestors: [NeaA]
epochs:
- start_time: 3375.0
- initial_size: 13249.0
+ start_size: 13249.0
end_time: 0
- id: Ghost
ancestors: [YRI]
start_time: 2218.0
epochs:
- end_time: 2119.0
- initial_size: 1394.0
+ start_size: 1394.0
- end_time: 0
- initial_size: 8516.0
+ start_size: 8516.0
- id: Papuan
ancestors: [Ghost]
start_time: 1784.0
epochs:
- end_time: 1685.0
- initial_size: 243.0
+ start_size: 243.0
- end_time: 0
- initial_size: 8834.0
+ start_size: 8834.0
- id: CHB
ancestors: [Ghost]
start_time: 1758.0
epochs:
- end_time: 1659.0
- initial_size: 2231.0
+ start_size: 2231.0
- end_time: 1293.0
- initial_size: 12971.0
+ start_size: 12971.0
- end_time: 0
- initial_size: 9025.0
+ start_size: 9025.0
- id: CEU
ancestors: [CHB]
epochs:
- start_time: 1293.0
- initial_size: 6962.0
+ start_size: 6962.0
end_time: 0
migrations:
symmetric:
diff --git a/examples/linear_growth.yml b/examples/linear_growth.yml
index de88f4ff..171d07e7 100644
--- a/examples/linear_growth.yml
+++ b/examples/linear_growth.yml
@@ -6,12 +6,12 @@ demes:
- id: my_pop
description: Multi-epoch population
epochs:
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 1000
- - initial_size: 1e3
- final_size: 1e4
+ - start_size: 1e3
+ end_size: 1e4
end_time: 100
size_function: linear
- - initial_size: 1e4
- final_size: 1e2
+ - start_size: 1e4
+ end_size: 1e2
end_time: 0
diff --git a/examples/offshoots.yml b/examples/offshoots.yml
index d34768b2..4f8d6f3d 100644
--- a/examples/offshoots.yml
+++ b/examples/offshoots.yml
@@ -6,21 +6,21 @@ demes:
- id: ancestral
description: Main population
epochs:
- - initial_size: 1000
+ - start_size: 1000
end_time: 0
- id: offshoot1
description: More recent offshoot population
ancestors: [ancestral]
epochs:
- start_time: 500
- initial_size: 100
+ start_size: 100
end_time: 0
- id: offshoot2
description: More ancient offshoot population
ancestors: [ancestral]
epochs:
- start_time: 1000
- initial_size: 200
+ start_size: 200
end_time: 0
migrations:
asymmetric:
diff --git a/examples/selfing_example.yml b/examples/selfing_example.yml
index a4c3616c..b91566fe 100644
--- a/examples/selfing_example.yml
+++ b/examples/selfing_example.yml
@@ -9,7 +9,7 @@ demes:
defaults:
selfing_rate: 0.1
epochs:
- - initial_size: 1e3
+ - start_size: 1e3
end_time: 1000
- id: pop1
description: Population with epochs and changing selfing rates
@@ -17,20 +17,20 @@ demes:
defaults:
selfing_rate: 0.2
epochs:
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 500
- - initial_size: 1e2
+ - start_size: 1e2
end_time: 100
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 0
selfing_rate: 0.5
- id: pop2
description: Population with epochs and changing selfing rates
ancestors: [root]
epochs:
- - initial_size: 1e4
+ - start_size: 1e4
end_time: 500
selfing_rate: 0.9
- - initial_size: 1e2
+ - start_size: 1e2
end_time: 0
selfing_rate: 1.0
diff --git a/examples/zigzag.yml b/examples/zigzag.yml
index 9c505cac..e9f282fb 100644
--- a/examples/zigzag.yml
+++ b/examples/zigzag.yml
@@ -10,22 +10,22 @@ demes:
in this case).
epochs:
- end_time: 34133.31
- initial_size: 1431
+ start_size: 1431
- start_time: 34133.31
end_time: 8533.33
- final_size: 14312
+ end_size: 14312
- start_time: 8533.33
end_time: 2133.33
- final_size: 1431
+ end_size: 1431
- start_time: 2133.33
end_time: 533.33
- final_size: 14312
+ end_size: 14312
- start_time: 533.33
end_time: 133.33
- final_size: 1431
+ end_size: 1431
- start_time: 133.33
end_time: 33.333
- final_size: 14312
+ end_size: 14312
- start_time: 33.333
end_time: 0
- final_size: 14312
+ end_size: 14312
diff --git a/schema/graph.json b/schema/graph.json
index 8a1f050a..7feec3f9 100644
--- a/schema/graph.json
+++ b/schema/graph.json
@@ -32,10 +32,10 @@
"epoch": {
"type": "object",
"properties": {
- "initial_size": {
+ "start_size": {
"$ref": "#/definitions/size"
},
- "final_size": {
+ "end_size": {
"$ref": "#/definitions/size"
},
"size_function": {
@@ -56,7 +56,7 @@
"default": 0
}
},
- "required": ["initial_size", "final_size", "size_function", "start_time", "end_time"]
+ "required": ["start_size", "end_size", "size_function", "start_time", "end_time"]
},
"deme": {
"type": "object",
| allow Graph.description field to be missing/None.
See https://github.com/popsim-consortium/demes-spec/issues/42
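For illustration, a minimal sketch of what this permits, based on the attrs default added in the patch above (the deme name and sizes are made up):
```python
import demes

# description may now be omitted; it defaults to None.
g = demes.Graph(time_units="generations")
assert g.description is None
g.deme("pop0", epochs=[demes.Epoch(start_size=1000, end_time=0)])
```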
| 2021-01-27T14:14:59 | 0.0 | [] | [] |
|||
zmoog/refurbished | zmoog__refurbished-101 | c6f442e6b0bc111d625f375259dde2d9167f83cc | diff --git a/README.md b/README.md
index 9fa9355..a4b222d 100644
--- a/README.md
+++ b/README.md
@@ -1,22 +1,35 @@
# Refurbished
-Refurbished is a Python library and CLI tool to access the products information available on the [Apple Certified Refurbished](https://www.apple.com/shop/refurbished) section of the Apple Store.
+Refurbished is a CLI tool (and a Python [library on PyPI](https://pypi.org/project/refurbished/)) to access the product information available on the [Apple Certified Refurbished](https://www.apple.com/shop/refurbished) section of the Apple Store.
## Usage
-Refurbished can be used as a library or as a handly CLI tool to search for refurbished products from the terminal.
+### As a CLI Tool
-### CLI
-
-A quick search for Macs with a min saving or 300 EUR on the Italian store:
+A quick search for Macs with a min saving of 300 EUR on the Italian store:
```shell
$ rfrb it macs --min-saving=300
+ Refurbished Products
+
+ Current Previous Saving Name
+ ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
+ 1,699 2,229 24% (-530) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad-core a 2,0GHz e display Retina - Argento
+ 1,699 2,229 24% (-530) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad‐core a 2,0GHz e display Retina - Grigio siderale
+ 1,899 2,459 23% (-560) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad-core a 2,0GHz e display Retina - Argento
+ 1,899 2,459 23% (-560) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad‐core a 2,0GHz e display Retina - Grigio siderale
+ 1,999 2,349 15% (-350) MacBook Pro 14" ricondizionato con chip Apple M1 Pro, CPU 8‐core e GPU 14‐core - Argento
+ 1,999 2,349 15% (-350) MacBook Pro 14" ricondizionato con chip Apple M1 Pro, CPU 8‐core e GPU 14‐core - Grigio siderale
+ 2,429 2,849 15% (-420) MacBook Pro 16" ricondizionato con chip Apple M1 Pro, CPU 10‐core e GPU 16‐core - Argento
+ 2,429 2,849 15% (-420) MacBook Pro 14" ricondizionato con chip Apple M1 Pro, CPU 10‐core e GPU 16‐core - Grigio siderale
+ 2,429 2,849 15% (-420) MacBook Pro 16" ricondizionato con chip Apple M1 Pro, CPU 10‐core e GPU 16‐core - Grigio siderale
+ 2,429 2,849 15% (-420) MacBook Pro 14" ricondizionato con chip Apple M1 Pro, CPU 10‐core e GPU 16‐core - Argento
+ 2,629 3,079 15% (-450) MacBook Pro 16" ricondizionato con chip Apple M1 Pro, CPU 10‐core e GPU 16‐core - Argento
+ 2,629 3,079 15% (-450) MacBook Pro 16" ricondizionato con chip Apple M1 Pro, CPU 10‐core e GPU 16‐core - Grigio siderale
+ 3,369 3,949 15% (-580) MacBook Pro 16" ricondizionato con chip Apple M1 Max, CPU 10‐core e GPU 32‐core - Grigio siderale
+ 3,369 3,949 15% (-580) MacBook Pro 16" ricondizionato con chip Apple M1 Max, CPU 10‐core e GPU 32‐core - Argento
+
-1979.00 1679.00 300.00 (15.159171298635673%) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad‐core a 2,4GHz e display Retina - Grigio siderale
-2229.00 1889.00 340.00 (15.25347689546882%) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad-core a 2,0GHz e display Retina - Argento
-2229.00 1889.00 340.00 (15.25347689546882%) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad‐core a 2,0GHz e display Retina - Grigio siderale
-2459.00 2109.00 350.00 (14.233428222854819%) MacBook Pro 13,3" ricondizionato con Intel Core i5 quad-core a 2,0GHz e display Retina - Argento
```
#### Output formats
@@ -30,7 +43,7 @@ Refurbished supports several output formats:
Here are a few examples.
-##### text
+##### Text
```shell
$ rfrb it ipads --max-price 539
@@ -38,7 +51,7 @@ $ rfrb it ipads --max-price 539
639.00 539.00 100.00 (15.64945226917058%) iPad Air Wi-Fi 64GB ricondizionato - Celeste (quarta generazione)
```
-##### json
+##### JSON
```shell
$ rfrb it ipads --max-price 539 --format json
@@ -66,7 +79,7 @@ $ rfrb it ipads --max-price 539 --format json
]
```
-##### ndjson
+##### NDJSON
```shell
$ rfrb it ipads --max-price 539 --format ndjson
@@ -84,7 +97,7 @@ iPad Air Wi-Fi 64GB ricondizionato - Celeste (quarta generazione),ipad,it,https:
iPad Air Wi-Fi 64GB ricondizionato - Grigio siderale (quarta generazione),ipad,it,https://www.apple.com/it/shop/product/FYFM2TY/A/iPad-Air-Wi-Fi-64GB-ricondizionato-Grigio-siderale-quarta-generazione,539.00,639.00,100.00,0.16,FYFM2TY
```
-### Library
+### As a Python Library
The same search using the `refurbished` package in your own project:
@@ -108,6 +121,7 @@ MacBook Pro 13,3" ricondizionato con Intel Core i5 quad-core a 2,0GHz e display
- [price-parser](https://github.com/scrapinghub/price-parser)
- [pydantic](https://pydantic-docs.helpmanual.io/)
- [requests](https://requests.readthedocs.io/en/master/)
+- [rich](https://github.com/Textualize/rich)
## Development
diff --git a/refurbished/cli.py b/refurbished/cli.py
index 95efd2b..a684101 100644
--- a/refurbished/cli.py
+++ b/refurbished/cli.py
@@ -1,5 +1,10 @@
+import io
from typing import List
+from rich import box
+from rich.console import Console
+from rich.table import Table
+
from .model import Product
@@ -8,17 +13,28 @@ def __init__(self, values: List[Product]):
self.values = values
def str(self) -> str:
- if len(self.values) == 0:
- return "No products found"
- out = ""
- for p in self.values:
- out += (
- f"{p.previous_price} "
- f"{p.price} "
- f"{p.savings_price} "
- f"({p.saving_percentage * 100}%) {p.name}\n"
+ if not self.values:
+ return "No products found\n"
+
+ table = Table(title="Refurbished Products", box=box.SIMPLE)
+ table.add_column("Current")
+ table.add_column("Previous")
+ table.add_column("Saving")
+ table.add_column("Name")
+
+ for v in self.values:
+ table.add_row(
+ f"{v.price:,.0f}",
+ f"{v.previous_price:,.0f}",
+ f"{v.saving_percentage:.0%} (-{v.savings_price:,.0f})",
+ v.name,
)
- return out
+
+ # turn table into a string using the Console
+ console = Console(file=io.StringIO())
+ console.print(table)
+
+ return console.file.getvalue()
def data(self) -> List[Product]:
return self.values
diff --git a/requirements.txt b/requirements.txt
index 9de6b35..dcec156 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,3 +3,4 @@ click>=8.1.3
requests>=2.28.1
price-parser>=0.3.4
pydantic>=1.10.2
+rich>=12.6.0
diff --git a/setup.py b/setup.py
index 3073186..ce6e4a2 100644
--- a/setup.py
+++ b/setup.py
@@ -7,10 +7,11 @@
NAME = "refurbished"
REQUIRES = [
"beautifulsoup4 >= 4.11.1",
- "requests >= 2.28.1",
+ "click == 8.1.3",
"price-parser == 0.3.4",
"pydantic == 1.10.2",
- "click == 8.1.3"
+ "requests >= 2.28.1",
+ "rich >= 12.6.0",
]
setup(
| Text output format should use a table
The text output format should render the results as a table.
Here's an example:
```text
$ rfrb it iphones --name '12 Pro Max'
Refurbished Products
Current Previous Saving Name
────────────────────────────────────────────────────────────────────────────────────────────────
1,109 1,309 15% (-200) iPhone 12 Pro Max 256GB ricondizionato - Oro (Senza SIM)
1,309 1,539 15% (-230) iPhone 12 Pro Max 512GB ricondizionato - Oro (Senza SIM)
1,309 1,539 15% (-230) iPhone 12 Pro Max 512GB ricondizionato - Argento (Senza SIM)
```
| The [Rich](https://github.com/Textualize/rich#rich-library) library is a great starting point to format data as tables. | 2022-10-27T22:10:21 | 0.0 | [] | [] |
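For illustration, a minimal sketch of that suggestion using Rich's table API (the headings and row values mirror the example above and are otherwise illustrative):
```python
from rich import box
from rich.console import Console
from rich.table import Table

table = Table(title="Refurbished Products", box=box.SIMPLE)
table.add_column("Current")
table.add_column("Previous")
table.add_column("Saving")
table.add_column("Name")
table.add_row("1,109", "1,309", "15% (-200)", "iPhone 12 Pro Max 256GB ...")

Console().print(table)  # renders the table to stdout
```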
||
zmoog/refurbished | zmoog__refurbished-69 | fb96783ec63fb09f11e97c407a38c8462678b079 | diff --git a/Makefile b/Makefile
index 4c315f2..b38d981 100644
--- a/Makefile
+++ b/Makefile
@@ -19,6 +19,8 @@ lint-flake8:
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 ${sources} --count --exit-zero --max-complexity=10 --max-line-length=${line_length} --statistics
+fix-lint: fix-black fix-isort ## Fix linting
+
fix-black:
@black ${black_options}
@@ -26,4 +28,6 @@ fix-isort:
@isort ${isort_options}
test:
- pytest
+ pytest tests
+
+ready: lint test
diff --git a/cli/rfrb b/cli/rfrb
index 157e40d..6be26c9 100755
--- a/cli/rfrb
+++ b/cli/rfrb
@@ -51,7 +51,7 @@ def get_products(
# the selected procuct is not available on this store
click.echo(
f"Product '{product_family}' is "
- "not available in the {country} store"
+ f"not available in the '{country}' store"
)
| Country ID is missing in error message
The error message when a product is not available is missing the country ID:
```shell
$ python cli/rfrb be macs
Product 'macs' is not available in the '{country}' store
```
In this case, the right message is:
```text
Product 'macs' is not available in the 'be' store
```
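For what it's worth, the root cause in the patch above is a missing `f` prefix on the second half of an implicitly concatenated string; a minimal illustration of the pitfall:
```python
country = "be"

# Without the f prefix, the placeholder is printed literally:
print("not available in the '{country}' store")
# -> not available in the '{country}' store

# With the f prefix, it interpolates:
print(f"not available in the '{country}' store")
# -> not available in the 'be' store
```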
| 2022-03-22T06:07:05 | 0.0 | [] | [] |
|||
pytest-dev/pytest-asyncio | pytest-dev__pytest-asyncio-920 | 69540bfae3865ebb0395a549cf469fb07c04c08a | diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index efff237b..8ffb4b25 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,19 +1,29 @@
---
-# Read the Docs configuration file for Sphinx projects
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
version: 2
+
build:
- os: ubuntu-22.04
+ os: ubuntu-24.04
tools:
- python: '3.12'
-
-sphinx:
- configuration: docs/source/conf.py
- fail_on_warning: true
-
-python:
- install:
- - requirements: dependencies/default/constraints.txt
- - requirements: dependencies/docs/constraints.txt
- - path: .
+ python: >-
+ 3.12
+ commands:
+ - >-
+ PYTHONWARNINGS=error
+ python3 -Im venv "${READTHEDOCS_VIRTUALENV_PATH}"
+ - >-
+ PYTHONWARNINGS=error
+ "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im
+ pip install tox
+ - >-
+ PYTHONWARNINGS=error
+ "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im
+ tox -e docs --notest -vvvvv
+ - >-
+ PYTHONWARNINGS=error
+ "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im
+ tox -e docs --skip-pkg-install -q
+ --
+ "${READTHEDOCS_OUTPUT}"/html
+ -b html
+ -D language=en
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index d0c3cbf1..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = source
-BUILDDIR = build
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/source/concepts.rst b/docs/concepts.rst
similarity index 100%
rename from docs/source/concepts.rst
rename to docs/concepts.rst
diff --git a/docs/source/concepts_function_scope_example.py b/docs/concepts_function_scope_example.py
similarity index 100%
rename from docs/source/concepts_function_scope_example.py
rename to docs/concepts_function_scope_example.py
diff --git a/docs/source/concepts_module_scope_example.py b/docs/concepts_module_scope_example.py
similarity index 100%
rename from docs/source/concepts_module_scope_example.py
rename to docs/concepts_module_scope_example.py
diff --git a/docs/source/conf.py b/docs/conf.py
similarity index 100%
rename from docs/source/conf.py
rename to docs/conf.py
diff --git a/docs/source/how-to-guides/change_default_fixture_loop.rst b/docs/how-to-guides/change_default_fixture_loop.rst
similarity index 100%
rename from docs/source/how-to-guides/change_default_fixture_loop.rst
rename to docs/how-to-guides/change_default_fixture_loop.rst
diff --git a/docs/source/how-to-guides/change_fixture_loop.rst b/docs/how-to-guides/change_fixture_loop.rst
similarity index 100%
rename from docs/source/how-to-guides/change_fixture_loop.rst
rename to docs/how-to-guides/change_fixture_loop.rst
diff --git a/docs/source/how-to-guides/change_fixture_loop_example.py b/docs/how-to-guides/change_fixture_loop_example.py
similarity index 100%
rename from docs/source/how-to-guides/change_fixture_loop_example.py
rename to docs/how-to-guides/change_fixture_loop_example.py
diff --git a/docs/source/how-to-guides/class_scoped_loop_example.py b/docs/how-to-guides/class_scoped_loop_example.py
similarity index 100%
rename from docs/source/how-to-guides/class_scoped_loop_example.py
rename to docs/how-to-guides/class_scoped_loop_example.py
diff --git a/docs/source/how-to-guides/index.rst b/docs/how-to-guides/index.rst
similarity index 100%
rename from docs/source/how-to-guides/index.rst
rename to docs/how-to-guides/index.rst
diff --git a/docs/source/how-to-guides/module_scoped_loop_example.py b/docs/how-to-guides/module_scoped_loop_example.py
similarity index 100%
rename from docs/source/how-to-guides/module_scoped_loop_example.py
rename to docs/how-to-guides/module_scoped_loop_example.py
diff --git a/docs/source/how-to-guides/multiple_loops.rst b/docs/how-to-guides/multiple_loops.rst
similarity index 100%
rename from docs/source/how-to-guides/multiple_loops.rst
rename to docs/how-to-guides/multiple_loops.rst
diff --git a/docs/source/how-to-guides/multiple_loops_example.py b/docs/how-to-guides/multiple_loops_example.py
similarity index 100%
rename from docs/source/how-to-guides/multiple_loops_example.py
rename to docs/how-to-guides/multiple_loops_example.py
diff --git a/docs/source/how-to-guides/package_scoped_loop_example.py b/docs/how-to-guides/package_scoped_loop_example.py
similarity index 100%
rename from docs/source/how-to-guides/package_scoped_loop_example.py
rename to docs/how-to-guides/package_scoped_loop_example.py
diff --git a/docs/source/how-to-guides/session_scoped_loop_example.py b/docs/how-to-guides/session_scoped_loop_example.py
similarity index 100%
rename from docs/source/how-to-guides/session_scoped_loop_example.py
rename to docs/how-to-guides/session_scoped_loop_example.py
diff --git a/docs/source/how-to-guides/uvloop.rst b/docs/how-to-guides/uvloop.rst
similarity index 100%
rename from docs/source/how-to-guides/uvloop.rst
rename to docs/how-to-guides/uvloop.rst
diff --git a/docs/source/index.rst b/docs/index.rst
similarity index 100%
rename from docs/source/index.rst
rename to docs/index.rst
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index dc1312ab..00000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,35 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=source
-set BUILDDIR=build
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.https://www.sphinx-doc.org/
- exit /b 1
-)
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:end
-popd
diff --git a/docs/source/reference/changelog.rst b/docs/reference/changelog.rst
similarity index 100%
rename from docs/source/reference/changelog.rst
rename to docs/reference/changelog.rst
diff --git a/docs/source/reference/configuration.rst b/docs/reference/configuration.rst
similarity index 100%
rename from docs/source/reference/configuration.rst
rename to docs/reference/configuration.rst
diff --git a/docs/source/reference/decorators/index.rst b/docs/reference/decorators/index.rst
similarity index 100%
rename from docs/source/reference/decorators/index.rst
rename to docs/reference/decorators/index.rst
diff --git a/docs/source/reference/fixtures/event_loop_example.py b/docs/reference/fixtures/event_loop_example.py
similarity index 100%
rename from docs/source/reference/fixtures/event_loop_example.py
rename to docs/reference/fixtures/event_loop_example.py
diff --git a/docs/source/reference/fixtures/event_loop_policy_example.py b/docs/reference/fixtures/event_loop_policy_example.py
similarity index 100%
rename from docs/source/reference/fixtures/event_loop_policy_example.py
rename to docs/reference/fixtures/event_loop_policy_example.py
diff --git a/docs/source/reference/fixtures/event_loop_policy_parametrized_example.py b/docs/reference/fixtures/event_loop_policy_parametrized_example.py
similarity index 100%
rename from docs/source/reference/fixtures/event_loop_policy_parametrized_example.py
rename to docs/reference/fixtures/event_loop_policy_parametrized_example.py
diff --git a/docs/source/reference/fixtures/index.rst b/docs/reference/fixtures/index.rst
similarity index 100%
rename from docs/source/reference/fixtures/index.rst
rename to docs/reference/fixtures/index.rst
diff --git a/docs/source/reference/functions.rst b/docs/reference/functions.rst
similarity index 100%
rename from docs/source/reference/functions.rst
rename to docs/reference/functions.rst
diff --git a/docs/source/reference/index.rst b/docs/reference/index.rst
similarity index 100%
rename from docs/source/reference/index.rst
rename to docs/reference/index.rst
diff --git a/docs/source/reference/markers/class_scoped_loop_custom_policies_strict_mode_example.py b/docs/reference/markers/class_scoped_loop_custom_policies_strict_mode_example.py
similarity index 100%
rename from docs/source/reference/markers/class_scoped_loop_custom_policies_strict_mode_example.py
rename to docs/reference/markers/class_scoped_loop_custom_policies_strict_mode_example.py
diff --git a/docs/source/reference/markers/class_scoped_loop_strict_mode_example.py b/docs/reference/markers/class_scoped_loop_strict_mode_example.py
similarity index 100%
rename from docs/source/reference/markers/class_scoped_loop_strict_mode_example.py
rename to docs/reference/markers/class_scoped_loop_strict_mode_example.py
diff --git a/docs/source/reference/markers/class_scoped_loop_with_fixture_strict_mode_example.py b/docs/reference/markers/class_scoped_loop_with_fixture_strict_mode_example.py
similarity index 100%
rename from docs/source/reference/markers/class_scoped_loop_with_fixture_strict_mode_example.py
rename to docs/reference/markers/class_scoped_loop_with_fixture_strict_mode_example.py
diff --git a/docs/source/reference/markers/function_scoped_loop_strict_mode_example.py b/docs/reference/markers/function_scoped_loop_strict_mode_example.py
similarity index 100%
rename from docs/source/reference/markers/function_scoped_loop_strict_mode_example.py
rename to docs/reference/markers/function_scoped_loop_strict_mode_example.py
diff --git a/docs/source/reference/markers/index.rst b/docs/reference/markers/index.rst
similarity index 100%
rename from docs/source/reference/markers/index.rst
rename to docs/reference/markers/index.rst
diff --git a/docs/source/reference/markers/module_scoped_loop_strict_mode_example.py b/docs/reference/markers/module_scoped_loop_strict_mode_example.py
similarity index 100%
rename from docs/source/reference/markers/module_scoped_loop_strict_mode_example.py
rename to docs/reference/markers/module_scoped_loop_strict_mode_example.py
diff --git a/docs/source/support.rst b/docs/support.rst
similarity index 100%
rename from docs/source/support.rst
rename to docs/support.rst
diff --git a/setup.cfg b/setup.cfg
index c04d3884..ac2f2adc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -65,7 +65,7 @@ show_missing = true
[tool:pytest]
python_files = test_*.py *_example.py
addopts = -rsx --tb=short
-testpaths = docs/source tests
+testpaths = docs tests
asyncio_mode = auto
junit_family=xunit2
filterwarnings =
diff --git a/tox.ini b/tox.ini
index 665c2fff..79e96fa6 100644
--- a/tox.ini
+++ b/tox.ini
@@ -26,14 +26,48 @@ allowlist_externals =
make
[testenv:docs]
+allowlist_externals =
+ git
extras = docs
deps =
--requirement dependencies/docs/requirements.txt
--constraint dependencies/docs/constraints.txt
change_dir = docs
-commands = make html
-allowlist_externals =
- make
+description = Build The Docs with {basepython}
+commands =
+ # Retrieve possibly missing commits:
+ -git fetch --unshallow
+ -git fetch --tags
+
+ # Build the html docs with Sphinx:
+ {envpython} -Im sphinx \
+ -j auto \
+ {tty:--color} \
+ -a \
+ -T \
+ -n \
+ -W --keep-going \
+ -d "{temp_dir}{/}.doctrees" \
+ . \
+ {posargs:"{envdir}{/}docs_out" -b html}
+
+ # Print out the output docs dir and a way to serve html:
+ -{envpython} -c\
+ 'import pathlib;\
+ docs_dir = pathlib.Path(r"{envdir}") / "docs_out";\
+ index_file = docs_dir / "index.html";\
+ print("\n" + "=" * 120 +\
+ f"\n\nOpen the documentation with:\n\n\
+ \t$ python3 -Im webbrowser \N\{QUOTATION MARK\}file://\{index_file\}\N\{QUOTATION MARK\}\n\n\
+ To serve docs, use\n\n\
+ \t$ python3 -Im http.server --directory \
+ \N\{QUOTATION MARK\}\{docs_dir\}\N\{QUOTATION MARK\} 0\n\n" +\
+ "=" * 120)'
+changedir = {toxinidir}{/}docs
+isolated_build = true
+passenv =
+ SSH_AUTH_SOCK
+skip_install = false
[gh-actions]
python =
| Unify the documentation build process for local and CI environments
Currently, the documentation can be built locally using `tox run -e docs` or as part of the CI via readthedocs. However, the two build processes differ, which makes it hard to reproduce readthedocs builds locally.
The goal of this issue is to use the same build process for the docs both locally and in CI; a sketch of the resulting workflow follows the PR link below.
see https://github.com/pytest-dev/pytest-asyncio/pull/912
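A hedged sketch of what "same build process" means in practice after this change: both entry points drive the identical tox environment, so a readthedocs run can be reproduced locally. The snippet below shells out to the commands from the patch above; the output path is a placeholder (Read the Docs sets `READTHEDOCS_OUTPUT` itself):
```python
import subprocess

# Local build: output goes to tox's default docs_out directory.
subprocess.run(["tox", "run", "-e", "docs"], check=True)

# CI-style build: the same tox env, with the output directory and the
# Sphinx builder passed through as posargs (path here is a placeholder).
subprocess.run(
    [
        "tox", "run", "-e", "docs", "--skip-pkg-install", "-q",
        "--", "/tmp/rtd-output/html", "-b", "html",
    ],
    check=True,
)
```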
| Hello @seifertm,
I'm the author of #912. Can I take this up?
Would be a good introduction to using ReadTheDocs and CI pipelines. | 2024-08-13T23:21:34 | 0.0 | [] | [] |
||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-404 | a5ad77ae59c7392a147a67ab7c1d561c1854cce8 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5b618295..96f4fd8c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,7 @@ This changelog track changes to the qoqo project starting at version v0.5.0
## 1.2.0-alpha.1
-* Preparing for 1.2.0
+* Preparing for 1.2.0 and updated nalgebra to 0.31
## 1.1.0
diff --git a/Cargo.lock b/Cargo.lock
index c89cbc9b..81a65acd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -66,17 +66,6 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-[[package]]
-name = "compatibility_test"
-version = "1.2.0-alpha.1"
-dependencies = [
- "bincode",
- "qoqo_calculator",
- "roqoqo 1.0.0",
- "roqoqo 1.2.0-alpha.1",
- "test-case",
-]
-
[[package]]
name = "ctor"
version = "0.1.26"
@@ -259,9 +248,9 @@ checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
[[package]]
name = "libc"
-version = "0.2.135"
+version = "0.2.136"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+checksum = "55edcf6c0bb319052dea84732cf99db461780fd5e8d3eb46ab6ff312ab31f197"
[[package]]
name = "libm"
@@ -303,22 +292,6 @@ dependencies = [
"autocfg",
]
-[[package]]
-name = "nalgebra"
-version = "0.30.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fb2d0de08694bed883320212c18ee3008576bfe8c306f4c3c4a58b4876998be"
-dependencies = [
- "approx",
- "matrixmultiply",
- "nalgebra-macros",
- "num-complex",
- "num-rational",
- "num-traits",
- "simba",
- "typenum",
-]
-
[[package]]
name = "nalgebra"
version = "0.31.2"
@@ -580,7 +553,7 @@ name = "qoqo"
version = "1.2.0-alpha.1"
dependencies = [
"bincode",
- "nalgebra 0.30.1",
+ "nalgebra",
"ndarray",
"num-complex",
"numpy",
@@ -591,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.2.0-alpha.1",
+ "roqoqo",
"serde",
"serde_json",
"syn",
@@ -697,26 +670,6 @@ dependencies = [
"bitflags",
]
-[[package]]
-name = "roqoqo"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b298dab631f5cd4dbb92811398f20accde321f56b96347de413fd888965144c"
-dependencies = [
- "bincode",
- "nalgebra 0.31.2",
- "ndarray",
- "num-complex",
- "proc-macro2",
- "qoqo_calculator",
- "quote",
- "rand",
- "roqoqo-derive 1.0.0",
- "serde",
- "syn",
- "thiserror",
-]
-
[[package]]
name = "roqoqo"
version = "1.2.0-alpha.1"
@@ -725,7 +678,7 @@ dependencies = [
"bincode",
"dyn-clone",
"futures",
- "nalgebra 0.30.1",
+ "nalgebra",
"ndarray",
"num-complex",
"petgraph",
@@ -734,7 +687,7 @@ dependencies = [
"quote",
"rand",
"rand_distr",
- "roqoqo-derive 1.2.0-alpha.1",
+ "roqoqo-derive",
"serde",
"serde_json",
"serde_test",
@@ -744,17 +697,6 @@ dependencies = [
"typetag",
]
-[[package]]
-name = "roqoqo-derive"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c799726c6f2553b8145b671b57494efdc6a1bd2c84fcbf7e8313f09982fe5060"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
[[package]]
name = "roqoqo-derive"
version = "1.2.0-alpha.1"
@@ -768,13 +710,13 @@ dependencies = [
name = "roqoqo-test"
version = "1.2.0-alpha.1"
dependencies = [
- "nalgebra 0.30.1",
+ "nalgebra",
"ndarray",
"proc-macro2",
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.2.0-alpha.1",
+ "roqoqo",
"syn",
]
@@ -801,18 +743,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.146"
+version = "1.0.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6df50b7a60a0ad48e1b42eb38373eac8ff785d619fb14db917b4e63d5439361f"
+checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.146"
+version = "1.0.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a714fd32ba1d66047ce7d53dabd809e9922d538f9047de13cc4cffca47b36205"
+checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852"
dependencies = [
"proc-macro2",
"quote",
@@ -832,9 +774,9 @@ dependencies = [
[[package]]
name = "serde_test"
-version = "1.0.146"
+version = "1.0.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15e7018257a3ef78bdfda9ac60f00a374913f7e019b0dde98d281d28f75e2bc2"
+checksum = "641666500e4e6fba7b91b73651a375cb53579468ab3c38389289b802797cad94"
dependencies = [
"serde",
]
diff --git a/Cargo.toml b/Cargo.toml
index 0e946593..4cb907ca 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,11 +5,10 @@ members = [
"roqoqo-test",
"qoqo",
"qoqo-macros",
- "compatibility_test"
]
[profile.release]
lto = true
[profile.bench]
-lto = true
\ No newline at end of file
+lto = true
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 89bafbc4..65e99e88 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -40,7 +40,7 @@ serde_json = "1.0"
[dev-dependencies]
test-case = "2.0"
-nalgebra = "=0.30"
+nalgebra = "0.31"
[build-dependencies]
quote = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index b729e95f..40d7d932 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -3,7 +3,7 @@ name = "qoqo"
version = "1.2.0-alpha.1"
dependencies = [
'numpy',
- 'qoqo_calculator_pyo3>=1.2.0-alpha.1',
+ 'qoqo_calculator_pyo3>=1.1',
]
license = {text="Apache-2.0 AND Apache-2.0 with LLVM-exception AND MIT AND Unicode-DFS-2016 AND BSD-2-Clause AND BSD-3-CLause"}
maintainers = [{name = "HQS Quantum Simulations GmbH", email = "[email protected]"}]
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 3a3234d7..ebeff0f0 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -1311,13 +1311,6 @@ See the License for the specific language governing permissions and
limitations under the License.
-====================================================
-compatibility_test 1.2.0-alpha.1
-https://github.com/HQSquantumsimulations/qoqo
-by HQS Quantum Simulations <[email protected]>
-Compatibility tests for roqoqo
-License: Apache-2.0
-
====================================================
fixedbitset 0.4.2
https://github.com/petgraph/fixedbitset
@@ -2762,7 +2755,7 @@ limitations under the License.
====================================================
-libc 0.2.135
+libc 0.2.136
https://github.com/rust-lang/libc
by The Rust Project Developers
Raw FFI bindings to platform libraries like libc.
@@ -3734,218 +3727,6 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-====================================================
-nalgebra 0.30.1
-https://nalgebra.org
-by Sébastien Crozet <[email protected]>
-General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
-License: BSD-3-Clause
-----------------------------------------------------
-LICENSE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2020 Sébastien Crozet
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
====================================================
nalgebra 0.31.2
https://nalgebra.org
@@ -11280,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.0.0
+roqoqo 1.2.0-alpha.1
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11491,429 +11272,6 @@ LICENSE:
limitations under the License.
-====================================================
-roqoqo 1.2.0-alpha.1
-https://github.com/HQSquantumsimulations/qoqo
-by HQS Quantum Simulations <[email protected]>
-Rust Quantum Computing Toolkit by HQS
-License: Apache-2.0
-----------------------------------------------------
-LICENSE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2021 HQS Quantum Simulations GmbH
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
-====================================================
-roqoqo-derive 1.0.0
-by HQS Quantum Simulations <[email protected]>
-Macros for the roqoqo crate
-License: Apache-2.0
-----------------------------------------------------
-LICENSE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2021 HQS Quantum Simulations GmbH
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
====================================================
roqoqo-derive 1.2.0-alpha.1
by HQS Quantum Simulations <[email protected]>
@@ -12845,7 +12203,7 @@ limitations under the License.
====================================================
-serde 1.0.146
+serde 1.0.147
https://serde.rs
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
A generic serialization/deserialization framework
@@ -13084,7 +12442,7 @@ limitations under the License.
====================================================
-serde_derive 1.0.146
+serde_derive 1.0.147
https://serde.rs
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
Macros 1.1 implementation of #[derive(Serialize, Deserialize)]
@@ -13562,7 +12920,7 @@ limitations under the License.
====================================================
-serde_test 1.0.146
+serde_test 1.0.147
https://serde.rs
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
Token De/Serializer for testing De/Serialize implementations
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index 2c163fce..2057e9e5 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -30,7 +30,7 @@ dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
roqoqo-derive = {version="1.2.0-alpha.1", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
-nalgebra = "=0.30"
+nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
rand_distr = {version="0.4", optional=true}
rand = { version = "0.8.4"}
| qoqo Circuit `from_` methods are not classmethods
`from_json` and `from_bincode` in `qoqo.Circuit` are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly.
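For illustration, a minimal sketch of the desired calling pattern in plain Python (the class body and JSON layout here are placeholders, not the actual qoqo implementation):
```python
import json

class Circuit:
    def __init__(self, operations=None):
        self.operations = list(operations or [])

    @classmethod
    def from_json(cls, string):
        # A classmethod receives the class itself, so deserialization
        # works without a pre-existing instance.
        data = json.loads(string)
        return cls(operations=data.get("operations", []))

# The point of the issue: this must work directly on the class.
circuit = Circuit.from_json('{"operations": []}')
```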
| bug fix implemented and merged | 2022-10-24T11:47:35 | 0.0 | [] | [] |
||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-401 | 04634d20fcd810e8a10436e56446c31612a7b62f | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4a22e882..5b618295 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,29 +2,22 @@
This changelog track changes to the qoqo project starting at version v0.5.0
-## 1.1.0-beta.6
+## 1.2.0-alpha.1
-* Fixed github pipeline deploy
+* Preparing for 1.2.0
-## 1.1.0-beta.5
+## 1.1.0
-* Fixed github pipeline deploy qoqo coverage
+### Changed v1.1.0
-## 1.1.0-beta.4
+* Fixed nalgebra version to 0.30
-* Fixed compatibility tests
-
-## 1.1.0-beta.3
+### Added v1.1.0
+* Added 1.0.0 compatibility tests
* Added rich comparison for Python interface of Measurements
* Added PragmaLoop
-
-## 1.1.0-beta.1
-
* Allowed creating PragmaSetStateVector from float or integer numpy arrays.
-
-## 1.1.0-alpha.1
-
* Added `InputBit` to set bit in a (readout) bit register to a value.
* Added `InvolvedClassical` functionality to return which classical register variables are involved in an operation
* Added `CircuitDag` direct acyclical graph representation of `Circuit`
diff --git a/Cargo.lock b/Cargo.lock
index 4cdaa7c5..c89cbc9b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -24,9 +24,9 @@ dependencies = [
[[package]]
name = "async-trait"
-version = "0.1.57"
+version = "0.1.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f"
+checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c"
dependencies = [
"proc-macro2",
"quote",
@@ -66,11 +66,22 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "compatibility_test"
+version = "1.2.0-alpha.1"
+dependencies = [
+ "bincode",
+ "qoqo_calculator",
+ "roqoqo 1.0.0",
+ "roqoqo 1.2.0-alpha.1",
+ "test-case",
+]
+
[[package]]
name = "ctor"
-version = "0.1.23"
+version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdffe87e1d521a10f9696f833fe502293ea446d7f256c06128293a4119bdf4cb"
+checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
dependencies = [
"quote",
"syn",
@@ -99,9 +110,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "futures"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c"
+checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0"
dependencies = [
"futures-channel",
"futures-core",
@@ -114,9 +125,9 @@ dependencies = [
[[package]]
name = "futures-channel"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050"
+checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed"
dependencies = [
"futures-core",
"futures-sink",
@@ -124,15 +135,15 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf"
+checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac"
[[package]]
name = "futures-executor"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab"
+checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2"
dependencies = [
"futures-core",
"futures-task",
@@ -141,15 +152,15 @@ dependencies = [
[[package]]
name = "futures-io"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68"
+checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb"
[[package]]
name = "futures-macro"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17"
+checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
dependencies = [
"proc-macro2",
"quote",
@@ -158,21 +169,21 @@ dependencies = [
[[package]]
name = "futures-sink"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56"
+checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9"
[[package]]
name = "futures-task"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1"
+checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
[[package]]
name = "futures-util"
-version = "0.3.24"
+version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90"
+checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6"
dependencies = [
"futures-channel",
"futures-core",
@@ -188,9 +199,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
"cfg-if",
"libc",
@@ -242,15 +253,15 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
[[package]]
name = "libc"
-version = "0.2.134"
+version = "0.2.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb"
+checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
[[package]]
name = "libm"
@@ -294,9 +305,25 @@ dependencies = [
[[package]]
name = "nalgebra"
-version = "0.31.1"
+version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9e0a04ce089f9401aac565c740ed30c46291260f27d4911fdbaa6ca65fa3044"
+checksum = "4fb2d0de08694bed883320212c18ee3008576bfe8c306f4c3c4a58b4876998be"
+dependencies = [
+ "approx",
+ "matrixmultiply",
+ "nalgebra-macros",
+ "num-complex",
+ "num-rational",
+ "num-traits",
+ "simba",
+ "typenum",
+]
+
+[[package]]
+name = "nalgebra"
+version = "0.31.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da388c517f7c8540918d00f7aed9864c1ead01a14a0cc21513b171857119de12"
dependencies = [
"approx",
"matrixmultiply",
@@ -407,9 +434,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
-version = "0.9.3"
+version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
+checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0"
dependencies = [
"cfg-if",
"libc",
@@ -480,9 +507,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.46"
+version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
+checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
dependencies = [
"unicode-ident",
]
@@ -550,10 +577,10 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
dependencies = [
"bincode",
- "nalgebra",
+ "nalgebra 0.30.1",
"ndarray",
"num-complex",
"numpy",
@@ -564,7 +591,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.6",
+ "roqoqo 1.2.0-alpha.1",
"serde",
"serde_json",
"syn",
@@ -574,7 +601,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
dependencies = [
"proc-macro2",
"quote",
@@ -677,7 +704,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b298dab631f5cd4dbb92811398f20accde321f56b96347de413fd888965144c"
dependencies = [
"bincode",
- "nalgebra",
+ "nalgebra 0.31.2",
"ndarray",
"num-complex",
"proc-macro2",
@@ -692,13 +719,13 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
dependencies = [
"async-trait",
"bincode",
"dyn-clone",
"futures",
- "nalgebra",
+ "nalgebra 0.30.1",
"ndarray",
"num-complex",
"petgraph",
@@ -707,9 +734,7 @@ dependencies = [
"quote",
"rand",
"rand_distr",
- "roqoqo 1.0.0",
- "roqoqo-derive 1.0.0",
- "roqoqo-derive 1.1.0-beta.6",
+ "roqoqo-derive 1.2.0-alpha.1",
"serde",
"serde_json",
"serde_test",
@@ -732,7 +757,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
dependencies = [
"proc-macro2",
"quote",
@@ -741,15 +766,15 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
dependencies = [
- "nalgebra",
+ "nalgebra 0.30.1",
"ndarray",
"proc-macro2",
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.6",
+ "roqoqo 1.2.0-alpha.1",
"syn",
]
@@ -776,18 +801,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.145"
+version = "1.0.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "6df50b7a60a0ad48e1b42eb38373eac8ff785d619fb14db917b4e63d5439361f"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.145"
+version = "1.0.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "a714fd32ba1d66047ce7d53dabd809e9922d538f9047de13cc4cffca47b36205"
dependencies = [
"proc-macro2",
"quote",
@@ -796,9 +821,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45"
dependencies = [
"itoa",
"ryu",
@@ -807,9 +832,9 @@ dependencies = [
[[package]]
name = "serde_test"
-version = "1.0.145"
+version = "1.0.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c17d2112159132660b4c5399e274f676fb75a2f8d70b7468f18f045b71138ed"
+checksum = "15e7018257a3ef78bdfda9ac60f00a374913f7e019b0dde98d281d28f75e2bc2"
dependencies = [
"serde",
]
@@ -844,9 +869,9 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d"
dependencies = [
"proc-macro2",
"quote",
@@ -933,9 +958,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "unindent"
@@ -957,9 +982,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wide"
-version = "0.7.4"
+version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3aba2d1dac31ac7cae82847ac5b8be822aee8f99a4e100f279605016b185c5f"
+checksum = "ae41ecad2489a1655c8ef8489444b0b113c0a0c795944a3572a0931cf7d2525c"
dependencies = [
"bytemuck",
"safe_arch",
@@ -967,43 +992,57 @@ dependencies = [
[[package]]
name = "windows-sys"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
+ "windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
+
[[package]]
name = "windows_aarch64_msvc"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
[[package]]
name = "windows_i686_gnu"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
[[package]]
name = "windows_i686_msvc"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
[[package]]
name = "windows_x86_64_gnu"
-version = "0.36.1"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
[[package]]
name = "windows_x86_64_msvc"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
diff --git a/Cargo.toml b/Cargo.toml
index 1bd0b71f..0e946593 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,7 +4,8 @@ members = [
"roqoqo-derive",
"roqoqo-test",
"qoqo",
- "qoqo-macros"
+ "qoqo-macros",
+ "compatibility_test"
]
[profile.release]
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index c30f7c0f..a92d32a9 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 4405e457..89bafbc4 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,15 +32,15 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.6", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.6", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.2.0-alpha.1", path="../qoqo-macros"}
+roqoqo = {version="1.2.0-alpha.1", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
[dev-dependencies]
test-case = "2.0"
-nalgebra = "0.31"
+nalgebra = "=0.30"
[build-dependencies]
quote = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 1766eaa1..b729e95f 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,9 +1,9 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
dependencies = [
'numpy',
- 'qoqo_calculator_pyo3>=1.1.0',
+ 'qoqo_calculator_pyo3>=1.2.0-alpha.1',
]
 license = {text="Apache-2.0 AND Apache-2.0 with LLVM-exception AND MIT AND Unicode-DFS-2016 AND BSD-2-Clause AND BSD-3-Clause"}
maintainers = [{name = "HQS Quantum Simulations GmbH", email = "[email protected]"}]
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 161b2c86..3a3234d7 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -1311,6 +1311,13 @@ See the License for the specific language governing permissions and
limitations under the License.
+====================================================
+compatibility_test 1.2.0-alpha.1
+https://github.com/HQSquantumsimulations/qoqo
+by HQS Quantum Simulations <[email protected]>
+Compatibility tests for roqoqo
+License: Apache-2.0
+
====================================================
fixedbitset 0.4.2
https://github.com/petgraph/fixedbitset
@@ -1553,7 +1560,7 @@ limitations under the License.
====================================================
-getrandom 0.2.7
+getrandom 0.2.8
https://github.com/rust-random/getrandom
by The Rand Project Developers
A small cross-platform library for retrieving random data from system source
@@ -2516,7 +2523,7 @@ limitations under the License.
====================================================
-itoa 1.0.3
+itoa 1.0.4
https://github.com/dtolnay/itoa
by David Tolnay <[email protected]>
Fast integer primitive to string conversion
@@ -2755,7 +2762,7 @@ limitations under the License.
====================================================
-libc 0.2.134
+libc 0.2.135
https://github.com/rust-lang/libc
by The Rust Project Developers
Raw FFI bindings to platform libraries like libc.
@@ -3728,7 +3735,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
====================================================
-nalgebra 0.31.1
+nalgebra 0.30.1
https://nalgebra.org
by Sébastien Crozet <[email protected]>
General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
@@ -3939,6 +3946,218 @@ LICENSE:
limitations under the License.
+====================================================
+nalgebra 0.31.2
+https://nalgebra.org
+by Sébastien Crozet <[email protected]>
+General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
+License: BSD-3-Clause
+----------------------------------------------------
+LICENSE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2020 Sébastien Crozet
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
====================================================
nalgebra-macros 0.1.0
https://nalgebra.org
@@ -5670,7 +5889,7 @@ limitations under the License.
====================================================
-parking_lot_core 0.9.3
+parking_lot_core 0.9.4
https://github.com/Amanieu/parking_lot
by Amanieu d'Antras <[email protected]>
An advanced API for creating custom synchronization primitives.
@@ -7108,7 +7327,7 @@ limitations under the License.
====================================================
-proc-macro2 1.0.46
+proc-macro2 1.0.47
https://github.com/dtolnay/proc-macro2
by David Tolnay <[email protected]>, Alex Crichton <[email protected]>
A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case.
@@ -8777,7 +8996,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.6
+qoqo 1.2.0-alpha.1
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +9208,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.6
+qoqo-macros 1.2.0-alpha.1
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11273,7 +11492,7 @@ LICENSE:
====================================================
-roqoqo 1.1.0-beta.6
+roqoqo 1.2.0-alpha.1
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11696,7 +11915,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.6
+roqoqo-derive 1.2.0-alpha.1
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11907,7 +12126,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.6
+roqoqo-test 1.2.0-alpha.1
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
@@ -12626,7 +12845,7 @@ limitations under the License.
====================================================
-serde 1.0.145
+serde 1.0.146
https://serde.rs
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
A generic serialization/deserialization framework
@@ -12865,7 +13084,7 @@ limitations under the License.
====================================================
-serde_derive 1.0.145
+serde_derive 1.0.146
https://serde.rs
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
Macros 1.1 implementation of #[derive(Serialize, Deserialize)]
@@ -13104,7 +13323,7 @@ limitations under the License.
====================================================
-serde_json 1.0.85
+serde_json 1.0.87
https://github.com/serde-rs/json
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
A JSON serialization file format
@@ -13343,7 +13562,7 @@ limitations under the License.
====================================================
-serde_test 1.0.145
+serde_test 1.0.146
https://serde.rs
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
Token De/Serializer for testing De/Serialize implementations
@@ -14035,7 +14254,7 @@ limitations under the License.
====================================================
-syn 1.0.101
+syn 1.0.103
https://github.com/dtolnay/syn
by David Tolnay <[email protected]>
Parser for Rust source code
@@ -15265,7 +15484,7 @@ See the License for the specific language governing permissions and
limitations under the License.
====================================================
-unicode-ident 1.0.4
+unicode-ident 1.0.5
https://github.com/dtolnay/unicode-ident
by David Tolnay <[email protected]>
Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31
@@ -16491,7 +16710,7 @@ limitations under the License.
====================================================
-wide 0.7.4
+wide 0.7.5
https://github.com/Lokathor/wide
by Lokathor <[email protected]>
A crate to help you go wide.
@@ -16513,13 +16732,13 @@ Permission is granted to anyone to use this software for any purpose, including
====================================================
-windows-sys 0.36.1
+windows-sys 0.42.0
https://github.com/microsoft/windows-rs
by Microsoft
Rust for Windows
License: MIT OR Apache-2.0
----------------------------------------------------
-license-apache:
+license-apache-2.0:
Apache License
Version 2.0, January 2004
@@ -16750,35 +16969,49 @@ license-mit:
====================================================
-windows_aarch64_msvc 0.36.1
+windows_aarch64_gnullvm 0.42.0
+https://github.com/microsoft/windows-rs
+by Microsoft
+Code gen support for the windows crate
+License: MIT OR Apache-2.0
+
+====================================================
+windows_aarch64_msvc 0.42.0
+https://github.com/microsoft/windows-rs
+by Microsoft
+Code gen support for the windows crate
+License: MIT OR Apache-2.0
+
+====================================================
+windows_i686_gnu 0.42.0
https://github.com/microsoft/windows-rs
by Microsoft
Code gen support for the windows crate
License: MIT OR Apache-2.0
====================================================
-windows_i686_gnu 0.36.1
+windows_i686_msvc 0.42.0
https://github.com/microsoft/windows-rs
by Microsoft
Code gen support for the windows crate
License: MIT OR Apache-2.0
====================================================
-windows_i686_msvc 0.36.1
+windows_x86_64_gnu 0.42.0
https://github.com/microsoft/windows-rs
by Microsoft
Code gen support for the windows crate
License: MIT OR Apache-2.0
====================================================
-windows_x86_64_gnu 0.36.1
+windows_x86_64_gnullvm 0.42.0
https://github.com/microsoft/windows-rs
by Microsoft
Code gen support for the windows crate
License: MIT OR Apache-2.0
====================================================
-windows_x86_64_msvc 0.36.1
+windows_x86_64_msvc 0.42.0
https://github.com/microsoft/windows-rs
by Microsoft
Code gen support for the windows crate
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index ed674a3a..c303c627 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index e19ca48a..2c163fce 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.6"
+version = "1.2.0-alpha.1"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,24 +28,21 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.6", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.2.0-alpha.1", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
-nalgebra = "0.31"
+nalgebra = "=0.30"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
-bincode = {version="1.3", optional=true}
rand_distr = {version="0.4", optional=true}
rand = { version = "0.8.4"}
async-trait = {version = "0.1", optional = true}
futures ={version = "0.3", optional=true}
petgraph = {version = "0.6.2", optional=true}
+bincode = {version="1.3", optional=true}
[dev-dependencies]
serde_test = "1.0"
test-case = "2.0"
serde_json = "1.0"
-test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0"}
-test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0"}
-bincode = {version="1.3", optional=false}
[build-dependencies]
quote = "1.0"
@@ -63,4 +60,4 @@ serialize = ["serde", "ndarray/serde", "num-complex/serde", "bincode", "petgraph
overrotate = [ "rand_distr", "roqoqo-derive/overrotate"]
async = ["async-trait", "futures"]
# json_schema=["schemars", "serialize", "qoqo_calculator/json_schema"]
-circuitdag = ["petgraph"]
+circuitdag = ["petgraph"]
\ No newline at end of file
diff --git a/roqoqo/build.rs b/roqoqo/build.rs
index fec0053b..b164c9e5 100644
--- a/roqoqo/build.rs
+++ b/roqoqo/build.rs
@@ -52,8 +52,12 @@ struct Visitor {
multi_qubit_gate_operations: Vec<Ident>,
    // Operations that have only been introduced in roqoqo 1.1.0
// These operations will only be added at end of automatically created enums
- // to maintain compatability with bincode encoding
+ // to maintain compatibility with bincode encoding
roqoqo_1_1_operations: Vec<Ident>,
+    // Operations that have only been introduced in roqoqo 1.2.0
+ // These operations will only be added at end of automatically created enums
+ // to maintain compatibility with bincode encoding
+ roqoqo_1_2_operations: Vec<Ident>,
}
impl Visitor {
@@ -74,6 +78,7 @@ impl Visitor {
two_qubit_gate_operations: Vec::new(),
multi_qubit_gate_operations: Vec::new(),
roqoqo_1_1_operations: Vec::new(),
+ roqoqo_1_2_operations: Vec::new(),
}
}
}
@@ -221,6 +226,9 @@ impl<'ast> Visit<'ast> for Visitor {
if trait_name.as_str() == "ImplementedIn1point1" {
self.roqoqo_1_1_operations.push(id.clone());
}
+ if trait_name.as_str() == "ImplementedIn1point2" {
+ self.roqoqo_1_2_operations.push(id.clone());
+ }
if trait_name.as_str() == "OperateSingleQubitGate" {
self.single_qubit_gate_operations.push(id.clone());
}
@@ -271,7 +279,7 @@ fn main() {
.operations
.clone()
.into_iter()
- .filter(|v| !vis.roqoqo_1_1_operations.contains(v))
+ .filter(|v| !vis.roqoqo_1_1_operations.contains(v) && !vis.roqoqo_1_2_operations.contains(v))
.map(|v| {
let msg = format!("Variant for {}", v);
quote! {
@@ -280,9 +288,21 @@ fn main() {
#v(#v)}
});
let operations_quotes_1_1 = vis
+ .operations
+ .clone()
+ .into_iter()
+ .filter(|v| vis.roqoqo_1_1_operations.contains(v) && !vis.roqoqo_1_2_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[allow(clippy::upper_case_acronyms)]
+ #[doc = #msg]
+ #v(#v)}
+ });
+ let operations_quotes_1_2 = vis
.operations
.into_iter()
- .filter(|v| vis.roqoqo_1_1_operations.contains(v))
+ .filter(|v| vis.roqoqo_1_2_operations.contains(v))
.map(|v| {
let msg = format!("Variant for {}", v);
quote! {
@@ -317,7 +337,7 @@ fn main() {
.pragma_operations
.clone()
.into_iter()
- .filter(|v| !vis.roqoqo_1_1_operations.contains(v))
+ .filter(|v| !vis.roqoqo_1_1_operations.contains(v) && !vis.roqoqo_1_2_operations.contains(v))
.map(|v| {
let msg = format!("Variant for {}", v);
quote! {
@@ -327,8 +347,19 @@ fn main() {
// Construct TokenStreams for variants of pragma enum
let pragma_operations_quotes_1_1 = vis
.pragma_operations
+ .clone()
.into_iter()
- .filter(|v| vis.roqoqo_1_1_operations.contains(v))
+ .filter(|v| vis.roqoqo_1_1_operations.contains(v) && !vis.roqoqo_1_2_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[doc = #msg]
+ #v(#v)}
+ });
+ let pragma_operations_quotes_1_2 = vis
+ .pragma_operations
+ .into_iter()
+ .filter(|v| vis.roqoqo_1_2_operations.contains(v))
.map(|v| {
let msg = format!("Variant for {}", v);
quote! {
@@ -370,7 +401,7 @@ fn main() {
.definitions
.clone()
.into_iter()
- .filter(|v| !vis.roqoqo_1_1_operations.contains(v))
+ .filter(|v| !vis.roqoqo_1_1_operations.contains(v) && !vis.roqoqo_1_2_operations.contains(v))
.map(|v| {
let msg = format!("Variant for {}", v);
quote! {
@@ -379,9 +410,21 @@ fn main() {
});
// Construct TokenStreams for variants of definition enum
let definitions_quotes_1_1 = vis
+ .definitions
+ .clone()
+ .into_iter()
+ .filter(|v| vis.roqoqo_1_1_operations.contains(v) && !vis.roqoqo_1_2_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[doc = #msg]
+ #v(#v)}
+ });
+ // Construct TokenStreams for variants of definition enum
+ let definitions_quotes_1_2 = vis
.definitions
.into_iter()
- .filter(|v| vis.roqoqo_1_1_operations.contains(v))
+ .filter(|v| vis.roqoqo_1_2_operations.contains(v))
.map(|v| {
let msg = format!("Variant for {}", v);
quote! {
@@ -430,7 +473,8 @@ fn main() {
// #[cfg_attr(feature = "json_schema", derive(schemars::JsonSchema))]
pub enum Operation {
#(#operations_quotes),* ,
- #(#operations_quotes_1_1),*
+ #(#operations_quotes_1_1),* ,
+ #(#operations_quotes_1_2),*
}
/// Enum of all Operations implementing [OperateSingleQubit]
@@ -463,7 +507,8 @@ fn main() {
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
pub enum PragmaOperation {
#(#pragma_operations_quotes),* ,
- #(#pragma_operations_quotes_1_1),*
+ #(#pragma_operations_quotes_1_1),* ,
+ #(#pragma_operations_quotes_1_2),*
}
/// Enum of all Operations implementing [OperatePragmaNoise]
@@ -505,7 +550,8 @@ fn main() {
// #[cfg_attr(feature = "json_schema", derive(schemars::JsonSchema))]
pub enum Definition {
#(#definitions_quotes),* ,
- #(#definitions_quotes_1_1),*
+ #(#definitions_quotes_1_1),* ,
+ #(#definitions_quotes_1_2),*
}
/// Enum of all Operations implementing [OperateConstantGate]
diff --git a/roqoqo/src/operations/mod.rs b/roqoqo/src/operations/mod.rs
index a863090e..209229dd 100644
--- a/roqoqo/src/operations/mod.rs
+++ b/roqoqo/src/operations/mod.rs
@@ -701,6 +701,9 @@ pub trait OperateMultiQubitGate:
/// Marker trait to show that some operation has been implemented in roqoqo 1.1.0
pub(crate) trait ImplementedIn1point1: Operate {}
+/// Marker trait to show that some operation has been implemented in roqoqo 1.2.0
+pub(crate) trait ImplementedIn1point2: Operate {}
+
#[cfg(feature = "dynamic")]
/// A wrapper for Operate trait objects.
///
diff --git a/roqoqo/src/operations/two_qubit_gate_operations.rs b/roqoqo/src/operations/two_qubit_gate_operations.rs
index 45824b8f..3501719f 100644
--- a/roqoqo/src/operations/two_qubit_gate_operations.rs
+++ b/roqoqo/src/operations/two_qubit_gate_operations.rs
@@ -2175,6 +2175,8 @@ pub struct PhaseShiftedControlledPhase {
phi: CalculatorFloat,
}
+impl super::ImplementedIn1point2 for PhaseShiftedControlledPhase {}
+
#[allow(non_upper_case_globals)]
const TAGS_PhaseShiftedControlledPhase: &[&str; 4] = &[
"Operation",
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly.
| bug fix implemented and merged | 2022-10-21T12:38:14 | 0.0 | [] | [] |
||
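
For context on the record above: in PyO3, which qoqo uses for its Python interface, a method becomes callable on the class itself, e.g. `Circuit.from_json(...)`, when it is declared with `#[staticmethod]` or `#[classmethod]` instead of taking a `self` receiver. Here is a minimal sketch of that binding shape; the `Circuit` field, validation, and error handling are placeholder assumptions, not qoqo's actual implementation.

```rust
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;

#[pyclass]
struct Circuit {
    // Placeholder standing in for the wrapped roqoqo circuit.
    json_repr: String,
}

#[pymethods]
impl Circuit {
    /// No `self` receiver: Python can call `Circuit.from_json(...)` directly
    /// on the class, which is what the issue above asks for.
    #[staticmethod]
    fn from_json(input: &str) -> PyResult<Self> {
        if input.trim().is_empty() {
            return Err(PyValueError::new_err("empty JSON input"));
        }
        Ok(Circuit { json_repr: input.to_string() })
    }
}
```

With `#[classmethod]` the method would additionally receive the `&PyType` as its first argument, which matters only when subclasses must construct themselves; either attribute removes the need for an existing instance.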
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-387 | fd91b65cac8446e0e3d64205bb2469cabada515f | diff --git a/CHANGELOG.md b/CHANGELOG.md
index f96741c7..f464a47a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,7 @@
This changelog track changes to the qoqo project starting at version v0.5.0
-## 1.1.0-beta.11
+## 1.1.0-beta.12
* Fixed nalgebra version to 0.30
diff --git a/Cargo.lock b/Cargo.lock
index d50c22fb..751f9dee 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -248,9 +248,9 @@ checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
[[package]]
name = "libc"
-version = "0.2.134"
+version = "0.2.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb"
+checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
[[package]]
name = "libm"
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
dependencies = [
"bincode",
"nalgebra",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
dependencies = [
"proc-macro2",
"quote",
@@ -672,7 +672,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
dependencies = [
"async-trait",
"bincode",
@@ -699,7 +699,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
dependencies = [
"proc-macro2",
"quote",
@@ -708,7 +708,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
dependencies = [
"nalgebra",
"ndarray",
@@ -763,9 +763,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
dependencies = [
"itoa",
"ryu",
@@ -900,9 +900,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.4"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "unindent"
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index 8e555ac3..e378b759 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index e70d9156..19d786a7 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.10", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.10", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.12", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.12", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index e3af6e97..0ad73042 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 666dc85a..6d44d444 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -248,208 +248,208 @@ License: Apache-2.0
----------------------------------------------------
LICENSE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
====================================================
@@ -702,27 +702,27 @@ License: MIT
----------------------------------------------------
LICENSE.md:
-The MIT License (MIT)
-
-Copyright (c) 2014 Ty Overby
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+The MIT License (MIT)
+
+Copyright (c) 2014 Ty Overby
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
====================================================
@@ -976,95 +976,95 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB:
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
----------------------------------------------------
LICENSE-MIT:
-MIT License
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+MIT License
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
----------------------------------------------------
LICENSE-APACHE:
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
- 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
- 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
- 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
- (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
- (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
- (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
- (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
-
- You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
- 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
- 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
- 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
- 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
- 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+ 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+ 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+ 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+ (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
+ (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
+ (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+ (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+ 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+ 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+ 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+ 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+ 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
====================================================
@@ -2755,7 +2755,7 @@ limitations under the License.
====================================================
-libc 0.2.134
+libc 0.2.135
https://github.com/rust-lang/libc
by The Rust Project Developers
Raw FFI bindings to platform libraries like libc.
@@ -3707,236 +3707,236 @@ License: MIT
----------------------------------------------------
LICENSE:
-Copyright (c) 2017 Gilad Naaman
+Copyright (c) 2017 Gilad Naaman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+====================================================
+nalgebra 0.30.1
+https://nalgebra.org
+by Sébastien Crozet <[email protected]>
+General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
+License: BSD-3-Clause
+----------------------------------------------------
+LICENSE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+ APPENDIX: How to apply the Apache License to your work.
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+ Copyright 2020 Sébastien Crozet
-====================================================
-nalgebra 0.30.1
-https://nalgebra.org
-by Sébastien Crozet <[email protected]>
-General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
-License: BSD-3-Clause
-----------------------------------------------------
-LICENSE:
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
-    [... remainder of the Apache License, Version 2.0 boilerplate, ending with the notice "Copyright 2020 Sébastien Crozet" and the standard closing paragraphs ...]
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
====================================================
@@ -6667,207 +6667,207 @@ SOFTWARE.
----------------------------------------------------
LICENSE-APACHE:
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright 2019-2020 CreepySkeleton <[email protected]>
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+    [Apache License, Version 2.0: full text identical to the lines removed directly above, ending with "Copyright 2019-2020 CreepySkeleton <[email protected]>"]
====================================================
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.10
+qoqo 1.1.0-beta.12
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.10
+qoqo-macros 1.1.0-beta.12
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11061,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.1.0-beta.10
+roqoqo 1.1.0-beta.12
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.10
+roqoqo-derive 1.1.0-beta.12
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11484,7 +11484,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.10
+roqoqo-test 1.1.0-beta.12
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
@@ -11943,17 +11943,17 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB.md:
-Copyright (c) 2020 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+[Zlib license text identical to the lines removed directly above]
====================================================
@@ -12681,7 +12681,7 @@ limitations under the License.
====================================================
-serde_json 1.0.85
+serde_json 1.0.86
https://github.com/serde-rs/json
by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
A JSON serialization file format
@@ -14842,7 +14842,7 @@ See the License for the specific language governing permissions and
limitations under the License.
====================================================
-unicode-ident 1.0.4
+unicode-ident 1.0.5
https://github.com/dtolnay/unicode-ident
by David Tolnay <[email protected]>
Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31
@@ -16076,17 +16076,17 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB.md:
-[Zlib license text, identical to the copy in the previous hunk]
+[same Zlib license text re-added unchanged]
====================================================
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index 0accebeb..7fcce7d7 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index 7c6abff8..af19ec67 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.10"
+version = "1.1.0-beta.12"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.10", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.12", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "=0.30"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly on the class instead of on an instance (see the sketch below).
| 2022-10-12T12:47:45 | 0.0 | [] | [] |
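For context on the requested fix, here is a minimal, hypothetical PyO3 sketch of such a binding. The `Circuit` struct, its field, and the error mapping are illustrative placeholders, not qoqo's actual code; the point is only that `#[staticmethod]` makes the function callable on the class itself:

```rust
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;

// Placeholder type standing in for the real wrapper around a roqoqo circuit.
#[pyclass]
#[derive(serde::Serialize, serde::Deserialize)]
struct Circuit {
    operations: Vec<String>,
}

#[pymethods]
impl Circuit {
    /// Callable from Python as `Circuit.from_json(s)` -- no instance needed.
    #[staticmethod]
    fn from_json(input: &str) -> PyResult<Circuit> {
        serde_json::from_str(input)
            .map_err(|e| PyValueError::new_err(format!("Cannot deserialize: {}", e)))
    }

    /// Callable from Python as `Circuit.from_bincode(b)`.
    #[staticmethod]
    fn from_bincode(input: &[u8]) -> PyResult<Circuit> {
        bincode::deserialize(input)
            .map_err(|e| PyValueError::new_err(format!("Cannot deserialize: {}", e)))
    }
}
```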
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-386 | 7c5a4610b9eaa614cabd85223bf77f475053ac35 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index cb70e583..f96741c7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,10 @@
 This changelog tracks changes to the qoqo project starting at version v0.5.0
+## 1.1.0-beta.11
+
+* Fixed nalgebra version to 0.30
+
## 1.1.0-beta.10
* Temporarily removed compatibility tests in order to release
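To illustrate what the pin above changes, here is a sketch under assumptions: it uses the `semver` crate, which mirrors Cargo's requirement syntax but is not a dependency of this repo. `=0.30` keeps resolution inside the 0.30.x series, while the previous `0.31` requirement pulled in 0.31.x:

```rust
use semver::{Version, VersionReq};

fn main() {
    let pinned = VersionReq::parse("=0.30").unwrap(); // new requirement
    let old = VersionReq::parse("0.31").unwrap();     // old (implicit caret)

    let v30 = Version::parse("0.30.1").unwrap();
    let v31 = Version::parse("0.31.1").unwrap();

    assert!(pinned.matches(&v30));  // `=0.30` accepts any 0.30.z patch release
    assert!(!pinned.matches(&v31)); // ...but never a 0.31.x release
    assert!(old.matches(&v31));     // `0.31` resolved to 0.31.x, hence the fix
}
```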
diff --git a/Cargo.lock b/Cargo.lock
index 89c4d133..d50c22fb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -294,9 +294,9 @@ dependencies = [
[[package]]
name = "nalgebra"
-version = "0.31.1"
+version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9e0a04ce089f9401aac565c740ed30c46291260f27d4911fdbaa6ca65fa3044"
+checksum = "4fb2d0de08694bed883320212c18ee3008576bfe8c306f4c3c4a58b4876998be"
dependencies = [
"approx",
"matrixmultiply",
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 8826a10a..e70d9156 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -40,7 +40,7 @@ serde_json = "1.0"
[dev-dependencies]
test-case = "2.0"
-nalgebra = "0.31"
+nalgebra = "=0.30"
[build-dependencies]
quote = "1.0"
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 55da07c0..666dc85a 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -3728,7 +3728,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
====================================================
-nalgebra 0.31.1
+nalgebra 0.30.1
https://nalgebra.org
by Sébastien Crozet <[email protected]>
General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index a298e28f..7c6abff8 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -30,7 +30,7 @@ dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
roqoqo-derive = {version="1.1.0-beta.10", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
-nalgebra = "0.31"
+nalgebra = "=0.30"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
bincode = {version="1.3", optional=true}
rand_distr = {version="0.4", optional=true}
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly; the round trip these two methods perform is sketched below.
| 2022-10-12T12:19:48 | 0.0 | [] | [] |
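For reference, the round trip such a from_json/from_bincode pair performs can be sketched with serde_json and bincode directly; `Gate` is a made-up stand-in type, not part of roqoqo:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Gate {
    name: String,
    qubit: usize,
}

fn main() {
    let g = Gate { name: "PauliX".into(), qubit: 0 };

    // JSON: human-readable and self-describing.
    let json = serde_json::to_string(&g).unwrap();
    let back: Gate = serde_json::from_str(&json).unwrap();
    assert_eq!(g, back);

    // bincode: compact binary encoding of the same data.
    let bytes = bincode::serialize(&g).unwrap();
    let back: Gate = bincode::deserialize(&bytes).unwrap();
    assert_eq!(g, back);
}
```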
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-380 | 32c283dec3a65acacf0b48b2a829bf8c66a9bfd3 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8e24b0ac..cb70e583 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,10 @@
 This changelog tracks changes to the qoqo project starting at version v0.5.0
+## 1.1.0-beta.10
+
+* Temporarily removed compatibility tests in order to release
+
## 1.1.0-beta.9
* Fixed github pipeline deploy
diff --git a/Cargo.lock b/Cargo.lock
index b98dcc8b..89c4d133 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -242,9 +242,9 @@ dependencies = [
[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
[[package]]
name = "libc"
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.9",
+ "roqoqo",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
dependencies = [
"proc-macro2",
"quote",
@@ -672,26 +672,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.0.0"
-source = "git+https://github.com/HQSquantumsimulations/qoqo.git?tag=v1.0.0#b76d8e0796c922c6280bb7ff3cebfc7e243977e3"
-dependencies = [
- "bincode",
- "nalgebra",
- "ndarray",
- "num-complex",
- "proc-macro2",
- "qoqo_calculator",
- "quote",
- "rand",
- "roqoqo-derive 1.0.0",
- "serde",
- "syn",
- "thiserror",
-]
-
-[[package]]
-name = "roqoqo"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
dependencies = [
"async-trait",
"bincode",
@@ -706,8 +687,7 @@ dependencies = [
"quote",
"rand",
"rand_distr",
- "roqoqo 1.0.0",
- "roqoqo-derive 1.1.0-beta.9",
+ "roqoqo-derive",
"serde",
"serde_json",
"serde_test",
@@ -719,17 +699,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.0.0"
-source = "git+https://github.com/HQSquantumsimulations/qoqo.git?tag=v1.0.0#b76d8e0796c922c6280bb7ff3cebfc7e243977e3"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "roqoqo-derive"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
dependencies = [
"proc-macro2",
"quote",
@@ -738,7 +708,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
dependencies = [
"nalgebra",
"ndarray",
@@ -746,7 +716,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.9",
+ "roqoqo",
"syn",
]
@@ -841,9 +811,9 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
-version = "1.0.101"
+version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
+checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
"proc-macro2",
"quote",
@@ -954,9 +924,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wide"
-version = "0.7.4"
+version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3aba2d1dac31ac7cae82847ac5b8be822aee8f99a4e100f279605016b185c5f"
+checksum = "ae41ecad2489a1655c8ef8489444b0b113c0a0c795944a3572a0931cf7d2525c"
dependencies = [
"bytemuck",
"safe_arch",
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index 36300f59..8e555ac3 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 2d120fc1..8826a10a 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.9", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.9", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.10", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.10", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 2ae19ecc..e3af6e97 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 8409de99..55da07c0 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -2516,7 +2516,7 @@ limitations under the License.
====================================================
-itoa 1.0.3
+itoa 1.0.4
https://github.com/dtolnay/itoa
by David Tolnay <[email protected]>
Fast integer primitive to string conversion
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.9
+qoqo 1.1.0-beta.10
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.9
+qoqo-macros 1.1.0-beta.10
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11061,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.1.0-beta.9
+roqoqo 1.1.0-beta.10
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.9
+roqoqo-derive 1.1.0-beta.10
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11484,7 +11484,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.9
+roqoqo-test 1.1.0-beta.10
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
@@ -13612,7 +13612,7 @@ limitations under the License.
====================================================
-syn 1.0.101
+syn 1.0.102
https://github.com/dtolnay/syn
by David Tolnay <[email protected]>
Parser for Rust source code
@@ -16068,7 +16068,7 @@ limitations under the License.
====================================================
-wide 0.7.4
+wide 0.7.5
https://github.com/Lokathor/wide
by Lokathor <[email protected]>
A crate to help you go wide.
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index e3354dde..0accebeb 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index e5ccbf9e..a298e28f 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.9"
+version = "1.1.0-beta.10"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.9", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.10", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -38,7 +38,7 @@ rand = { version = "0.8.4"}
async-trait = {version = "0.1", optional = true}
futures ={version = "0.3", optional=true}
petgraph = {version = "0.6.2", optional=true}
-test_roqoqo_1_0 = {package = "roqoqo", version="=1.0.0", optional=true, git="https://github.com/HQSquantumsimulations/qoqo.git", tag="v1.0.0"}
+# test_roqoqo_1_0 = {package = "roqoqo", version="=1.0.0", optional=true, git="https://github.com/HQSquantumsimulations/qoqo.git", tag="v1.0.0"}
# test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0", optional=true}
[dev-dependencies]
@@ -63,4 +63,4 @@ overrotate = [ "rand_distr", "roqoqo-derive/overrotate"]
async = ["async-trait", "futures"]
# json_schema=["schemars", "serialize", "qoqo_calculator/json_schema"]
circuitdag = ["petgraph"]
-compatibility = ["test_roqoqo_1_0"]
+# compatibility = ["test_roqoqo_1_0"]
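The `compatibility` feature commented out above is what gated the cross-version tests mentioned in the changelog. A sketch of the assumed pattern follows (the module and test body are illustrative; only the renamed-package trick mirrors the `test_roqoqo_1_0` line above):

```rust
// Compiled only when building with `cargo test --features compatibility`;
// commenting the feature out of Cargo.toml drops these tests from all builds.
#[cfg(feature = "compatibility")]
mod compatibility_tests {
    #[test]
    fn serialization_is_backwards_compatible() {
        // Assumed shape: construct the same object with the old release
        // (pulled in under the renamed package `test_roqoqo_1_0`) and with
        // the current crate, then compare their serialized forms.
    }
}

fn main() {
    println!("compatibility tests are feature-gated");
}
```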
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly; a classmethod-based variant is sketched below.
| 2022-10-07T07:29:40 | 0.0 | [] | [] |
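Besides a staticmethod, a PyO3 classmethod would also make `Circuit.from_json(...)` callable on the class; this hypothetical sketch (names illustrative, not qoqo's code) shows the extra class argument such a binding receives:

```rust
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
use pyo3::types::PyType;

#[pyclass(subclass)]
struct Circuit {
    operations: Vec<String>,
}

#[pymethods]
impl Circuit {
    /// Callable as `Circuit.from_json(s)`; unlike a staticmethod, `cls`
    /// tells the binding which (sub)class the call came through.
    #[classmethod]
    fn from_json(_cls: &PyType, input: &str) -> PyResult<Circuit> {
        let operations: Vec<String> = serde_json::from_str(input)
            .map_err(|e| PyValueError::new_err(e.to_string()))?;
        Ok(Circuit { operations })
    }
}
```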
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-379 | 4757f3e8271214c38904115d2b75a8b126224f02 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4477e329..8e24b0ac 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,7 @@
 This changelog tracks changes to the qoqo project starting at version v0.5.0
-## 1.1.0-beta.8
+## 1.1.0-beta.9
* Fixed github pipeline deploy
diff --git a/Cargo.lock b/Cargo.lock
index b2174ab3..b98dcc8b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.8",
+ "roqoqo 1.1.0-beta.9",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
dependencies = [
"proc-macro2",
"quote",
@@ -673,8 +673,7 @@ dependencies = [
[[package]]
name = "roqoqo"
version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b298dab631f5cd4dbb92811398f20accde321f56b96347de413fd888965144c"
+source = "git+https://github.com/HQSquantumsimulations/qoqo.git?tag=v1.0.0#b76d8e0796c922c6280bb7ff3cebfc7e243977e3"
dependencies = [
"bincode",
"nalgebra",
@@ -692,7 +691,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
dependencies = [
"async-trait",
"bincode",
@@ -708,8 +707,7 @@ dependencies = [
"rand",
"rand_distr",
"roqoqo 1.0.0",
- "roqoqo-derive 1.0.0",
- "roqoqo-derive 1.1.0-beta.8",
+ "roqoqo-derive 1.1.0-beta.9",
"serde",
"serde_json",
"serde_test",
@@ -722,8 +720,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c799726c6f2553b8145b671b57494efdc6a1bd2c84fcbf7e8313f09982fe5060"
+source = "git+https://github.com/HQSquantumsimulations/qoqo.git?tag=v1.0.0#b76d8e0796c922c6280bb7ff3cebfc7e243977e3"
dependencies = [
"proc-macro2",
"quote",
@@ -732,7 +729,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
dependencies = [
"proc-macro2",
"quote",
@@ -741,7 +738,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
dependencies = [
"nalgebra",
"ndarray",
@@ -749,7 +746,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.8",
+ "roqoqo 1.1.0-beta.9",
"syn",
]
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index 30e8d365..36300f59 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 6a34088e..2d120fc1 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.8", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.8", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.9", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.9", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 4f7d23e0..2ae19ecc 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 85c02647..8409de99 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -248,208 +248,208 @@ License: Apache-2.0
----------------------------------------------------
LICENSE:
-    [Apache License, Version 2.0: full text identical to the copy reproduced earlier in this document]
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
====================================================
@@ -702,27 +702,27 @@ License: MIT
----------------------------------------------------
LICENSE.md:
-The MIT License (MIT)
-
-Copyright (c) 2014 Ty Overby
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+The MIT License (MIT)
+
+Copyright (c) 2014 Ty Overby
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
====================================================
@@ -976,95 +976,95 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB:
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
----------------------------------------------------
LICENSE-MIT:
-MIT License
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+MIT License
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
----------------------------------------------------
LICENSE-APACHE:
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
- 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
- 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
- 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
- (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
- (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
- (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
- (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
-
- You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
- 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
- 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
- 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
- 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
- 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+ 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+ 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+ 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+ (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
+ (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
+ (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+ (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+ 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+ 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+ 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+ 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+ 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
====================================================
@@ -3704,239 +3704,239 @@ https://github.com/Gilnaa/memoffset
by Gilad Naaman <[email protected]>
offset_of functionality for Rust structs.
License: MIT
-----------------------------------------------------
-LICENSE:
-
-Copyright (c) 2017 Gilad Naaman
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
-====================================================
-nalgebra 0.31.1
-https://nalgebra.org
-by Sébastien Crozet <[email protected]>
-General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
-License: BSD-3-Clause
-----------------------------------------------------
-LICENSE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2020 Sébastien Crozet
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+----------------------------------------------------
+LICENSE:
+
+Copyright (c) 2017 Gilad Naaman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+====================================================
+nalgebra 0.31.1
+https://nalgebra.org
+by Sébastien Crozet <[email protected]>
+General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
+License: BSD-3-Clause
+----------------------------------------------------
+LICENSE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2020 Sébastien Crozet
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
====================================================
@@ -6667,207 +6667,207 @@ SOFTWARE.
----------------------------------------------------
LICENSE-APACHE:
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright 2019-2020 CreepySkeleton <[email protected]>
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2019-2020 CreepySkeleton <[email protected]>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
====================================================
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.8
+qoqo 1.1.0-beta.9
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.8
+qoqo-macros 1.1.0-beta.9
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11061,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.1.0-beta.8
+roqoqo 1.1.0-beta.9
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.8
+roqoqo-derive 1.1.0-beta.9
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11484,7 +11484,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.8
+roqoqo-test 1.1.0-beta.9
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
@@ -11943,17 +11943,17 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB.md:
-Copyright (c) 2020 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2020 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
====================================================
@@ -16076,17 +16076,17 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB.md:
-Copyright (c) 2020 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2020 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
====================================================
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index 9c468755..e3354dde 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index 6332aa7c..e5ccbf9e 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.8"
+version = "1.1.0-beta.9"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.8", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.9", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -38,8 +38,8 @@ rand = { version = "0.8.4"}
async-trait = {version = "0.1", optional = true}
futures ={version = "0.3", optional=true}
petgraph = {version = "0.6.2", optional=true}
-test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0", optional=true}
-test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0", optional=true}
+test_roqoqo_1_0 = {package = "roqoqo", version="=1.0.0", optional=true, git="https://github.com/HQSquantumsimulations/qoqo.git", tag="v1.0.0"}
+# test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0", optional=true}
[dev-dependencies]
serde_test = "1.0"
@@ -63,4 +63,4 @@ overrotate = [ "rand_distr", "roqoqo-derive/overrotate"]
async = ["async-trait", "futures"]
# json_schema=["schemars", "serialize", "qoqo_calculator/json_schema"]
circuitdag = ["petgraph"]
-compatibility = ["test_roqoqo_derive_1_0", "test_roqoqo_1_0"]
+compatibility = ["test_roqoqo_1_0"]
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly.
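
For illustration, a minimal sketch of the call pattern this issue asks for. Only `Circuit.from_json(...)` / `from_bincode` come from the issue itself; the no-argument constructor, the `to_json` serialization counterpart, and the exact signatures are assumptions, not taken from the patch above:

```python
# Hypothetical usage sketch (API shapes assumed, see note above).
from qoqo import Circuit

circuit = Circuit()
serialized = circuit.to_json()  # assumed serialization counterpart

# Desired: deserialize directly on the class, no throwaway instance needed.
restored = Circuit.from_json(serialized)

# Current behaviour reportedly requires constructing an instance first:
# restored = Circuit().from_json(serialized)
```

In PyO3 terms this presumably means exposing the deserializers with `#[staticmethod]`/`#[classmethod]` attributes rather than as instance methods, so they become callable on the class object itself.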
| 2022-10-06T14:39:05 | 0.0 | [] | [] |
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-378 | ef2e87aba7a6cd92b0fed28478d1317cf850f2b0 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 288db200..4477e329 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,7 @@
This changelog track changes to the qoqo project starting at version v0.5.0
-## 1.1.0-beta.7
+## 1.1.0-beta.8
* Fixed github pipeline deploy
diff --git a/Cargo.lock b/Cargo.lock
index a2464b80..b2174ab3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.7",
+ "roqoqo 1.1.0-beta.8",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
dependencies = [
"proc-macro2",
"quote",
@@ -692,7 +692,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
dependencies = [
"async-trait",
"bincode",
@@ -708,7 +708,8 @@ dependencies = [
"rand",
"rand_distr",
"roqoqo 1.0.0",
- "roqoqo-derive 1.1.0-beta.7",
+ "roqoqo-derive 1.0.0",
+ "roqoqo-derive 1.1.0-beta.8",
"serde",
"serde_json",
"serde_test",
@@ -731,17 +732,16 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
dependencies = [
"proc-macro2",
"quote",
- "roqoqo-derive 1.0.0",
"syn",
]
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
dependencies = [
"nalgebra",
"ndarray",
@@ -749,7 +749,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.7",
+ "roqoqo 1.1.0-beta.8",
"syn",
]
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index bc4e0438..30e8d365 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 1fe1bd01..6a34088e 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.7", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.7", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.8", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.8", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 066f77ae..4f7d23e0 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index ac8bcff7..85c02647 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -248,208 +248,208 @@ License: Apache-2.0
----------------------------------------------------
LICENSE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
====================================================
@@ -702,27 +702,27 @@ License: MIT
----------------------------------------------------
LICENSE.md:
-The MIT License (MIT)
-
-Copyright (c) 2014 Ty Overby
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+The MIT License (MIT)
+
+Copyright (c) 2014 Ty Overby
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
====================================================
@@ -976,95 +976,95 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB:
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
----------------------------------------------------
LICENSE-MIT:
-MIT License
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+MIT License
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
----------------------------------------------------
LICENSE-APACHE:
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
- 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
- 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
- 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
- (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
- (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
- (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
- (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
-
- You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
- 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
- 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
- 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
- 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
- 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+ 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+ 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+ 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+ (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
+ (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
+ (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+ (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+ 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+ 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+ 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+ 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+ 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
====================================================
@@ -3707,236 +3707,236 @@ License: MIT
----------------------------------------------------
LICENSE:
-Copyright (c) 2017 Gilad Naaman
+Copyright (c) 2017 Gilad Naaman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+====================================================
+nalgebra 0.31.1
+https://nalgebra.org
+by Sébastien Crozet <[email protected]>
+General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
+License: BSD-3-Clause
+----------------------------------------------------
+LICENSE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+ APPENDIX: How to apply the Apache License to your work.
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+ Copyright 2020 Sébastien Crozet
-====================================================
-nalgebra 0.31.1
-https://nalgebra.org
-by Sébastien Crozet <[email protected]>
-General-purpose linear algebra library with transformations and statically-sized or dynamically-sized matrices.
-License: BSD-3-Clause
-----------------------------------------------------
-LICENSE:
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2020 Sébastien Crozet
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
====================================================
@@ -6667,207 +6667,207 @@ SOFTWARE.
----------------------------------------------------
LICENSE-APACHE:
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright 2019-2020 CreepySkeleton <[email protected]>
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2019-2020 CreepySkeleton <[email protected]>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
====================================================
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.7
+qoqo 1.1.0-beta.8
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.7
+qoqo-macros 1.1.0-beta.8
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11061,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.1.0-beta.7
+roqoqo 1.1.0-beta.8
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.7
+roqoqo-derive 1.1.0-beta.8
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11484,7 +11484,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.7
+roqoqo-test 1.1.0-beta.8
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
@@ -11943,17 +11943,17 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB.md:
-Copyright (c) 2020 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2020 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
====================================================
@@ -16076,17 +16076,17 @@ License: Zlib OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-ZLIB.md:
-Copyright (c) 2020 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2020 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
====================================================
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index 640a9a9d..9c468755 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -19,9 +19,7 @@ doctest = false
proc-macro2 = "1.0"
syn = { version = "1.0", features = ["full", "visit"] }
quote = "1.0"
-test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0", optional=true}
[features]
default = []
overrotate = []
-compatibility = ["test_roqoqo_derive_1_0"]
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index b5aeb5ed..6332aa7c 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.7"
+version = "1.1.0-beta.8"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.7", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.8", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -39,6 +39,7 @@ async-trait = {version = "0.1", optional = true}
futures ={version = "0.3", optional=true}
petgraph = {version = "0.6.2", optional=true}
test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0", optional=true}
+test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0", optional=true}
[dev-dependencies]
serde_test = "1.0"
@@ -62,4 +63,4 @@ overrotate = [ "rand_distr", "roqoqo-derive/overrotate"]
async = ["async-trait", "futures"]
# json_schema=["schemars", "serialize", "qoqo_calculator/json_schema"]
circuitdag = ["petgraph"]
-compatibility = ["roqoqo-derive/compatibility", "test_roqoqo_1_0"]
+compatibility = ["test_roqoqo_derive_1_0", "test_roqoqo_1_0"]
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly on the class, without first constructing an instance (a hypothetical sketch follows this record).
| 2022-10-06T13:59:26 | 0.0 | [] | [] |
|||
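
The fix described above amounts to exposing the deserialization constructors as class-level callables in the pyo3 bindings. The following is a minimal hypothetical sketch, not the actual qoqo source: `CircuitWrapper` and its `payload` field are stand-ins for the real wrapper around `roqoqo::Circuit`, and the body just stores the raw string instead of deserializing. It only illustrates how pyo3's `#[staticmethod]` makes a constructor callable on the class itself.

```rust
use pyo3::prelude::*;

// Stand-in for the real qoqo wrapper type; the `name` attribute
// controls what the class is called on the Python side.
#[pyclass(name = "Circuit")]
struct CircuitWrapper {
    payload: String,
}

#[pymethods]
impl CircuitWrapper {
    #[new]
    fn new() -> Self {
        CircuitWrapper { payload: String::new() }
    }

    /// A staticmethod is callable on the class itself, so
    /// `Circuit.from_json(...)` works without an instance.
    #[staticmethod]
    fn from_json(input: &str) -> PyResult<Self> {
        // Real code would deserialize `input` (e.g. via serde_json)
        // into the wrapped circuit; this stub keeps the sketch
        // self-contained.
        Ok(CircuitWrapper { payload: input.to_string() })
    }
}
```

With a binding shaped like this, Python code can call `Circuit.from_json(json_string)` directly, whereas a plain method would first require `Circuit().from_json(json_string)`; pyo3 also offers `#[classmethod]` (taking a `&PyType` first argument) when the class object itself is needed inside the constructor.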
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-377 | 7b54695056f58cf186b8075372e01051badf4881 | diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml
index 56d4c12c..601384b4 100644
--- a/.github/workflows/build_and_deploy.yml
+++ b/.github/workflows/build_and_deploy.yml
@@ -231,6 +231,7 @@ jobs:
- uses: taiki-e/install-action@cargo-llvm-cov
- run: |
cd qoqo
+ pip install numpy
cargo llvm-cov --no-default-features --lcov --output-path lcov_qoqo.info
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 27790576..4a22e882 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,23 @@
This changelog track changes to the qoqo project starting at version v0.5.0
+## 1.1.0-beta.6
+
+* Fixed github pipeline deploy
+
+## 1.1.0-beta.5
+
+* Fixed github pipeline deploy qoqo coverage
+
+## 1.1.0-beta.4
+
+* Fixed compatibility tests
+
+## 1.1.0-beta.3
+
+* Added rich comparison for Python interface of Measurements
+* Added PragmaLoop
+
## 1.1.0-beta.1
* Allowed creating PragmaSetStateVector from float or integer numpy arrays.
diff --git a/Cargo.lock b/Cargo.lock
index d75dd523..4cdaa7c5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -232,9 +232,9 @@ checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
[[package]]
name = "inventory"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30a61b8101d87996f82d725ba701b1987b7afc72f481c13513a30b855b9c9133"
+checksum = "e21e0a36a4dc4b469422ee17f715e8313f4a637675656d6a13637954278c6f55"
dependencies = [
"ctor",
"ghost",
@@ -248,9 +248,9 @@ checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
[[package]]
name = "libc"
-version = "0.2.132"
+version = "0.2.134"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
+checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb"
[[package]]
name = "libm"
@@ -260,9 +260,9 @@ checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565"
[[package]]
name = "lock_api"
-version = "0.4.8"
+version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f80bf5aacaf25cbfc8210d1cfb718f2bf3b11c4c54e5afe36c236853a8ec390"
+checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
dependencies = [
"autocfg",
"scopeguard",
@@ -376,9 +376,9 @@ dependencies = [
[[package]]
name = "numpy"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6522ac2e780f532432a7c7f5dbadbfcea9ff1cf4dd858fb509ca13061a928413"
+checksum = "a462c1af5ba1fddec1488c4646993a23ae7931f9e170ccba23e9c7c834277797"
dependencies = [
"ahash",
"libc",
@@ -391,9 +391,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.14.0"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
+checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
[[package]]
name = "parking_lot"
@@ -480,18 +480,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.43"
+version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
+checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyo3"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12f72538a0230791398a0986a6518ebd88abc3fded89007b506ed072acc831e1"
+checksum = "201b6887e5576bf2f945fe65172c1fcbf3fcf285b23e4d71eb171d9736e38d32"
dependencies = [
"cfg-if",
"indoc",
@@ -507,9 +507,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc4cf18c20f4f09995f3554e6bcf9b09bd5e4d6b67c562fdfaafa644526ba479"
+checksum = "bf0708c9ed01692635cbf056e286008e5a2927ab1a5e48cdd3aeb1ba5a6fef47"
dependencies = [
"once_cell",
"target-lexicon",
@@ -517,9 +517,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a41877f28d8ebd600b6aa21a17b40c3b0fc4dfe73a27b6e81ab3d895e401b0e9"
+checksum = "90352dea4f486932b72ddf776264d293f85b79a1d214de1d023927b41461132d"
dependencies = [
"libc",
"pyo3-build-config",
@@ -527,9 +527,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e81c8d4bcc2f216dc1b665412df35e46d12ee8d3d046b381aad05f1fcf30547"
+checksum = "7eb24b804a2d9e88bfcc480a5a6dd76f006c1e3edaf064e8250423336e2cd79d"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -539,9 +539,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85752a767ee19399a78272cc2ab625cd7d373b2e112b4b13db28de71fa892784"
+checksum = "f22bb49f6a7348c253d7ac67a6875f2dc65f36c2ae64a82c381d528972bea6d6"
dependencies = [
"proc-macro2",
"quote",
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo",
+ "roqoqo 1.1.0-beta.6",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
dependencies = [
"proc-macro2",
"quote",
@@ -638,9 +638,9 @@ dependencies = [
[[package]]
name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
@@ -672,7 +672,27 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.2"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b298dab631f5cd4dbb92811398f20accde321f56b96347de413fd888965144c"
+dependencies = [
+ "bincode",
+ "nalgebra",
+ "ndarray",
+ "num-complex",
+ "proc-macro2",
+ "qoqo_calculator",
+ "quote",
+ "rand",
+ "roqoqo-derive 1.0.0",
+ "serde",
+ "syn",
+ "thiserror",
+]
+
+[[package]]
+name = "roqoqo"
+version = "1.1.0-beta.6"
dependencies = [
"async-trait",
"bincode",
@@ -687,7 +707,9 @@ dependencies = [
"quote",
"rand",
"rand_distr",
- "roqoqo-derive",
+ "roqoqo 1.0.0",
+ "roqoqo-derive 1.0.0",
+ "roqoqo-derive 1.1.0-beta.6",
"serde",
"serde_json",
"serde_test",
@@ -699,7 +721,18 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.2"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c799726c6f2553b8145b671b57494efdc6a1bd2c84fcbf7e8313f09982fe5060"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "roqoqo-derive"
+version = "1.1.0-beta.6"
dependencies = [
"proc-macro2",
"quote",
@@ -708,7 +741,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
dependencies = [
"nalgebra",
"ndarray",
@@ -716,7 +749,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo",
+ "roqoqo 1.1.0-beta.6",
"syn",
]
@@ -743,18 +776,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.144"
+version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860"
+checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.144"
+version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00"
+checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
dependencies = [
"proc-macro2",
"quote",
@@ -774,9 +807,9 @@ dependencies = [
[[package]]
name = "serde_test"
-version = "1.0.144"
+version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c7f3621491f256177206a7c2152c17f322c0d0b30af05359088172437d29e25"
+checksum = "9c17d2112159132660b4c5399e274f676fb75a2f8d70b7468f18f045b71138ed"
dependencies = [
"serde",
]
@@ -805,15 +838,15 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.9.0"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
-version = "1.0.99"
+version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
+checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2"
dependencies = [
"proc-macro2",
"quote",
@@ -828,18 +861,18 @@ checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"
[[package]]
name = "test-case"
-version = "2.2.1"
+version = "2.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07aea929e9488998b64adc414c29fe5620398f01c2e3f58164122b17e567a6d5"
+checksum = "21d6cf5a7dffb3f9dceec8e6b8ca528d9bd71d36c9f074defb548ce161f598c0"
dependencies = [
"test-case-macros",
]
[[package]]
name = "test-case-macros"
-version = "2.2.1"
+version = "2.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c95968eedc6fc4f5c21920e0f4264f78ec5e4c56bb394f319becc1a5830b3e54"
+checksum = "e45b7bf6e19353ddd832745c8fcf77a17a93171df7151187f26623f2b75b5b26"
dependencies = [
"cfg-if",
"proc-macro-error",
@@ -850,18 +883,18 @@ dependencies = [
[[package]]
name = "thiserror"
-version = "1.0.34"
+version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c1b05ca9d106ba7d2e31a9dab4a64e7be2cce415321966ea3132c49a656e252"
+checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.34"
+version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8f2591983642de85c921015f3f070c665a197ed69e417af436115e3a1407487"
+checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
dependencies = [
"proc-macro2",
"quote",
@@ -900,9 +933,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.3"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
+checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd"
[[package]]
name = "unindent"
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index cb4812ab..c30f7c0f 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 45f52ae5..4405e457 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.2", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.2", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.6", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.6", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
@@ -56,8 +56,3 @@ extension-module = ["pyo3/extension-module"]#, "qoqo_calculator_pyo3/extension-m
default = ["extension-module"]
circuitdag = ["roqoqo/circuitdag"]
-[package.metadata.maturin]
-requires-dist = ["numpy", "qoqo_calculator_pyo3>=1.1.0"]
-maintainer = "HQS Quantum Simulations GmbH"
-maintainer-email = "[email protected]"
-requires-python = ">=3.7"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 48f52b6f..1766eaa1 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 5830e7c9..161b2c86 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -4,6 +4,35 @@ https://github.com/tkaitchuck/ahash
by Tom Kaitchuck <[email protected]>
A non-cryptographic hash function using AES-NI for high performance
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2016 Amanieu d'Antras
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -209,35 +238,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2016 Amanieu d'Antras
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
approx 0.5.1
@@ -458,6 +458,35 @@ https://github.com/cuviper/autocfg
by Josh Stone <[email protected]>
Automatic cfg for Rust compiler features
License: Apache-2.0 OR MIT
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2018 Josh Stone
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -663,35 +692,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2018 Josh Stone
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
bincode 1.3.3
@@ -732,6 +732,35 @@ by The Rust Project Developers
A macro to generate structures which behave like bitflags.
License: MIT/Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2014 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -937,35 +966,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2014 The Rust Project Developers
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
bytemuck 1.12.1
@@ -973,6 +973,34 @@ https://github.com/Lokathor/bytemuck
by Lokathor <[email protected]>
A crate for mucking around with piles of bytes.
License: Zlib OR Apache-2.0 OR MIT
+----------------------------------------------------
+LICENSE-ZLIB:
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
+
+----------------------------------------------------
+LICENSE-MIT:
+
+MIT License
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -1038,34 +1066,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-ZLIB:
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
-
-----------------------------------------------------
-LICENSE-MIT:
-
-MIT License
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
====================================================
cfg-if 1.0.0
@@ -1077,21 +1077,50 @@ item that gets emitted.
License: MIT/Apache-2.0
----------------------------------------------------
-LICENSE-APACHE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+LICENSE-MIT:
-1. Definitions.
+Copyright (c) 2014 Alex Crichton
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+----------------------------------------------------
+LICENSE-APACHE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
@@ -1281,10 +1310,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+fixedbitset 0.4.2
+https://github.com/petgraph/fixedbitset
+by bluss
+FixedBitSet is a simple bitset collection
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2014 Alex Crichton
+Copyright (c) 2015-2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1310,13 +1346,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-fixedbitset 0.4.2
-https://github.com/petgraph/fixedbitset
-by bluss
-FixedBitSet is a simple bitset collection
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -1522,10 +1551,18 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+getrandom 0.2.7
+https://github.com/rust-random/getrandom
+by The Rand Project Developers
+A small cross-platform library for retrieving random data from system source
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2015-2017
+Copyright 2018 Developers of the Rand project
+Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1551,13 +1588,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-getrandom 0.2.7
-https://github.com/rust-random/getrandom
-by The Rand Project Developers
-A small cross-platform library for retrieving random data from system source
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -1763,11 +1793,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+hashbrown 0.12.3
+https://github.com/rust-lang/hashbrown
+by Amanieu d'Antras <[email protected]>
+A Rust port of Google's SwissTable hash map
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright 2018 Developers of the Rand project
-Copyright (c) 2014 The Rust Project Developers
+Copyright (c) 2016 Amanieu d'Antras
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1793,13 +1829,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-hashbrown 0.12.3
-https://github.com/rust-lang/hashbrown
-by Amanieu d'Antras <[email protected]>
-A Rust port of Google's SwissTable hash map
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2005,10 +2034,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+indexmap 1.9.1
+https://github.com/bluss/indexmap
+by
+A hash table with consistent order and fast iteration.
+License: Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 Amanieu d'Antras
+Copyright (c) 2016--2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -2034,13 +2070,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-indexmap 1.9.1
-https://github.com/bluss/indexmap
-by
-A hash table with consistent order and fast iteration.
-License: Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-APACHE:
@@ -2246,11 +2275,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+indoc 1.0.7
+https://github.com/dtolnay/indoc
+by David Tolnay <[email protected]>
+Indented document literals
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016--2017
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -2275,13 +2309,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-indoc 1.0.7
-https://github.com/dtolnay/indoc
-by David Tolnay <[email protected]>
-Indented document literals
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2487,6 +2514,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+itoa 1.0.3
+https://github.com/dtolnay/itoa
+by David Tolnay <[email protected]>
+Fast integer primitive to string conversion
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -2514,13 +2548,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-itoa 1.0.3
-https://github.com/dtolnay/itoa
-by David Tolnay <[email protected]>
-Fast integer primitive to string conversion
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2726,9 +2753,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+libc 0.2.134
+https://github.com/rust-lang/libc
+by The Rust Project Developers
+Raw FFI bindings to platform libraries like libc.
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
+Copyright (c) 2014-2020 The Rust Project Developers
+
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -2753,14 +2790,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-libc 0.2.132
-https://github.com/rust-lang/libc
-by The Rust Project Developers
-Raw FFI bindings to platform libraries like libc.
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2941,10 +2970,17 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
END OF TERMS AND CONDITIONS
+
+====================================================
+libm 0.2.5
+https://github.com/rust-lang/libm
+by Jorge Aparicio <[email protected]>
+libm in pure Rust
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2014-2020 The Rust Project Developers
+Copyright (c) 2018 Jorge Aparicio
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -2970,13 +3006,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-libm 0.2.5
-https://github.com/rust-lang/libm
-by Jorge Aparicio <[email protected]>
-libm in pure Rust
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -3182,10 +3211,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+lock_api 0.4.9
+https://github.com/Amanieu/parking_lot
+by Amanieu d'Antras <[email protected]>
+Wrappers to create fully-featured Mutex and RwLock types. Compatible with no_std.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2018 Jorge Aparicio
+Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -3211,13 +3247,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-lock_api 0.4.8
-https://github.com/Amanieu/parking_lot
-by Amanieu d'Antras <[email protected]>
-Wrappers to create fully-featured Mutex and RwLock types. Compatible with no_std.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -3423,10 +3452,21 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+matrixmultiply 0.3.2
+https://github.com/bluss/matrixmultiply/
+by bluss, R. Janis Goldschmidt
+General matrix multiplication for f32 and f64 matrices. Operates on matrices with general layout (they can use arbitrary row and column stride). Detects and uses AVX or SSE2 on x86 platforms transparently for higher performance. Uses a microkernel strategy, so that the implementation is easy to parallelize and optimize.
+
+Supports multithreading.
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 The Rust Project Developers
+Copyright (c) 2016 - 2021 Ulrik Sverdrup "bluss"
+Copyright (c) 2018 R. Janis Goldschmidt
+Copyright (c) 2021 DutchGhost [constparse.rs]
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -3452,15 +3492,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-matrixmultiply 0.3.2
-https://github.com/bluss/matrixmultiply/
-by bluss, R. Janis Goldschmidt
-General matrix multiplication for f32 and f64 matrices. Operates on matrices with general layout (they can use arbitrary row and column stride). Detects and uses AVX or SSE2 on x86 platforms transparently for higher performance. Uses a microkernel strategy, so that the implementation is easy to parallelize and optimize.
-
-Supports multithreading.
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -3666,37 +3697,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2016 - 2021 Ulrik Sverdrup "bluss"
-Copyright (c) 2018 R. Janis Goldschmidt
-Copyright (c) 2021 DutchGhost [constparse.rs]
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
memoffset 0.6.5
@@ -3952,6 +3952,37 @@ https://github.com/rust-ndarray/ndarray
by Ulrik Sverdrup "bluss", Jim Turner
An n-dimensional array for general elements and for numerics. Lightweight array views and slicing; views support chunking and splitting.
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2015 - 2021 Ulrik Sverdrup "bluss",
+ Jim Turner,
+ and ndarray developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -4157,12 +4188,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-Copyright (c) 2015 - 2021 Ulrik Sverdrup "bluss",
- Jim Turner,
- and ndarray developers
+====================================================
+num-complex 0.4.2
+https://github.com/rust-num/num-complex
+by The Rust Project Developers
+Complex numbers implementation for Rust
+License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -4188,13 +4224,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-complex 0.4.2
-https://github.com/rust-num/num-complex
-by The Rust Project Developers
-Complex numbers implementation for Rust
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -4400,6 +4429,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-integer 0.1.45
+https://github.com/rust-num/num-integer
+by The Rust Project Developers
+Integer traits and functions
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -4429,13 +4465,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-integer 0.1.45
-https://github.com/rust-num/num-integer
-by The Rust Project Developers
-Integer traits and functions
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -4641,6 +4670,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-rational 0.4.1
+https://github.com/rust-num/num-rational
+by The Rust Project Developers
+Rational numbers implementation for Rust
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -4670,13 +4706,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-rational 0.4.1
-https://github.com/rust-num/num-rational
-by The Rust Project Developers
-Rational numbers implementation for Rust
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -4882,6 +4911,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-traits 0.2.15
+https://github.com/rust-num/num-traits
+by The Rust Project Developers
+Numeric traits for generic mathematics
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -4911,13 +4947,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-traits 0.2.15
-https://github.com/rust-num/num-traits
-by The Rust Project Developers
-Numeric traits for generic mathematics
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -5123,38 +5152,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2014 The Rust Project Developers
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
-numpy 0.17.1
+numpy 0.17.2
https://github.com/PyO3/rust-numpy
by The rust-numpy Project Developers, PyO3 Project and Contributors <https://github.com/PyO3>
PyO3-based Rust bindings of the NumPy C-API
@@ -5190,11 +5190,38 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
====================================================
-once_cell 1.14.0
+once_cell 1.15.0
https://github.com/matklad/once_cell
by Aleksey Kladov <[email protected]>
Single assignment cells and lazy values.
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -5400,9 +5427,18 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+parking_lot 0.12.1
+https://github.com/Amanieu/parking_lot
+by Amanieu d'Antras <[email protected]>
+More compact and efficient implementations of the standard synchronization primitives.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
+Copyright (c) 2016 The Rust Project Developers
+
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -5427,13 +5463,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-parking_lot 0.12.1
-https://github.com/Amanieu/parking_lot
-by Amanieu d'Antras <[email protected]>
-More compact and efficient implementations of the standard synchronization primitives.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -5639,6 +5668,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+parking_lot_core 0.9.3
+https://github.com/Amanieu/parking_lot
+by Amanieu d'Antras <[email protected]>
+An advanced API for creating custom synchronization primitives.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -5668,13 +5704,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-parking_lot_core 0.9.3
-https://github.com/Amanieu/parking_lot
-by Amanieu d'Antras <[email protected]>
-An advanced API for creating custom synchronization primitives.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -5880,10 +5909,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+paste 1.0.9
+https://github.com/dtolnay/paste
+by David Tolnay <[email protected]>
+Macros for all your token pasting needs
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 The Rust Project Developers
+Copyright (c) 2018
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -5909,13 +5945,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-paste 1.0.9
-https://github.com/dtolnay/paste
-by David Tolnay <[email protected]>
-Macros for all your token pasting needs
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6121,10 +6150,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+petgraph 0.6.2
+https://github.com/petgraph/petgraph
+by bluss, mitchmindtree
+Graph data structure library. Provides graph types and graph algorithms.
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2018
+Copyright (c) 2015
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -6150,13 +6186,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-petgraph 0.6.2
-https://github.com/petgraph/petgraph
-by bluss, mitchmindtree
-Graph data structure library. Provides graph types and graph algorithms.
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6362,10 +6391,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+ppv-lite86 0.2.16
+https://github.com/cryptocorrosion/cryptocorrosion
+by The CryptoCorrosion Contributors
+Implementation of the crypto-simd API for x86
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2015
+Copyright (c) 2019 The CryptoCorrosion Contributors
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -6391,13 +6427,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-ppv-lite86 0.2.16
-https://github.com/cryptocorrosion/cryptocorrosion
-by The CryptoCorrosion Contributors
-Implementation of the crypto-simd API for x86
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6603,42 +6632,38 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+proc-macro-error 1.0.4
+https://gitlab.com/CreepySkeleton/proc-macro-error
+by CreepySkeleton <[email protected]>
+Almost drop-in replacement to panics in proc-macros
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2019 The CryptoCorrosion Contributors
+MIT License
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
+Copyright (c) 2019-2020 CreepySkeleton
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
-====================================================
-proc-macro-error 1.0.4
-https://gitlab.com/CreepySkeleton/proc-macro-error
-by CreepySkeleton <[email protected]>
-Almost drop-in replacement to panics in proc-macros
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6844,6 +6869,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+proc-macro-error-attr 1.0.4
+https://gitlab.com/CreepySkeleton/proc-macro-error
+by CreepySkeleton <[email protected]>
+Attribute macro for proc-macro-error crate
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -6869,15 +6901,8 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-
-====================================================
-proc-macro-error-attr 1.0.4
-https://gitlab.com/CreepySkeleton/proc-macro-error
-by CreepySkeleton <[email protected]>
-Attribute macro for proc-macro-error crate
-License: MIT OR Apache-2.0
-----------------------------------------------------
-LICENSE-APACHE:
+----------------------------------------------------
+LICENSE-APACHE:
Apache License
Version 2.0, January 2004
@@ -7081,38 +7106,42 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+proc-macro2 1.0.46
+https://github.com/dtolnay/proc-macro2
+by David Tolnay <[email protected]>, Alex Crichton <[email protected]>
+A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-MIT License
-
-Copyright (c) 2019-2020 CreepySkeleton
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Copyright (c) 2014 Alex Crichton
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
-====================================================
-proc-macro2 1.0.43
-https://github.com/dtolnay/proc-macro2
-by David Tolnay <[email protected]>, Alex Crichton <[email protected]>
-A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -7318,38 +7347,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2014 Alex Crichton
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
-pyo3 0.17.1
+pyo3 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Bindings to Python interpreter
@@ -7549,7 +7549,7 @@ LICENSE:
====================================================
-pyo3-build-config 0.17.1
+pyo3-build-config 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Build configuration for the PyO3 ecosystem
@@ -7749,7 +7749,7 @@ LICENSE:
====================================================
-pyo3-ffi 0.17.1
+pyo3-ffi 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Python-API bindings for the PyO3 ecosystem
@@ -8377,7 +8377,7 @@ docs/license.html_lib.
====================================================
-pyo3-macros 0.17.1
+pyo3-macros 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Proc macros for PyO3 package
@@ -8577,7 +8577,7 @@ LICENSE:
====================================================
-pyo3-macros-backend 0.17.1
+pyo3-macros-backend 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Code generation for PyO3 package
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.2
+qoqo 1.1.0-beta.6
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.2
+qoqo-macros 1.1.0-beta.6
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -9629,6 +9629,35 @@ https://github.com/dtolnay/quote
by David Tolnay <[email protected]>
Quasi-quoting macro quote!(...)
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2016 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -9834,10 +9863,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+rand 0.8.5
+https://rust-random.github.io/book
+by The Rand Project Developers, The Rust Project Developers
+Random number generators and other randomness functionality.
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 The Rust Project Developers
+Copyright 2018 Developers of the Rand project
+Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -9863,14 +9901,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand 0.8.5
-https://rust-random.github.io/book
-by The Rand Project Developers, The Rust Project Developers
-Random number generators and other randomness functionality.
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10051,6 +10081,14 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
END OF TERMS AND CONDITIONS
+
+====================================================
+rand_chacha 0.3.1
+https://rust-random.github.io/book
+by The Rand Project Developers, The Rust Project Developers, The CryptoCorrosion Contributors
+ChaCha random number generator
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -10081,14 +10119,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand_chacha 0.3.1
-https://rust-random.github.io/book
-by The Rand Project Developers, The Rust Project Developers, The CryptoCorrosion Contributors
-ChaCha random number generator
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10294,6 +10324,14 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+rand_core 0.6.4
+https://rust-random.github.io/book
+by The Rand Project Developers, The Rust Project Developers
+Core random number generator traits and tools for implementation.
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -10324,14 +10362,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand_core 0.6.3
-https://rust-random.github.io/book
-by The Rand Project Developers, The Rust Project Developers
-Core random number generator traits and tools for implementation.
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10523,25 +10553,18 @@ APPENDIX: How to apply the Apache License to your work.
same "printed page" as the copyright notice for easier
identification within third-party archives.
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- https://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+====================================================
+rand_distr 0.4.3
+https://rust-random.github.io/book
+by The Rand Project Developers
+Sampling from random number distributions
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
Copyright 2018 Developers of the Rand project
-Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -10567,14 +10590,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand_distr 0.4.3
-https://rust-random.github.io/book
-by The Rand Project Developers
-Sampling from random number distributions
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10766,10 +10781,21 @@ APPENDIX: How to apply the Apache License to your work.
same "printed page" as the copyright notice for easier
identification within third-party archives.
+
+====================================================
+rawpointer 0.2.1
+https://github.com/bluss/rawpointer/
+by bluss
+Extra methods for raw pointers and `NonNull<T>`.
+
+For example `.post_inc()` and `.pre_dec()` (c.f. `ptr++` and `--ptr`),
+`offset` and `add` for `NonNull<T>`, and the function `ptrdistance`.
+
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright 2018 Developers of the Rand project
+Copyright (c) 2015
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -10795,17 +10821,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rawpointer 0.2.1
-https://github.com/bluss/rawpointer/
-by bluss
-Extra methods for raw pointers and `NonNull<T>`.
-
-For example `.post_inc()` and `.pre_dec()` (c.f. `ptr++` and `--ptr`),
-`offset` and `add` for `NonNull<T>`, and the function `ptrdistance`.
-
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -11011,35 +11026,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2015
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
redox_syscall 0.2.16
@@ -11075,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.1.0-beta.2
+roqoqo 1.0.0
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11287,9 +11273,10 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.2
+roqoqo 1.1.0-beta.6
+https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
-Macros for the roqoqo crate
+Rust Quantum Computing Toolkit by HQS
License: Apache-2.0
----------------------------------------------------
LICENSE:
@@ -11498,10 +11485,9 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.2
-https://github.com/HQSquantumsimulations/qoqo
+roqoqo-derive 1.0.0
by HQS Quantum Simulations <[email protected]>
-Testing helper functions for roqoqo toolkit
+Macros for the roqoqo crate
License: Apache-2.0
----------------------------------------------------
LICENSE:
@@ -11710,8 +11696,431 @@ LICENSE:
====================================================
-ryu 1.0.11
-https://github.com/dtolnay/ryu
+roqoqo-derive 1.1.0-beta.6
+by HQS Quantum Simulations <[email protected]>
+Macros for the roqoqo crate
+License: Apache-2.0
+----------------------------------------------------
+LICENSE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2021 HQS Quantum Simulations GmbH
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+====================================================
+roqoqo-test 1.1.0-beta.6
+https://github.com/HQSquantumsimulations/qoqo
+by HQS Quantum Simulations <[email protected]>
+Testing helper functions for roqoqo toolkit
+License: Apache-2.0
+----------------------------------------------------
+LICENSE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2021 HQS Quantum Simulations GmbH
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+====================================================
+ryu 1.0.11
+https://github.com/dtolnay/ryu
by David Tolnay <[email protected]>
Fast floating point to string conversion
License: Apache-2.0 OR BSL-1.0
@@ -11981,6 +12390,35 @@ Defines the macros `defer!`, `defer_on_unwind!`, `defer_on_success!` as
shorthands for guards with one of the implemented strategies.
License: MIT/Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2016-2019 Ulrik Sverdrup "bluss" and scopeguard developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -12186,11 +12624,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde 1.0.145
+https://serde.rs
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+A generic serialization/deserialization framework
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016-2019 Ulrik Sverdrup "bluss" and scopeguard developers
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -12215,13 +12658,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde 1.0.144
-https://serde.rs
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-A generic serialization/deserialization framework
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -12427,6 +12863,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde_derive 1.0.145
+https://serde.rs
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+Macros 1.1 implementation of #[derive(Serialize, Deserialize)]
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -12454,13 +12897,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde_derive 1.0.144
-https://serde.rs
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-Macros 1.1 implementation of #[derive(Serialize, Deserialize)]
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -12666,6 +13102,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde_json 1.0.85
+https://github.com/serde-rs/json
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+A JSON serialization file format
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -12693,13 +13136,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde_json 1.0.85
-https://github.com/serde-rs/json
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-A JSON serialization file format
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -12905,6 +13341,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde_test 1.0.145
+https://serde.rs
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+Token De/Serializer for testing De/Serialize implementations
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -12932,13 +13375,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde_test 1.0.144
-https://serde.rs
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-Token De/Serializer for testing De/Serialize implementations
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -13144,33 +13580,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
simba 0.7.2
@@ -13377,19 +13786,48 @@ LICENSE:
http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+====================================================
+smallvec 1.10.0
+https://github.com/servo/rust-smallvec
+by The Servo Project Developers
+'Small vector' optimization: store up to a small number of items on the stack
+License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2018 The Servo Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
-====================================================
-smallvec 1.9.0
-https://github.com/servo/rust-smallvec
-by The Servo Project Developers
-'Small vector' optimization: store up to a small number of items on the stack
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -13595,11 +14033,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+syn 1.0.101
+https://github.com/dtolnay/syn
+by David Tolnay <[email protected]>
+Parser for Rust source code
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2018 The Servo Project Developers
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -13624,13 +14067,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-syn 1.0.99
-https://github.com/dtolnay/syn
-by David Tolnay <[email protected]>
-Parser for Rust source code
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -13836,33 +14272,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
target-lexicon 0.12.4
@@ -14079,272 +14488,67 @@ LICENSE:
--- LLVM Exceptions to the Apache 2.0 License ----
-As an exception, if, as a result of your compiling your source code, portions
-of this Software are embedded into an Object form of such source code, you
-may redistribute such embedded portions in such Object form without complying
-with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
-
-In addition, if you combine or link compiled forms of this Software with
-software that is licensed under the GPLv2 ("Combined Software") and if a
-court of competent jurisdiction determines that the patent provision (Section
-3), the indemnity provision (Section 9) or other Section of the License
-conflicts with the conditions of the GPLv2, you may retroactively and
-prospectively choose to deem waived or otherwise exclude such Section(s) of
-the License, but only in their entirety and only with respect to the Combined
-Software.
-
-
-
-====================================================
-test-case 2.2.1
-https://github.com/frondeus/test-case
-by Marcin Sas-Szymanski <[email protected]>, Wojciech Polak <[email protected]>, Łukasz Biel <[email protected]>
-Provides #[test_case(...)] procedural macro attribute for generating parametrized test cases easily
-License: MIT
-----------------------------------------------------
-LICENSE:
-
-MIT License
-
-Copyright (c) 2017 Marcin Sas-Szymański
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
-
-====================================================
-test-case-macros 2.2.1
-https://github.com/frondeus/test-case
-by Marcin Sas-Szymanski <[email protected]>, Wojciech Polak <[email protected]>, Łukasz Biel <[email protected]>
-Provides #[test_case(...)] procedural macro attribute for generating parametrized test cases easily
-License: MIT
-
-====================================================
-thiserror 1.0.34
-https://github.com/dtolnay/thiserror
-by David Tolnay <[email protected]>
-derive(Error)
-License: MIT OR Apache-2.0
-----------------------------------------------------
-LICENSE-APACHE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
+As an exception, if, as a result of your compiling your source code, portions
+of this Software are embedded into an Object form of such source code, you
+may redistribute such embedded portions in such Object form without complying
+with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
+In addition, if you combine or link compiled forms of this Software with
+software that is licensed under the GPLv2 ("Combined Software") and if a
+court of competent jurisdiction determines that the patent provision (Section
+3), the indemnity provision (Section 9) or other Section of the License
+conflicts with the conditions of the GPLv2, you may retroactively and
+prospectively choose to deem waived or otherwise exclude such Section(s) of
+the License, but only in their entirety and only with respect to the Combined
+Software.
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
+====================================================
+test-case 2.2.2
+https://github.com/frondeus/test-case
+by Marcin Sas-Szymanski <[email protected]>, Wojciech Polak <[email protected]>, Łukasz Biel <[email protected]>
+Provides #[test_case(...)] procedural macro attribute for generating parametrized test cases easily
+License: MIT
+----------------------------------------------------
+LICENSE:
-END OF TERMS AND CONDITIONS
+MIT License
-APPENDIX: How to apply the Apache License to your work.
+Copyright (c) 2017 Marcin Sas-Szymański
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
-Copyright [yyyy] [name of copyright owner]
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
- http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+====================================================
+test-case-macros 2.2.2
+https://github.com/frondeus/test-case
+by Marcin Sas-Szymanski <[email protected]>, Wojciech Polak <[email protected]>, Łukasz Biel <[email protected]>
+Provides #[test_case(...)] procedural macro attribute for generating parametrized test cases easily
+License: MIT
+
+====================================================
+thiserror 1.0.37
+https://github.com/dtolnay/thiserror
+by David Tolnay <[email protected]>
+derive(Error)
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -14372,13 +14576,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-thiserror-impl 1.0.34
-https://github.com/dtolnay/thiserror
-by David Tolnay <[email protected]>
-Implementation detail of the `thiserror` crate
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -14584,6 +14781,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+thiserror-impl 1.0.37
+https://github.com/dtolnay/thiserror
+by David Tolnay <[email protected]>
+Implementation detail of the `thiserror` crate
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -14611,16 +14815,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-typenum 1.15.0
-https://github.com/paholg/typenum
-by Paho Lurie-Gregg <[email protected]>, Andre Bogus <[email protected]>
-Typenum is a Rust library for type-level numbers evaluated at
- compile time. It currently supports bits, unsigned integers, and signed
- integers. It also provides a type-level array of type-level numbers, but its
- implementation is incomplete.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -14812,7 +15006,7 @@ APPENDIX: How to apply the Apache License to your work.
same "printed page" as the copyright notice for easier
identification within third-party archives.
-Copyright 2014 Paho Lurie-Gregg
+Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -14825,6 +15019,17 @@ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+
+====================================================
+typenum 1.15.0
+https://github.com/paholg/typenum
+by Paho Lurie-Gregg <[email protected]>, Andre Bogus <[email protected]>
+Typenum is a Rust library for type-level numbers evaluated at
+ compile time. It currently supports bits, unsigned integers, and signed
+ integers. It also provides a type-level array of type-level numbers, but its
+ implementation is incomplete.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE:
@@ -14854,63 +15059,6 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-
-====================================================
-unicode-ident 1.0.3
-https://github.com/dtolnay/unicode-ident
-by David Tolnay <[email protected]>
-Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31
-License: (MIT OR Apache-2.0) AND Unicode-DFS-2016
-----------------------------------------------------
-LICENSE-UNICODE:
-
-UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE
-
-See Terms of Use <https://www.unicode.org/copyright.html>
-for definitions of Unicode Inc.’s Data Files and Software.
-
-NOTICE TO USER: Carefully read the following legal agreement.
-BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
-DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
-YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
-TERMS AND CONDITIONS OF THIS AGREEMENT.
-IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
-THE DATA FILES OR SOFTWARE.
-
-COPYRIGHT AND PERMISSION NOTICE
-
-Copyright © 1991-2022 Unicode, Inc. All rights reserved.
-Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of the Unicode data files and any associated documentation
-(the "Data Files") or Unicode software and any associated documentation
-(the "Software") to deal in the Data Files or Software
-without restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, and/or sell copies of
-the Data Files or Software, and to permit persons to whom the Data Files
-or Software are furnished to do so, provided that either
-(a) this copyright and permission notice appear with all copies
-of the Data Files or Software, or
-(b) this copyright and permission notice appear in associated
-Documentation.
-
-THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT OF THIRD PARTY RIGHTS.
-IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
-NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
-DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THE DATA FILES OR SOFTWARE.
-
-Except as contained in this notice, the name of a copyright holder
-shall not be used in advertising or otherwise to promote the sale,
-use or other dealings in these Data Files or Software without prior
-written authorization of the copyright holder.
-
----------------------------------------------------
LICENSE-APACHE:
@@ -15102,7 +15250,7 @@ APPENDIX: How to apply the Apache License to your work.
same "printed page" as the copyright notice for easier
identification within third-party archives.
-Copyright [yyyy] [name of copyright owner]
+Copyright 2014 Paho Lurie-Gregg
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -15110,11 +15258,67 @@ You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+====================================================
+unicode-ident 1.0.4
+https://github.com/dtolnay/unicode-ident
+by David Tolnay <[email protected]>
+Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31
+License: (MIT OR Apache-2.0) AND Unicode-DFS-2016
+----------------------------------------------------
+LICENSE-UNICODE:
+
+UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE
+
+See Terms of Use <https://www.unicode.org/copyright.html>
+for definitions of Unicode Inc.’s Data Files and Software.
+
+NOTICE TO USER: Carefully read the following legal agreement.
+BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
+DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
+YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
+TERMS AND CONDITIONS OF THIS AGREEMENT.
+IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
+THE DATA FILES OR SOFTWARE.
+
+COPYRIGHT AND PERMISSION NOTICE
+
+Copyright © 1991-2022 Unicode, Inc. All rights reserved.
+Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Unicode data files and any associated documentation
+(the "Data Files") or Unicode software and any associated documentation
+(the "Software") to deal in the Data Files or Software
+without restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, and/or sell copies of
+the Data Files or Software, and to permit persons to whom the Data Files
+or Software are furnished to do so, provided that either
+(a) this copyright and permission notice appear with all copies
+of the Data Files or Software, or
+(b) this copyright and permission notice appear in associated
+Documentation.
+
+THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+Except as contained in this notice, the name of a copyright holder
+shall not be used in advertising or otherwise to promote the sale,
+use or other dealings in these Data Files or Software without prior
+written authorization of the copyright holder.
----------------------------------------------------
LICENSE-MIT:
@@ -15143,13 +15347,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-unindent 0.1.10
-https://github.com/dtolnay/indoc
-by David Tolnay <[email protected]>
-Remove a column of leading whitespace from a string
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -15355,6 +15552,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+unindent 0.1.10
+https://github.com/dtolnay/indoc
+by David Tolnay <[email protected]>
+Remove a column of leading whitespace from a string
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -15382,13 +15586,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-version_check 0.9.4
-https://github.com/SergioBenitez/version_check
-by Sergio Benitez <[email protected]>
-Tiny crate to check the version of the installed/running rustc.
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -15594,6 +15791,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+version_check 0.9.4
+https://github.com/SergioBenitez/version_check
+by Sergio Benitez <[email protected]>
+Tiny crate to check the version of the installed/running rustc.
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -15617,13 +15821,6 @@ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-====================================================
-wasi 0.11.0+wasi-snapshot-preview1
-https://github.com/bytecodealliance/wasi
-by The Cranelift Project Developers
-Experimental WASI API bindings for Rust
-License: Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-APACHE:
@@ -15829,6 +16026,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+wasi 0.11.0+wasi-snapshot-preview1
+https://github.com/bytecodealliance/wasi
+by The Cranelift Project Developers
+Experimental WASI API bindings for Rust
+License: Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-Apache-2.0_WITH_LLVM-exception:
@@ -16080,6 +16284,211 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
+----------------------------------------------------
+LICENSE-APACHE:
+
+    [Apache License, Version 2.0 — standard license text omitted here; it is identical to the other Apache-2.0 copies in this DEPENDENCIES file.]
+
====================================================
wide 0.7.4
diff --git a/qoqo/src/measurements/basis_rotation_measurement.rs b/qoqo/src/measurements/basis_rotation_measurement.rs
index e30a1ece..6ede6a94 100644
--- a/qoqo/src/measurements/basis_rotation_measurement.rs
+++ b/qoqo/src/measurements/basis_rotation_measurement.rs
@@ -18,7 +18,6 @@ use bincode::serialize;
use pyo3::exceptions::{PyRuntimeError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::PyByteArray;
-use pyo3::types::PyType;
use roqoqo::measurements::PauliZProduct;
use roqoqo::prelude::*;
use roqoqo::registers::{BitOutputRegister, ComplexOutputRegister, FloatOutputRegister};
@@ -219,12 +218,51 @@ impl PauliZProductWrapper {
///
/// Raises:
/// RuntimeError: Cannot deserialize string to PauliZProduct.
- #[allow(unused_variables)]
- #[classmethod]
- pub fn from_json(cls: &PyType, json_string: &str) -> PyResult<Self> {
+ #[staticmethod]
+ pub fn from_json(json_string: &str) -> PyResult<Self> {
Ok(Self {
internal: serde_json::from_str(json_string)
.map_err(|_| PyValueError::new_err("Cannot deserialize string to PauliZProduct"))?,
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
+
+    /// Return the __richcmp__ magic method to perform rich comparison operations on PauliZProduct.
+ ///
+ /// Args:
+ /// other: The object to compare self to.
+ /// op: Type of comparison.
+ ///
+ /// Returns:
+ /// Whether the two operations compared evaluated to True or False
+ ///
+ /// Raises:
+ /// NotImplementedError: Other comparison not implemented
+ fn __richcmp__(
+ &self,
+ other: PauliZProductWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
}
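
For orientation, a minimal Python sketch of what this hunk enables: from_json as a
staticmethod plus the new comparison and repr dunders. It assumes a qoqo build that
contains this patch; `json_string` is a hypothetical placeholder for the output of an
earlier `to_json()` call and is not defined by this diff:

    from qoqo.measurements import PauliZProduct

    # json_string: placeholder for a previously serialized PauliZProduct
    m1 = PauliZProduct.from_json(json_string)  # no class argument needed anymore
    m2 = PauliZProduct.from_json(json_string)
    assert m1 == m2      # Eq/Ne provided by the new __richcmp__
    print(repr(m1))      # Debug-formatted via the new __repr__
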
diff --git a/qoqo/src/measurements/cheated_basis_rotation_measurement.rs b/qoqo/src/measurements/cheated_basis_rotation_measurement.rs
index 82e108ae..8784049f 100644
--- a/qoqo/src/measurements/cheated_basis_rotation_measurement.rs
+++ b/qoqo/src/measurements/cheated_basis_rotation_measurement.rs
@@ -18,7 +18,6 @@ use bincode::serialize;
use pyo3::exceptions::{PyRuntimeError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::PyByteArray;
-use pyo3::types::PyType;
use roqoqo::measurements::CheatedPauliZProduct;
use roqoqo::prelude::*;
use roqoqo::registers::{BitOutputRegister, ComplexOutputRegister, FloatOutputRegister};
@@ -218,12 +217,51 @@ impl CheatedPauliZProductWrapper {
///
/// Raises:
/// RuntimeError: Cannot deserialize string to CheatedPauliZProduct.
- #[allow(unused_variables)]
- #[classmethod]
- pub fn from_json(cls: &PyType, json_string: &str) -> PyResult<Self> {
+ #[staticmethod]
+ pub fn from_json(json_string: &str) -> PyResult<Self> {
Ok(Self {
internal: serde_json::from_str(json_string)
.map_err(|_| PyValueError::new_err("Cannot deserialize string to PauliZProduct"))?,
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
+
+    /// Return the __richcmp__ magic method to perform rich comparison operations on CheatedPauliZProduct.
+ ///
+ /// Args:
+ /// other: The object to compare self to.
+ /// op: Type of comparison.
+ ///
+ /// Returns:
+ /// Whether the two operations compared evaluated to True or False
+ ///
+ /// Raises:
+ /// NotImplementedError: Other comparison not implemented
+ fn __richcmp__(
+ &self,
+ other: CheatedPauliZProductWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
}
diff --git a/qoqo/src/measurements/cheated_measurement.rs b/qoqo/src/measurements/cheated_measurement.rs
index 25d27306..202ef35a 100644
--- a/qoqo/src/measurements/cheated_measurement.rs
+++ b/qoqo/src/measurements/cheated_measurement.rs
@@ -18,7 +18,6 @@ use bincode::serialize;
use pyo3::exceptions::{PyRuntimeError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::PyByteArray;
-use pyo3::types::PyType;
use roqoqo::measurements::Cheated;
use roqoqo::prelude::*;
use roqoqo::registers::{BitOutputRegister, ComplexOutputRegister, FloatOutputRegister};
@@ -218,12 +217,51 @@ impl CheatedWrapper {
///
/// Raises:
/// RuntimeError: Cannot deserialize string to Cheated.
- #[allow(unused_variables)]
- #[classmethod]
- pub fn from_json(cls: &PyType, json_string: &str) -> PyResult<Self> {
+ #[staticmethod]
+ pub fn from_json(json_string: &str) -> PyResult<Self> {
Ok(Self {
internal: serde_json::from_str(json_string)
.map_err(|_| PyValueError::new_err("Cannot deserialize string to Cheated"))?,
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
+
+    /// Return the __richcmp__ magic method to perform rich comparison operations on Cheated.
+ ///
+ /// Args:
+ /// other: The object to compare self to.
+ /// op: Type of comparison.
+ ///
+ /// Returns:
+ /// Whether the two operations compared evaluated to True or False
+ ///
+ /// Raises:
+ /// NotImplementedError: Other comparison not implemented
+ fn __richcmp__(
+ &self,
+ other: CheatedWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
}
diff --git a/qoqo/src/measurements/classical_register_measurement.rs b/qoqo/src/measurements/classical_register_measurement.rs
index 8de35106..be019ef0 100644
--- a/qoqo/src/measurements/classical_register_measurement.rs
+++ b/qoqo/src/measurements/classical_register_measurement.rs
@@ -17,7 +17,6 @@ use bincode::serialize;
use pyo3::exceptions::{PyRuntimeError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::PyByteArray;
-use pyo3::types::PyType;
use roqoqo::measurements::ClassicalRegister;
use roqoqo::prelude::*;
use roqoqo::Circuit;
@@ -146,13 +145,51 @@ impl ClassicalRegisterWrapper {
///
/// Raises:
/// PyRuntimeError: Cannot deserialize string to ClassicalRegister.
- #[allow(unused_variables)]
- #[classmethod]
- pub fn from_json(cls: &PyType, json_string: &str) -> PyResult<Self> {
+ #[staticmethod]
+ pub fn from_json(json_string: &str) -> PyResult<Self> {
Ok(Self {
internal: serde_json::from_str(json_string).map_err(|_| {
PyValueError::new_err("Cannot deserialize string to ClassicalRegister")
})?,
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
+    /// Return the __richcmp__ magic method to perform rich comparison operations on ClassicalRegister.
+ ///
+ /// Args:
+ /// other: The object to compare self to.
+ /// op: Type of comparison.
+ ///
+ /// Returns:
+ /// Whether the two operations compared evaluated to True or False
+ ///
+ /// Raises:
+ /// NotImplementedError: Other comparison not implemented
+ fn __richcmp__(
+ &self,
+ other: ClassicalRegisterWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
}
diff --git a/qoqo/src/measurements/measurement_auxiliary_data_input.rs b/qoqo/src/measurements/measurement_auxiliary_data_input.rs
index f612db46..6b23fc41 100644
--- a/qoqo/src/measurements/measurement_auxiliary_data_input.rs
+++ b/qoqo/src/measurements/measurement_auxiliary_data_input.rs
@@ -119,6 +119,35 @@ impl PauliZProductInputWrapper {
PyRuntimeError::new_err(format!("Failed to add symbolic expectation value {:?}", x))
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ fn __richcmp__(
+ &self,
+ other: PauliZProductInputWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
}
#[pyclass(name = "CheatedPauliZProductInput", module = "qoqo.measurements")]
@@ -213,6 +242,35 @@ impl CheatedPauliZProductInputWrapper {
PyRuntimeError::new_err(format!("Failed to add symbolic expectation value {:?}", x))
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
+
+ fn __richcmp__(
+ &self,
+ other: CheatedPauliZProductInputWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
}
#[pyclass(name = "CheatedInput", module = "qoqo.measurements")]
@@ -272,4 +330,32 @@ impl CheatedInputWrapper {
))
})
}
+
+ /// Implement __repr__ magic method
+ pub fn __repr__(&self) -> String {
+ format!("{:?}", self.internal)
+ }
+
+ /// Return a copy of the Object (copy here produces a deepcopy).
+ pub fn __copy__(&self) -> Self {
+ self.clone()
+ }
+
+ /// Return a deep copy of the Object.
+ pub fn __deepcopy__(&self, _memodict: Py<PyAny>) -> Self {
+ self.clone()
+ }
+ fn __richcmp__(
+ &self,
+ other: CheatedInputWrapper,
+ op: pyo3::class::basic::CompareOp,
+ ) -> PyResult<bool> {
+ match op {
+ pyo3::class::basic::CompareOp::Eq => Ok(self.internal == other.internal),
+ pyo3::class::basic::CompareOp::Ne => Ok(self.internal != other.internal),
+ _ => Err(pyo3::exceptions::PyNotImplementedError::new_err(
+ "Other comparison not implemented",
+ )),
+ }
+ }
}
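
The same dunder pattern applies to all three input wrappers. A small usage sketch for
one of them; the constructor signature PauliZProductInput(number_qubits,
use_flipped_measurement) is an assumption based on qoqo convention, not part of this
diff:

    import copy
    from qoqo.measurements import PauliZProductInput

    inp = PauliZProductInput(number_qubits=2, use_flipped_measurement=False)
    assert copy.deepcopy(inp) == inp   # __deepcopy__ plus __richcmp__ Eq
    print(repr(inp))                   # Debug-formatted via __repr__
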
diff --git a/qoqo/src/operations/mod.rs b/qoqo/src/operations/mod.rs
index 1d23454a..767f86db 100644
--- a/qoqo/src/operations/mod.rs
+++ b/qoqo/src/operations/mod.rs
@@ -89,6 +89,7 @@ use pyo3::prelude::*;
/// PragmaRandomNoise
/// PragmaGeneralNoise
/// PragmaConditional
+/// PragmaLoop
/// CNOT
/// SWAP
/// FSwap
@@ -158,6 +159,7 @@ pub fn operations(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_class::<PragmaGeneralNoiseWrapper>()?;
m.add_class::<PragmaConditionalWrapper>()?;
m.add_class::<PragmaChangeDeviceWrapper>()?;
+ m.add_class::<PragmaLoopWrapper>()?;
m.add_class::<CNOTWrapper>()?;
m.add_class::<SWAPWrapper>()?;
m.add_class::<FSwapWrapper>()?;
diff --git a/qoqo/src/operations/pragma_operations.rs b/qoqo/src/operations/pragma_operations.rs
index 5ac4dd49..5c3be94c 100644
--- a/qoqo/src/operations/pragma_operations.rs
+++ b/qoqo/src/operations/pragma_operations.rs
@@ -41,6 +41,18 @@ struct PragmaSetNumberOfMeasurements {
readout: String,
}
+#[wrap(Operate, OperatePragma)]
+/// This PRAGMA operation repeats a given circuit a number of times.
+///
+/// Args:
+///     repetitions (CalculatorFloat): The number of repetitions as a symbolic float. At evaluation the floor of any float value is taken.
+/// circuit (Circuit): The Circuit that is looped.
+///
+pub struct PragmaLoop {
+ repetitions: CalculatorFloat,
+ circuit: Circuit,
+}
+
/// Module containing the PragmaSetStateVector class.
#[pymodule]
fn pragma_set_statevector(_py: Python, module: &PyModule) -> PyResult<()> {
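
A minimal Python sketch of constructing the new PragmaLoop wrapper, assuming string
arguments are converted to CalculatorFloat as elsewhere in qoqo (so "n" stays symbolic
until parameter substitution):

    from qoqo import Circuit
    from qoqo.operations import PragmaLoop, PauliX

    inner = Circuit()
    inner += PauliX(qubit=0)

    # repetitions may stay symbolic; the floor of any float value is taken at evaluation
    loop = PragmaLoop(repetitions="n", circuit=inner)
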
diff --git a/qoqo/src/operations/two_qubit_gate_operations.rs b/qoqo/src/operations/two_qubit_gate_operations.rs
index cd6ae3b6..3457ef1a 100644
--- a/qoqo/src/operations/two_qubit_gate_operations.rs
+++ b/qoqo/src/operations/two_qubit_gate_operations.rs
@@ -513,7 +513,7 @@ pub struct ComplexPMInteraction {
/// 1 & 0 & 0 & 0 \\\\
/// 0 & e^{i \phi} & 0 & 0 \\\\
/// 0 & 0 & e^{i \phi} & 0 \\\\
-/// 0 & 0 & 0 & e^{i (2\cdot\phi - \pi)}
+/// 0 & 0 & 0 & e^{i (2\cdot\phi + \pi)}
/// \end{pmatrix}
///
/// Args:
@@ -536,7 +536,7 @@ pub struct PhaseShiftedControlledZ {
/// 1 & 0 & 0 & 0 \\\\
/// 0 & e^{i \phi} & 0 & 0 \\\\
/// 0 & 0 & e^{i \phi} & 0 \\\\
-/// 0 & 0 & 0 & e^{i(2\cdot\phi - \theta)}
+/// 0 & 0 & 0 & e^{i(2\cdot\phi + \theta)}
/// \end{pmatrix}
///
/// Args:
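
Reconstructed from the patched docstrings, the corrected unitary of
PhaseShiftedControlledZ reads

    U = \begin{pmatrix}
    1 & 0 & 0 & 0 \\
    0 & e^{i \phi} & 0 & 0 \\
    0 & 0 & e^{i \phi} & 0 \\
    0 & 0 & 0 & e^{i (2\phi + \pi)}
    \end{pmatrix}

and the theta-parameterized gate in the second hunk gets e^{i (2\phi + \theta)} in the
last diagonal entry; in both cases the sign of the added phase flips from minus to plus.
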
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index b5de1176..ed674a3a 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index dc3e1870..e19ca48a 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.2"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.2", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.6", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -43,6 +43,9 @@ petgraph = {version = "0.6.2", optional=true}
serde_test = "1.0"
test-case = "2.0"
serde_json = "1.0"
+test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0"}
+test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0"}
+bincode = {version="1.3", optional=false}
[build-dependencies]
quote = "1.0"
diff --git a/roqoqo/build.rs b/roqoqo/build.rs
index 0136bbdd..fec0053b 100644
--- a/roqoqo/build.rs
+++ b/roqoqo/build.rs
@@ -50,6 +50,10 @@ struct Visitor {
two_qubit_gate_operations: Vec<Ident>,
// Identifiers of structs belonging to MultiQubitGateOperation enum
multi_qubit_gate_operations: Vec<Ident>,
+    // Operations that have only been introduced in roqoqo 1.1.0
+    // These operations will only be added at the end of the automatically created enums
+    // to maintain compatibility with bincode encoding
+ roqoqo_1_1_operations: Vec<Ident>,
}
impl Visitor {
@@ -69,6 +73,7 @@ impl Visitor {
single_qubit_gate_operations: Vec::new(),
two_qubit_gate_operations: Vec::new(),
multi_qubit_gate_operations: Vec::new(),
+ roqoqo_1_1_operations: Vec::new(),
}
}
}
@@ -213,6 +218,9 @@ impl<'ast> Visit<'ast> for Visitor {
if trait_name.as_str() == "Operate" {
self.operations.push(id.clone());
}
+ if trait_name.as_str() == "ImplementedIn1point1" {
+ self.roqoqo_1_1_operations.push(id.clone());
+ }
if trait_name.as_str() == "OperateSingleQubitGate" {
self.single_qubit_gate_operations.push(id.clone());
}
@@ -259,13 +267,29 @@ fn main() {
}
// Construct TokenStreams for variants of operation enum
- let operations_quotes = vis.operations.into_iter().map(|v| {
- let msg = format!("Variant for {}", v);
- quote! {
- #[allow(clippy::upper_case_acronyms)]
- #[doc = #msg]
- #v(#v)}
- });
+ let operations_quotes = vis
+ .operations
+ .clone()
+ .into_iter()
+ .filter(|v| !vis.roqoqo_1_1_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[allow(clippy::upper_case_acronyms)]
+ #[doc = #msg]
+ #v(#v)}
+ });
+ let operations_quotes_1_1 = vis
+ .operations
+ .into_iter()
+ .filter(|v| vis.roqoqo_1_1_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[allow(clippy::upper_case_acronyms)]
+ #[doc = #msg]
+ #v(#v)}
+ });
// Construct TokenStreams for variants of operation enum
let single_qubit_operations_quotes = vis.single_qubit_operations.into_iter().map(|v| {
let msg = format!("Variant for {}", v);
@@ -289,12 +313,28 @@ fn main() {
#v(#v)}
});
// Construct TokenStreams for variants of pragma enum
- let pragma_operations_quotes = vis.pragma_operations.into_iter().map(|v| {
- let msg = format!("Variant for {}", v);
- quote! {
- #[doc = #msg]
- #v(#v)}
- });
+ let pragma_operations_quotes = vis
+ .pragma_operations
+ .clone()
+ .into_iter()
+ .filter(|v| !vis.roqoqo_1_1_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[doc = #msg]
+ #v(#v)}
+ });
+ // Construct TokenStreams for variants of pragma enum
+ let pragma_operations_quotes_1_1 = vis
+ .pragma_operations
+ .into_iter()
+ .filter(|v| vis.roqoqo_1_1_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[doc = #msg]
+ #v(#v)}
+ });
// Construct TokenStreams for variants of pragma enum
let pragma_noise_operations_quotes = vis.pragma_noise_operations.clone().into_iter().map(|v| {
let msg = format!("Variant for {}", v);
@@ -326,12 +366,28 @@ fn main() {
#v(#v)}
});
// Construct TokenStreams for variants of definition enum
- let definitions_quotes = vis.definitions.into_iter().map(|v| {
- let msg = format!("Variant for {}", v);
- quote! {
- #[doc = #msg]
- #v(#v)}
- });
+ let definitions_quotes = vis
+ .definitions
+ .clone()
+ .into_iter()
+ .filter(|v| !vis.roqoqo_1_1_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[doc = #msg]
+ #v(#v)}
+ });
+ // Construct TokenStreams for variants of definition enum
+ let definitions_quotes_1_1 = vis
+ .definitions
+ .into_iter()
+ .filter(|v| vis.roqoqo_1_1_operations.contains(v))
+ .map(|v| {
+ let msg = format!("Variant for {}", v);
+ quote! {
+ #[doc = #msg]
+ #v(#v)}
+ });
// Construct TokenStreams for variants of operation enum
let constant_gate_operations_quote = vis.constant_gate_operations.into_iter().map(|v| {
let msg = format!("Variant for {}", v);
@@ -373,7 +429,8 @@ fn main() {
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
// #[cfg_attr(feature = "json_schema", derive(schemars::JsonSchema))]
pub enum Operation {
- #(#operations_quotes),*
+ #(#operations_quotes),* ,
+ #(#operations_quotes_1_1),*
}
/// Enum of all Operations implementing [OperateSingleQubit]
@@ -405,7 +462,8 @@ fn main() {
// #[cfg_attr(feature = "json_schema", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
pub enum PragmaOperation {
- #(#pragma_operations_quotes),*
+ #(#pragma_operations_quotes),* ,
+ #(#pragma_operations_quotes_1_1),*
}
/// Enum of all Operations implementing [OperatePragmaNoise]
@@ -446,7 +504,8 @@ fn main() {
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
// #[cfg_attr(feature = "json_schema", derive(schemars::JsonSchema))]
pub enum Definition {
- #(#definitions_quotes),*
+ #(#definitions_quotes),* ,
+ #(#definitions_quotes_1_1),*
}
/// Enum of all Operations implementing [OperateConstantGate]
diff --git a/roqoqo/src/operations/define_operations.rs b/roqoqo/src/operations/define_operations.rs
index 63cbf5cf..4e6c322a 100644
--- a/roqoqo/src/operations/define_operations.rs
+++ b/roqoqo/src/operations/define_operations.rs
@@ -241,6 +241,8 @@ pub struct InputBit {
value: bool,
}
+impl super::ImplementedIn1point1 for InputBit {}
+
#[allow(non_upper_case_globals)]
const TAGS_InputBit: &[&str; 3] = &["Operation", "Definition", "InputBit"];
diff --git a/roqoqo/src/operations/mod.rs b/roqoqo/src/operations/mod.rs
index 79443a0b..a863090e 100644
--- a/roqoqo/src/operations/mod.rs
+++ b/roqoqo/src/operations/mod.rs
@@ -698,6 +698,9 @@ pub trait OperateMultiQubitGate:
// Implementing DynOperation for storing dynamic operations from extern crates in trait object
+/// Marker trait to show that some operation has been implemented in roqoqo 1.1.0
+pub(crate) trait ImplementedIn1point1: Operate {}
+
#[cfg(feature = "dynamic")]
/// A wrapper for Operate trait objects.
///
diff --git a/roqoqo/src/operations/pragma_operations.rs b/roqoqo/src/operations/pragma_operations.rs
index 7f060f3b..6bf7ca1e 100644
--- a/roqoqo/src/operations/pragma_operations.rs
+++ b/roqoqo/src/operations/pragma_operations.rs
@@ -29,6 +29,8 @@ use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
+use super::InvolvedClassical;
+
/// This PRAGMA Operation sets the number of measurements of the circuit.
///
/// This is used for backends that allow setting the number of tries. However, setting the number of
@@ -1090,3 +1092,74 @@ impl Substitute for PragmaChangeDevice {
Ok(self.clone())
}
}
+
+/// This PRAGMA repeats a circuit.
+///
+#[derive(Debug, Clone, PartialEq, roqoqo_derive::Operate, roqoqo_derive::OperatePragma)]
+#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
+// #[cfg_attr(feature = "json_schema", derive(schemars::JsonSchema))]
+pub struct PragmaLoop {
+ /// The name of the classical readout register.
+    /// The number of repetitions as a symbolic float.
+ /// The Circuit that is looped.
+ circuit: Circuit,
+}
+impl super::ImplementedIn1point1 for PragmaLoop {}
+
+#[allow(non_upper_case_globals)]
+const TAGS_PragmaLoop: &[&str; 3] = &["Operation", "PragmaOperation", "PragmaLoop"];
+
+/// Implements [Substitute] trait allowing to replace symbolic parameters and to perform qubit mappings.
+impl Substitute for PragmaLoop {
+ /// Remaps qubits in operations in clone of the operation.
+ fn remap_qubits(&self, mapping: &HashMap<usize, usize>) -> Result<Self, RoqoqoError> {
+ let new_circuit = self.circuit.remap_qubits(mapping)?;
+ Ok(PragmaLoop::new(self.repetitions.clone(), new_circuit))
+ }
+
+ /// Substitutes symbolic parameters in clone of the operation.
+ fn substitute_parameters(&self, calculator: &Calculator) -> Result<Self, RoqoqoError> {
+ let new_repetitions = calculator.parse_get(self.repetitions.clone())?;
+ let new_circuit = self.circuit.substitute_parameters(calculator)?;
+ Ok(PragmaLoop::new(new_repetitions.into(), new_circuit))
+ }
+}
+
+// Implements the InvolveQubits trait for PragmaLoop.
+impl InvolveQubits for PragmaLoop {
+ /// Lists all involved qubits (here: All).
+    /// Lists all involved qubits (here: the qubits involved in the looped circuit).
+ self.circuit.involved_qubits()
+ }
+
+ fn involved_classical(&self) -> InvolvedClassical {
+ let mut involved = InvolvedClassical::None;
+ for op in self.circuit.iter() {
+ let tmp_involved = op.involved_classical();
+ match &tmp_involved {
+ InvolvedClassical::All(x) => {
+ return InvolvedClassical::All(x.clone());
+ }
+ InvolvedClassical::AllQubits(x) => {
+ return InvolvedClassical::AllQubits(x.clone());
+ }
+ InvolvedClassical::None => (),
+ InvolvedClassical::Set(x) => match involved {
+ InvolvedClassical::All(y) => {
+ return InvolvedClassical::All(y);
+ }
+ InvolvedClassical::AllQubits(y) => {
+ return InvolvedClassical::AllQubits(y);
+ }
+ InvolvedClassical::None => involved = tmp_involved,
+ InvolvedClassical::Set(y) => {
+ let mut combined = x.clone();
+ combined.extend(y);
+ involved = InvolvedClassical::Set(combined)
+ }
+ },
+ }
+ }
+ involved
+ }
+}
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly, as in the sketch below.
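
For illustration, a minimal sketch of the desired call pattern (a full round trip via
to_json, which qoqo's Circuit already provides):

    from qoqo import Circuit

    circuit = Circuit()
    serialized = circuit.to_json()
    restored = Circuit.from_json(serialized)   # called on the class, no instance needed
    assert restored == circuit
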
| bug fix implemented and merged | 2022-10-06T13:28:07 | 0.0 | [] | [] |
||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-376 | 8c67fb841bf566ec4415d0408edd80eefebb5a85 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4a22e882..288db200 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,7 @@
This changelog track changes to the qoqo project starting at version v0.5.0
-## 1.1.0-beta.6
+## 1.1.0-beta.7
* Fixed github pipeline deploy
diff --git a/Cargo.lock b/Cargo.lock
index 4cdaa7c5..a2464b80 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.6",
+ "roqoqo 1.1.0-beta.7",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
dependencies = [
"proc-macro2",
"quote",
@@ -692,7 +692,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
dependencies = [
"async-trait",
"bincode",
@@ -708,8 +708,7 @@ dependencies = [
"rand",
"rand_distr",
"roqoqo 1.0.0",
- "roqoqo-derive 1.0.0",
- "roqoqo-derive 1.1.0-beta.6",
+ "roqoqo-derive 1.1.0-beta.7",
"serde",
"serde_json",
"serde_test",
@@ -732,16 +731,17 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
dependencies = [
"proc-macro2",
"quote",
+ "roqoqo-derive 1.0.0",
"syn",
]
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
dependencies = [
"nalgebra",
"ndarray",
@@ -749,7 +749,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.6",
+ "roqoqo 1.1.0-beta.7",
"syn",
]
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index c30f7c0f..bc4e0438 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 4405e457..1fe1bd01 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.6", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.6", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.7", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.7", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 1766eaa1..066f77ae 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 161b2c86..ac8bcff7 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.6
+qoqo 1.1.0-beta.7
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.6
+qoqo-macros 1.1.0-beta.7
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11061,7 +11061,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====================================================
-roqoqo 1.0.0
+roqoqo 1.1.0-beta.7
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11273,430 +11273,7 @@ LICENSE:
====================================================
-roqoqo 1.1.0-beta.6
-https://github.com/HQSquantumsimulations/qoqo
-by HQS Quantum Simulations <[email protected]>
-Rust Quantum Computing Toolkit by HQS
-License: Apache-2.0
-----------------------------------------------------
-LICENSE:
-
-    [Apache License, Version 2.0 — standard license text omitted; identical to the other Apache-2.0 copies in this file.]
-
-====================================================
-roqoqo-derive 1.0.0
-by HQS Quantum Simulations <[email protected]>
-Macros for the roqoqo crate
-License: Apache-2.0
-----------------------------------------------------
-LICENSE:
-
-    [Apache License, Version 2.0 — standard license text omitted; identical to the other Apache-2.0 copies in this file.]
-
-====================================================
-roqoqo-derive 1.1.0-beta.6
+roqoqo-derive 1.1.0-beta.7
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11907,7 +11484,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.6
+roqoqo-test 1.1.0-beta.7
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index ed674a3a..640a9a9d 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -19,7 +19,9 @@ doctest = false
proc-macro2 = "1.0"
syn = { version = "1.0", features = ["full", "visit"] }
quote = "1.0"
+test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0", optional=true}
[features]
default = []
overrotate = []
+compatibility = ["test_roqoqo_derive_1_0"]
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index e19ca48a..b5aeb5ed 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.6"
+version = "1.1.0-beta.7"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.6", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.7", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -38,14 +38,12 @@ rand = { version = "0.8.4"}
async-trait = {version = "0.1", optional = true}
futures ={version = "0.3", optional=true}
petgraph = {version = "0.6.2", optional=true}
+test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0", optional=true}
[dev-dependencies]
serde_test = "1.0"
test-case = "2.0"
serde_json = "1.0"
-test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0"}
-test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0"}
-bincode = {version="1.3", optional=false}
[build-dependencies]
quote = "1.0"
@@ -64,3 +62,4 @@ overrotate = [ "rand_distr", "roqoqo-derive/overrotate"]
async = ["async-trait", "futures"]
# json_schema=["schemars", "serialize", "qoqo_calculator/json_schema"]
circuitdag = ["petgraph"]
+compatibility = ["roqoqo-derive/compatibility", "test_roqoqo_1_0"]
diff --git a/roqoqo/build.rs b/roqoqo/build.rs
index fec0053b..6a84c637 100644
--- a/roqoqo/build.rs
+++ b/roqoqo/build.rs
@@ -52,7 +52,7 @@ struct Visitor {
multi_qubit_gate_operations: Vec<Ident>,
// Operations that have only been introduced in roqoqoq 1.1.0
// These operations will only be added at end of automatically created enums
- // to maintain compatability with bincode encoding
+ // to maintain compatibility with bincode encoding
roqoqo_1_1_operations: Vec<Ident>,
}
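The corrected build.rs comment above reflects a real constraint: bincode 1.x encodes a serde enum as its variant index, so operations introduced in 1.1.0 have to be appended after the existing variants rather than inserted among them. A minimal sketch of that invariant (hypothetical gate names and fields, not roqoqo's generated enum):

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum Operation {
    Hadamard { qubit: usize },               // variant index 0, present since 1.0
    CNOT { control: usize, target: usize },  // variant index 1, present since 1.0
    // Hypothetical 1.1.0 addition: appended last so the indices of the
    // older variants (and thus old bincode payloads) stay valid.
    NewGate { qubit: usize },
}

fn main() {
    let op = Operation::CNOT { control: 0, target: 1 };
    let bytes = bincode::serialize(&op).unwrap();
    // The leading bytes carry the variant index; decoding against a
    // reordered enum would map them to a different operation.
    let back: Operation = bincode::deserialize(&bytes).unwrap();
    assert_eq!(op, back);
}
```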
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly
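A minimal sketch of the change the issue asks for, written against pyo3 0.17 (the version pinned in Cargo.lock above); the `Circuit` body and its field are placeholders for illustration, not the real qoqo wrapper:

```rust
use pyo3::prelude::*;
use pyo3::types::PyType;

#[pyclass]
struct Circuit {
    serialized: String, // stand-in payload for the sketch
}

#[pymethods]
impl Circuit {
    // With #[classmethod], Python resolves from_json on the class object,
    // so Circuit.from_json(...) works without constructing an instance.
    #[classmethod]
    fn from_json(_cls: &PyType, input: &str) -> PyResult<Self> {
        Ok(Circuit { serialized: input.to_owned() })
    }
}
```

Marked this way, `Circuit.from_json(json_string)` can be called directly, instead of first building a throwaway `Circuit()` just to reach the method.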
| 2022-10-06T13:17:28 | 0.0 | [] | [] |
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-375 | 9e3876599f4f860a82518e00a1ed6ad3001f87f7 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 42daeb51..4a22e882 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,9 +2,13 @@
This changelog track changes to the qoqo project starting at version v0.5.0
+## 1.1.0-beta.6
+
+* Fixed github pipeline deploy
+
## 1.1.0-beta.5
-* Fixed github pipelines
+* Fixed github pipeline deploy qoqo coverage
## 1.1.0-beta.4
diff --git a/Cargo.lock b/Cargo.lock
index 22ccd230..4cdaa7c5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.5",
+ "roqoqo 1.1.0-beta.6",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
dependencies = [
"proc-macro2",
"quote",
@@ -692,7 +692,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
dependencies = [
"async-trait",
"bincode",
@@ -709,7 +709,7 @@ dependencies = [
"rand_distr",
"roqoqo 1.0.0",
"roqoqo-derive 1.0.0",
- "roqoqo-derive 1.1.0-beta.5",
+ "roqoqo-derive 1.1.0-beta.6",
"serde",
"serde_json",
"serde_test",
@@ -732,7 +732,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
dependencies = [
"proc-macro2",
"quote",
@@ -741,7 +741,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
dependencies = [
"nalgebra",
"ndarray",
@@ -749,7 +749,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.5",
+ "roqoqo 1.1.0-beta.6",
"syn",
]
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index dac5cc2a..c30f7c0f 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index c1c0b574..4405e457 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.5", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.5", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.6", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.6", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 85d17f7b..1766eaa1 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 6478302f..161b2c86 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.5
+qoqo 1.1.0-beta.6
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.5
+qoqo-macros 1.1.0-beta.6
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo 1.1.0-beta.5
+roqoqo 1.1.0-beta.6
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11696,7 +11696,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.5
+roqoqo-derive 1.1.0-beta.6
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11907,7 +11907,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.5
+roqoqo-test 1.1.0-beta.6
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index 345f3011..ed674a3a 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index 8da6e82c..e19ca48a 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.5"
+version = "1.1.0-beta.6"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.5", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.6", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -43,8 +43,8 @@ petgraph = {version = "0.6.2", optional=true}
serde_test = "1.0"
test-case = "2.0"
serde_json = "1.0"
-test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="1.0.0"}
-test_qoqo_1_0 = {package= "roqoqo", version="1.0.0"}
+test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="=1.0.0"}
+test_roqoqo_1_0 = {package= "roqoqo", version="=1.0.0"}
bincode = {version="1.3", optional=false}
[build-dependencies]
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly
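The same report is attached to this release as well; for completeness, `#[staticmethod]` is the lighter-weight pyo3 alternative when the class object itself is not needed. Again a hedged sketch with placeholder types against pyo3 0.17, not the actual qoqo source:

```rust
use pyo3::prelude::*;

#[pyclass]
struct Circuit {
    serialized: String, // stand-in payload for the sketch
}

#[pymethods]
impl Circuit {
    // A #[staticmethod] also resolves the report: Python sees a plain
    // function on the class, so Circuit.from_json(...) needs no instance.
    #[staticmethod]
    fn from_json(input: &str) -> PyResult<Self> {
        Ok(Circuit { serialized: input.to_owned() })
    }
}
```

Both attributes make the function callable as `Circuit.from_json(...)`; `#[classmethod]` additionally receives the class, which matters only if construction should be subclass-aware.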
| 2022-10-06T10:33:47 | 0.0 | [] | [] |
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-374 | 0cce56d16ca5e26b67075bdf7f3c734af2f6a598 | diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml
index 56d4c12c..601384b4 100644
--- a/.github/workflows/build_and_deploy.yml
+++ b/.github/workflows/build_and_deploy.yml
@@ -231,6 +231,7 @@ jobs:
- uses: taiki-e/install-action@cargo-llvm-cov
- run: |
cd qoqo
+ pip install numpy
cargo llvm-cov --no-default-features --lcov --output-path lcov_qoqo.info
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 40e73339..42daeb51 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,10 @@
This changelog track changes to the qoqo project starting at version v0.5.0
+## 1.1.0-beta.5
+
+* Fixed github pipelines
+
## 1.1.0-beta.4
* Fixed compatibility tests
diff --git a/Cargo.lock b/Cargo.lock
index eaedc40d..22ccd230 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.4",
+ "roqoqo 1.1.0-beta.5",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
dependencies = [
"proc-macro2",
"quote",
@@ -692,7 +692,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
dependencies = [
"async-trait",
"bincode",
@@ -708,7 +708,8 @@ dependencies = [
"rand",
"rand_distr",
"roqoqo 1.0.0",
- "roqoqo-derive 1.1.0-beta.4",
+ "roqoqo-derive 1.0.0",
+ "roqoqo-derive 1.1.0-beta.5",
"serde",
"serde_json",
"serde_test",
@@ -731,7 +732,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
dependencies = [
"proc-macro2",
"quote",
@@ -740,7 +741,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
dependencies = [
"nalgebra",
"ndarray",
@@ -748,7 +749,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.4",
+ "roqoqo 1.1.0-beta.5",
"syn",
]
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index 66609cae..dac5cc2a 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 4f953711..c1c0b574 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.4", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.4", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.5", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.5", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 9f86caeb..85d17f7b 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 8ef72d13..6478302f 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.4
+qoqo 1.1.0-beta.5
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.4
+qoqo-macros 1.1.0-beta.5
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo 1.1.0-beta.4
+roqoqo 1.1.0-beta.5
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11696,7 +11696,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.4
+roqoqo-derive 1.1.0-beta.5
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11907,7 +11907,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.4
+roqoqo-test 1.1.0-beta.5
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index f4323d11..345f3011 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index 57f12fbc..8da6e82c 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.4"
+version = "1.1.0-beta.5"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.4", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.5", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -43,6 +43,7 @@ petgraph = {version = "0.6.2", optional=true}
serde_test = "1.0"
test-case = "2.0"
serde_json = "1.0"
+test_roqoqo_derive_1_0 = {package= "roqoqo-derive", version="1.0.0"}
test_qoqo_1_0 = {package= "roqoqo", version="1.0.0"}
bincode = {version="1.3", optional=false}
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly
| 2022-10-06T09:07:27 | 0.0 | [] | [] |
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-373 | dd78fbe68cf06b6ed534eadae37214bf1c48f92e | diff --git a/CHANGELOG.md b/CHANGELOG.md
index dfc75866..40e73339 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,10 @@
This changelog track changes to the qoqo project starting at version v0.5.0
+## 1.1.0-beta.4
+
+* Fixed compatibility tests
+
## 1.1.0-beta.3
* Added rich comparison for Python interface of Measurements
diff --git a/Cargo.lock b/Cargo.lock
index 7aaa3ec6..eaedc40d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -232,9 +232,9 @@ checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
[[package]]
name = "inventory"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30a61b8101d87996f82d725ba701b1987b7afc72f481c13513a30b855b9c9133"
+checksum = "e21e0a36a4dc4b469422ee17f715e8313f4a637675656d6a13637954278c6f55"
dependencies = [
"ctor",
"ghost",
@@ -489,9 +489,9 @@ dependencies = [
[[package]]
name = "pyo3"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12f72538a0230791398a0986a6518ebd88abc3fded89007b506ed072acc831e1"
+checksum = "201b6887e5576bf2f945fe65172c1fcbf3fcf285b23e4d71eb171d9736e38d32"
dependencies = [
"cfg-if",
"indoc",
@@ -507,9 +507,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc4cf18c20f4f09995f3554e6bcf9b09bd5e4d6b67c562fdfaafa644526ba479"
+checksum = "bf0708c9ed01692635cbf056e286008e5a2927ab1a5e48cdd3aeb1ba5a6fef47"
dependencies = [
"once_cell",
"target-lexicon",
@@ -517,9 +517,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a41877f28d8ebd600b6aa21a17b40c3b0fc4dfe73a27b6e81ab3d895e401b0e9"
+checksum = "90352dea4f486932b72ddf776264d293f85b79a1d214de1d023927b41461132d"
dependencies = [
"libc",
"pyo3-build-config",
@@ -527,9 +527,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e81c8d4bcc2f216dc1b665412df35e46d12ee8d3d046b381aad05f1fcf30547"
+checksum = "7eb24b804a2d9e88bfcc480a5a6dd76f006c1e3edaf064e8250423336e2cd79d"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -539,9 +539,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85752a767ee19399a78272cc2ab625cd7d373b2e112b4b13db28de71fa892784"
+checksum = "f22bb49f6a7348c253d7ac67a6875f2dc65f36c2ae64a82c381d528972bea6d6"
dependencies = [
"proc-macro2",
"quote",
@@ -550,7 +550,7 @@ dependencies = [
[[package]]
name = "qoqo"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
dependencies = [
"bincode",
"nalgebra",
@@ -564,7 +564,7 @@ dependencies = [
"qoqo_calculator",
"qoqo_calculator_pyo3",
"quote",
- "roqoqo 1.1.0-beta.3",
+ "roqoqo 1.1.0-beta.4",
"serde",
"serde_json",
"syn",
@@ -574,7 +574,7 @@ dependencies = [
[[package]]
name = "qoqo-macros"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
dependencies = [
"proc-macro2",
"quote",
@@ -692,7 +692,7 @@ dependencies = [
[[package]]
name = "roqoqo"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
dependencies = [
"async-trait",
"bincode",
@@ -708,7 +708,7 @@ dependencies = [
"rand",
"rand_distr",
"roqoqo 1.0.0",
- "roqoqo-derive 1.1.0-beta.3",
+ "roqoqo-derive 1.1.0-beta.4",
"serde",
"serde_json",
"serde_test",
@@ -731,7 +731,7 @@ dependencies = [
[[package]]
name = "roqoqo-derive"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
dependencies = [
"proc-macro2",
"quote",
@@ -740,7 +740,7 @@ dependencies = [
[[package]]
name = "roqoqo-test"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
dependencies = [
"nalgebra",
"ndarray",
@@ -748,7 +748,7 @@ dependencies = [
"qoqo_calculator",
"quote",
"rand",
- "roqoqo 1.1.0-beta.3",
+ "roqoqo 1.1.0-beta.4",
"syn",
]
@@ -837,9 +837,9 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.9.0"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "syn"
@@ -860,18 +860,18 @@ checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"
[[package]]
name = "test-case"
-version = "2.2.1"
+version = "2.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07aea929e9488998b64adc414c29fe5620398f01c2e3f58164122b17e567a6d5"
+checksum = "21d6cf5a7dffb3f9dceec8e6b8ca528d9bd71d36c9f074defb548ce161f598c0"
dependencies = [
"test-case-macros",
]
[[package]]
name = "test-case-macros"
-version = "2.2.1"
+version = "2.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c95968eedc6fc4f5c21920e0f4264f78ec5e4c56bb394f319becc1a5830b3e54"
+checksum = "e45b7bf6e19353ddd832745c8fcf77a17a93171df7151187f26623f2b75b5b26"
dependencies = [
"cfg-if",
"proc-macro-error",
diff --git a/qoqo-macros/Cargo.toml b/qoqo-macros/Cargo.toml
index e8432204..66609cae 100644
--- a/qoqo-macros/Cargo.toml
+++ b/qoqo-macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo-macros"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
readme = "../README.md"
diff --git a/qoqo/Cargo.toml b/qoqo/Cargo.toml
index 14759148..4f953711 100644
--- a/qoqo/Cargo.toml
+++ b/qoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qoqo"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
homepage = "https://github.com/HQSquantumsimulations/qoqo"
@@ -32,8 +32,8 @@ num-complex = "0.4"
thiserror = "1.0"
qoqo_calculator = { version="1.1" }
qoqo_calculator_pyo3 = {version="1.1", default-features=false}
-qoqo-macros = {version="1.1.0-beta.3", path="../qoqo-macros"}
-roqoqo = {version="1.1.0-beta.3", path="../roqoqo", features=["serialize", "overrotate"]}
+qoqo-macros = {version="1.1.0-beta.4", path="../qoqo-macros"}
+roqoqo = {version="1.1.0-beta.4", path="../roqoqo", features=["serialize", "overrotate"]}
numpy = "0.17"
bincode = "1.3"
serde_json = "1.0"
diff --git a/qoqo/pyproject.toml b/qoqo/pyproject.toml
index 39d3e43e..9f86caeb 100644
--- a/qoqo/pyproject.toml
+++ b/qoqo/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qoqo"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
dependencies = [
'numpy',
'qoqo_calculator_pyo3>=1.1.0',
diff --git a/qoqo/qoqo/DEPENDENCIES b/qoqo/qoqo/DEPENDENCIES
index 4f17be94..8ef72d13 100644
--- a/qoqo/qoqo/DEPENDENCIES
+++ b/qoqo/qoqo/DEPENDENCIES
@@ -4,6 +4,35 @@ https://github.com/tkaitchuck/ahash
by Tom Kaitchuck <[email protected]>
A non-cryptographic hash function using AES-NI for high performance
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2016 Amanieu d'Antras
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -209,35 +238,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2016 Amanieu d'Antras
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
approx 0.5.1
@@ -458,6 +458,35 @@ https://github.com/cuviper/autocfg
by Josh Stone <[email protected]>
Automatic cfg for Rust compiler features
License: Apache-2.0 OR MIT
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2018 Josh Stone
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -663,35 +692,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2018 Josh Stone
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
bincode 1.3.3
@@ -732,6 +732,35 @@ by The Rust Project Developers
A macro to generate structures which behave like bitflags.
License: MIT/Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2014 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -937,35 +966,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2014 The Rust Project Developers
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
bytemuck 1.12.1
@@ -973,6 +973,34 @@ https://github.com/Lokathor/bytemuck
by Lokathor <[email protected]>
A crate for mucking around with piles of bytes.
License: Zlib OR Apache-2.0 OR MIT
+----------------------------------------------------
+LICENSE-ZLIB:
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
+
+----------------------------------------------------
+LICENSE-MIT:
+
+MIT License
+
+Copyright (c) 2019 Daniel "Lokathor" Gee.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -1038,34 +1066,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-ZLIB:
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
-
-----------------------------------------------------
-LICENSE-MIT:
-
-MIT License
-
-Copyright (c) 2019 Daniel "Lokathor" Gee.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
====================================================
cfg-if 1.0.0
@@ -1077,21 +1077,50 @@ item that gets emitted.
License: MIT/Apache-2.0
----------------------------------------------------
-LICENSE-APACHE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+LICENSE-MIT:
-1. Definitions.
+Copyright (c) 2014 Alex Crichton
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+----------------------------------------------------
+LICENSE-APACHE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
@@ -1281,10 +1310,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+fixedbitset 0.4.2
+https://github.com/petgraph/fixedbitset
+by bluss
+FixedBitSet is a simple bitset collection
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2014 Alex Crichton
+Copyright (c) 2015-2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1310,13 +1346,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-fixedbitset 0.4.2
-https://github.com/petgraph/fixedbitset
-by bluss
-FixedBitSet is a simple bitset collection
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -1522,10 +1551,18 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+getrandom 0.2.7
+https://github.com/rust-random/getrandom
+by The Rand Project Developers
+A small cross-platform library for retrieving random data from system source
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2015-2017
+Copyright 2018 Developers of the Rand project
+Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1551,13 +1588,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-getrandom 0.2.7
-https://github.com/rust-random/getrandom
-by The Rand Project Developers
-A small cross-platform library for retrieving random data from system source
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -1763,11 +1793,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+hashbrown 0.12.3
+https://github.com/rust-lang/hashbrown
+by Amanieu d'Antras <[email protected]>
+A Rust port of Google's SwissTable hash map
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright 2018 Developers of the Rand project
-Copyright (c) 2014 The Rust Project Developers
+Copyright (c) 2016 Amanieu d'Antras
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1793,13 +1829,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-hashbrown 0.12.3
-https://github.com/rust-lang/hashbrown
-by Amanieu d'Antras <[email protected]>
-A Rust port of Google's SwissTable hash map
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2005,10 +2034,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+indexmap 1.9.1
+https://github.com/bluss/indexmap
+by
+A hash table with consistent order and fast iteration.
+License: Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 Amanieu d'Antras
+Copyright (c) 2016--2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -2034,13 +2070,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-indexmap 1.9.1
-https://github.com/bluss/indexmap
-by
-A hash table with consistent order and fast iteration.
-License: Apache-2.0 OR MIT
----------------------------------------------------
LICENSE-APACHE:
@@ -2246,11 +2275,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+indoc 1.0.7
+https://github.com/dtolnay/indoc
+by David Tolnay <[email protected]>
+Indented document literals
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016--2017
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -2275,13 +2309,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-indoc 1.0.7
-https://github.com/dtolnay/indoc
-by David Tolnay <[email protected]>
-Indented document literals
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2487,6 +2514,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+itoa 1.0.3
+https://github.com/dtolnay/itoa
+by David Tolnay <[email protected]>
+Fast integer primitive to string conversion
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -2514,13 +2548,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-itoa 1.0.3
-https://github.com/dtolnay/itoa
-by David Tolnay <[email protected]>
-Fast integer primitive to string conversion
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2726,9 +2753,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+libc 0.2.134
+https://github.com/rust-lang/libc
+by The Rust Project Developers
+Raw FFI bindings to platform libraries like libc.
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
+Copyright (c) 2014-2020 The Rust Project Developers
+
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -2753,14 +2790,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-libc 0.2.134
-https://github.com/rust-lang/libc
-by The Rust Project Developers
-Raw FFI bindings to platform libraries like libc.
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -2941,10 +2970,17 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
END OF TERMS AND CONDITIONS
+
+====================================================
+libm 0.2.5
+https://github.com/rust-lang/libm
+by Jorge Aparicio <[email protected]>
+libm in pure Rust
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2014-2020 The Rust Project Developers
+Copyright (c) 2018 Jorge Aparicio
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -2970,13 +3006,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-libm 0.2.5
-https://github.com/rust-lang/libm
-by Jorge Aparicio <[email protected]>
-libm in pure Rust
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -3182,10 +3211,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+lock_api 0.4.9
+https://github.com/Amanieu/parking_lot
+by Amanieu d'Antras <[email protected]>
+Wrappers to create fully-featured Mutex and RwLock types. Compatible with no_std.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2018 Jorge Aparicio
+Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -3211,13 +3247,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-lock_api 0.4.9
-https://github.com/Amanieu/parking_lot
-by Amanieu d'Antras <[email protected]>
-Wrappers to create fully-featured Mutex and RwLock types. Compatible with no_std.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -3423,10 +3452,21 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+matrixmultiply 0.3.2
+https://github.com/bluss/matrixmultiply/
+by bluss, R. Janis Goldschmidt
+General matrix multiplication for f32 and f64 matrices. Operates on matrices with general layout (they can use arbitrary row and column stride). Detects and uses AVX or SSE2 on x86 platforms transparently for higher performance. Uses a microkernel strategy, so that the implementation is easy to parallelize and optimize.
+
+Supports multithreading.
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 The Rust Project Developers
+Copyright (c) 2016 - 2021 Ulrik Sverdrup "bluss"
+Copyirhgt (c) 2018 R. Janis Goldschmidt
+Copyright (c) 2021 DutchGhost [constparse.rs]
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -3452,15 +3492,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-matrixmultiply 0.3.2
-https://github.com/bluss/matrixmultiply/
-by bluss, R. Janis Goldschmidt
-General matrix multiplication for f32 and f64 matrices. Operates on matrices with general layout (they can use arbitrary row and column stride). Detects and uses AVX or SSE2 on x86 platforms transparently for higher performance. Uses a microkernel strategy, so that the implementation is easy to parallelize and optimize.
-
-Supports multithreading.
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -3666,37 +3697,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2016 - 2021 Ulrik Sverdrup "bluss"
-Copyirhgt (c) 2018 R. Janis Goldschmidt
-Copyright (c) 2021 DutchGhost [constparse.rs]
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
memoffset 0.6.5
@@ -3953,24 +3953,55 @@ by Ulrik Sverdrup "bluss", Jim Turner
An n-dimensional array for general elements and for numerics. Lightweight array views and slicing; views support chunking and splitting.
License: MIT OR Apache-2.0
----------------------------------------------------
-LICENSE-APACHE:
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+LICENSE-MIT:
-1. Definitions.
+Copyright (c) 2015 - 2021 Ulrik Sverdrup "bluss",
+ Jim Turner,
+ and ndarray developers
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+----------------------------------------------------
+LICENSE-APACHE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
@@ -4157,12 +4188,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-complex 0.4.2
+https://github.com/rust-num/num-complex
+by The Rust Project Developers
+Complex numbers implementation for Rust
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2015 - 2021 Ulrik Sverdrup "bluss",
- Jim Turner,
- and ndarray developers
+Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -4188,13 +4224,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-complex 0.4.2
-https://github.com/rust-num/num-complex
-by The Rust Project Developers
-Complex numbers implementation for Rust
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -4400,6 +4429,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-integer 0.1.45
+https://github.com/rust-num/num-integer
+by The Rust Project Developers
+Integer traits and functions
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -4429,13 +4465,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-integer 0.1.45
-https://github.com/rust-num/num-integer
-by The Rust Project Developers
-Integer traits and functions
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -4641,6 +4670,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-rational 0.4.1
+https://github.com/rust-num/num-rational
+by The Rust Project Developers
+Rational numbers implementation for Rust
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -4670,13 +4706,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-rational 0.4.1
-https://github.com/rust-num/num-rational
-by The Rust Project Developers
-Rational numbers implementation for Rust
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -4882,6 +4911,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+num-traits 0.2.15
+https://github.com/rust-num/num-traits
+by The Rust Project Developers
+Numeric traits for generic mathematics
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -4911,13 +4947,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-num-traits 0.2.15
-https://github.com/rust-num/num-traits
-by The Rust Project Developers
-Numeric traits for generic mathematics
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -5123,35 +5152,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2014 The Rust Project Developers
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
numpy 0.17.2
@@ -5195,6 +5195,33 @@ https://github.com/matklad/once_cell
by Aleksey Kladov <[email protected]>
Single assignment cells and lazy values.
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -5400,9 +5427,18 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+parking_lot 0.12.1
+https://github.com/Amanieu/parking_lot
+by Amanieu d'Antras <[email protected]>
+More compact and efficient implementations of the standard synchronization primitives.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
+Copyright (c) 2016 The Rust Project Developers
+
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -5427,13 +5463,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-parking_lot 0.12.1
-https://github.com/Amanieu/parking_lot
-by Amanieu d'Antras <[email protected]>
-More compact and efficient implementations of the standard synchronization primitives.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -5639,6 +5668,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+parking_lot_core 0.9.3
+https://github.com/Amanieu/parking_lot
+by Amanieu d'Antras <[email protected]>
+An advanced API for creating custom synchronization primitives.
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -5668,13 +5704,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-parking_lot_core 0.9.3
-https://github.com/Amanieu/parking_lot
-by Amanieu d'Antras <[email protected]>
-An advanced API for creating custom synchronization primitives.
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -5880,10 +5909,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+paste 1.0.9
+https://github.com/dtolnay/paste
+by David Tolnay <[email protected]>
+Macros for all your token pasting needs
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 The Rust Project Developers
+Copyright (c) 2018
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -5909,13 +5945,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-paste 1.0.9
-https://github.com/dtolnay/paste
-by David Tolnay <[email protected]>
-Macros for all your token pasting needs
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6121,10 +6150,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+petgraph 0.6.2
+https://github.com/petgraph/petgraph
+by bluss, mitchmindtree
+Graph data structure library. Provides graph types and graph algorithms.
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2018
+Copyright (c) 2015
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -6150,13 +6186,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-petgraph 0.6.2
-https://github.com/petgraph/petgraph
-by bluss, mitchmindtree
-Graph data structure library. Provides graph types and graph algorithms.
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6362,10 +6391,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+ppv-lite86 0.2.16
+https://github.com/cryptocorrosion/cryptocorrosion
+by The CryptoCorrosion Contributors
+Implementation of the crypto-simd API for x86
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2015
+Copyright (c) 2019 The CryptoCorrosion Contributors
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -6391,13 +6427,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-ppv-lite86 0.2.16
-https://github.com/cryptocorrosion/cryptocorrosion
-by The CryptoCorrosion Contributors
-Implementation of the crypto-simd API for x86
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -6603,35 +6632,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2019 The CryptoCorrosion Contributors
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
proc-macro-error 1.0.4
@@ -6640,10 +6640,35 @@ by CreepySkeleton <[email protected]>
Almost drop-in replacement to panics in proc-macros
License: MIT OR Apache-2.0
----------------------------------------------------
-LICENSE-APACHE:
+LICENSE-MIT:
- Apache License
- Version 2.0, January 2004
+MIT License
+
+Copyright (c) 2019-2020 CreepySkeleton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+----------------------------------------------------
+LICENSE-APACHE:
+
+ Apache License
+ Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -6844,6 +6869,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+proc-macro-error-attr 1.0.4
+https://gitlab.com/CreepySkeleton/proc-macro-error
+by CreepySkeleton <[email protected]>
+Attribute macro for proc-macro-error crate
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -6869,13 +6901,6 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-
-====================================================
-proc-macro-error-attr 1.0.4
-https://gitlab.com/CreepySkeleton/proc-macro-error
-by CreepySkeleton <[email protected]>
-Attribute macro for proc-macro-error crate
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -7081,31 +7106,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-MIT License
-
-Copyright (c) 2019-2020 CreepySkeleton
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
====================================================
proc-macro2 1.0.46
@@ -7113,6 +7113,35 @@ https://github.com/dtolnay/proc-macro2
by David Tolnay <[email protected]>, Alex Crichton <[email protected]>
A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case.
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2014 Alex Crichton
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -7318,38 +7347,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Copyright (c) 2014 Alex Crichton
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
-pyo3 0.17.1
+pyo3 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Bindings to Python interpreter
@@ -7549,7 +7549,7 @@ LICENSE:
====================================================
-pyo3-build-config 0.17.1
+pyo3-build-config 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Build configuration for the PyO3 ecosystem
@@ -7749,7 +7749,7 @@ LICENSE:
====================================================
-pyo3-ffi 0.17.1
+pyo3-ffi 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Python-API bindings for the PyO3 ecosystem
@@ -8377,7 +8377,7 @@ docs/license.html_lib.
====================================================
-pyo3-macros 0.17.1
+pyo3-macros 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Proc macros for PyO3 package
@@ -8577,7 +8577,7 @@ LICENSE:
====================================================
-pyo3-macros-backend 0.17.1
+pyo3-macros-backend 0.17.2
https://github.com/pyo3/pyo3
by PyO3 Project and Contributors <https://github.com/PyO3>
Code generation for PyO3 package
@@ -8777,7 +8777,7 @@ LICENSE:
====================================================
-qoqo 1.1.0-beta.3
+qoqo 1.1.0-beta.4
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Quantum computing circuit toolkit. Python interface of roqoqo
@@ -8989,7 +8989,7 @@ LICENSE:
====================================================
-qoqo-macros 1.1.0-beta.3
+qoqo-macros 1.1.0-beta.4
by HQS Quantum Simulations <[email protected]>
Macros for the qoqo crate
License: Apache-2.0
@@ -9629,6 +9629,35 @@ https://github.com/dtolnay/quote
by David Tolnay <[email protected]>
Quasi-quoting macro quote!(...)
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2016 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -9834,10 +9863,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+rand 0.8.5
+https://rust-random.github.io/book
+by The Rand Project Developers, The Rust Project Developers
+Random number generators and other randomness functionality.
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016 The Rust Project Developers
+Copyright 2018 Developers of the Rand project
+Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -9863,14 +9901,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand 0.8.5
-https://rust-random.github.io/book
-by The Rand Project Developers, The Rust Project Developers
-Random number generators and other randomness functionality.
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10051,6 +10081,14 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
END OF TERMS AND CONDITIONS
+
+====================================================
+rand_chacha 0.3.1
+https://rust-random.github.io/book
+by The Rand Project Developers, The Rust Project Developers, The CryptoCorrosion Contributors
+ChaCha random number generator
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -10081,14 +10119,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand_chacha 0.3.1
-https://rust-random.github.io/book
-by The Rand Project Developers, The Rust Project Developers, The CryptoCorrosion Contributors
-ChaCha random number generator
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10294,6 +10324,14 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+rand_core 0.6.4
+https://rust-random.github.io/book
+by The Rand Project Developers, The Rust Project Developers
+Core random number generator traits and tools for implementation.
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -10324,14 +10362,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand_core 0.6.4
-https://rust-random.github.io/book
-by The Rand Project Developers, The Rust Project Developers
-Core random number generator traits and tools for implementation.
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10523,11 +10553,18 @@ APPENDIX: How to apply the Apache License to your work.
same "printed page" as the copyright notice for easier
identification within third-party archives.
+
+====================================================
+rand_distr 0.4.3
+https://rust-random.github.io/book
+by The Rand Project Developers
+Sampling from random number distributions
+
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
Copyright 2018 Developers of the Rand project
-Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -10553,14 +10590,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rand_distr 0.4.3
-https://rust-random.github.io/book
-by The Rand Project Developers
-Sampling from random number distributions
-
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10752,10 +10781,21 @@ APPENDIX: How to apply the Apache License to your work.
same "printed page" as the copyright notice for easier
identification within third-party archives.
+
+====================================================
+rawpointer 0.2.1
+https://github.com/bluss/rawpointer/
+by bluss
+Extra methods for raw pointers and `NonNull<T>`.
+
+For example `.post_inc()` and `.pre_dec()` (c.f. `ptr++` and `--ptr`),
+`offset` and `add` for `NonNull<T>`, and the function `ptrdistance`.
+
+License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright 2018 Developers of the Rand project
+Copyright (c) 2015
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -10781,17 +10821,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-rawpointer 0.2.1
-https://github.com/bluss/rawpointer/
-by bluss
-Extra methods for raw pointers and `NonNull<T>`.
-
-For example `.post_inc()` and `.pre_dec()` (c.f. `ptr++` and `--ptr`),
-`offset` and `add` for `NonNull<T>`, and the function `ptrdistance`.
-
-License: MIT/Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -10997,46 +11026,17 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+redox_syscall 0.2.16
+https://gitlab.redox-os.org/redox-os/syscall
+by Jeremy Soller <[email protected]>
+A Rust library to access raw Redox system calls
+License: MIT
----------------------------------------------------
-LICENSE-MIT:
+LICENSE:
-Copyright (c) 2015
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
-
-====================================================
-redox_syscall 0.2.16
-https://gitlab.redox-os.org/redox-os/syscall
-by Jeremy Soller <[email protected]>
-A Rust library to access raw Redox system calls
-License: MIT
-----------------------------------------------------
-LICENSE:
-
-Copyright (c) 2017 Redox OS Developers
+Copyright (c) 2017 Redox OS Developers
MIT License
@@ -11273,7 +11273,7 @@ LICENSE:
====================================================
-roqoqo 1.1.0-beta.3
+roqoqo 1.1.0-beta.4
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Rust Quantum Computing Toolkit by HQS
@@ -11696,7 +11696,7 @@ LICENSE:
====================================================
-roqoqo-derive 1.1.0-beta.3
+roqoqo-derive 1.1.0-beta.4
by HQS Quantum Simulations <[email protected]>
Macros for the roqoqo crate
License: Apache-2.0
@@ -11907,7 +11907,7 @@ LICENSE:
====================================================
-roqoqo-test 1.1.0-beta.3
+roqoqo-test 1.1.0-beta.4
https://github.com/HQSquantumsimulations/qoqo
by HQS Quantum Simulations <[email protected]>
Testing helper functions for roqoqo toolkit
@@ -12390,6 +12390,35 @@ Defines the macros `defer!`, `defer_on_unwind!`, `defer_on_success!` as
shorthands for guards with one of the implemented strategies.
License: MIT/Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2016-2019 Ulrik Sverdrup "bluss" and scopeguard developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -12595,11 +12624,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde 1.0.145
+https://serde.rs
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+A generic serialization/deserialization framework
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2016-2019 Ulrik Sverdrup "bluss" and scopeguard developers
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -12624,13 +12658,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde 1.0.145
-https://serde.rs
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-A generic serialization/deserialization framework
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -12836,6 +12863,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde_derive 1.0.145
+https://serde.rs
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+Macros 1.1 implementation of #[derive(Serialize, Deserialize)]
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -12863,13 +12897,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde_derive 1.0.145
-https://serde.rs
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-Macros 1.1 implementation of #[derive(Serialize, Deserialize)]
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -13075,6 +13102,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde_json 1.0.85
+https://github.com/serde-rs/json
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+A JSON serialization file format
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -13102,13 +13136,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde_json 1.0.85
-https://github.com/serde-rs/json
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-A JSON serialization file format
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -13314,6 +13341,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+serde_test 1.0.145
+https://serde.rs
+by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
+Token De/Serializer for testing De/Serialize implementations
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -13341,13 +13375,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-serde_test 1.0.145
-https://serde.rs
-by Erick Tryzelaar <[email protected]>, David Tolnay <[email protected]>
-Token De/Serializer for testing De/Serialize implementations
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -13553,33 +13580,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
simba 0.7.2
@@ -13794,11 +13794,40 @@ LICENSE:
====================================================
-smallvec 1.9.0
+smallvec 1.10.0
https://github.com/servo/rust-smallvec
by The Servo Project Developers
'Small vector' optimization: store up to a small number of items on the stack
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Copyright (c) 2018 The Servo Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -14004,11 +14033,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+syn 1.0.101
+https://github.com/dtolnay/syn
+by David Tolnay <[email protected]>
+Parser for Rust source code
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
-Copyright (c) 2018 The Servo Project Developers
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
@@ -14033,13 +14067,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-syn 1.0.101
-https://github.com/dtolnay/syn
-by David Tolnay <[email protected]>
-Parser for Rust source code
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -14245,33 +14272,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
target-lexicon 0.12.4
@@ -14505,7 +14505,7 @@ Software.
====================================================
-test-case 2.2.1
+test-case 2.2.2
https://github.com/frondeus/test-case
by Marcin Sas-Szymanski <[email protected]>, Wojciech Polak <[email protected]>, Łukasz Biel <[email protected]>
Provides #[test_case(...)] procedural macro attribute for generating parametrized test cases easily
@@ -14537,7 +14537,7 @@ SOFTWARE.
====================================================
-test-case-macros 2.2.1
+test-case-macros 2.2.2
https://github.com/frondeus/test-case
by Marcin Sas-Szymanski <[email protected]>, Wojciech Polak <[email protected]>, Łukasz Biel <[email protected]>
Provides #[test_case(...)] procedural macro attribute for generating parametrized test cases easily
@@ -14549,6 +14549,33 @@ https://github.com/dtolnay/thiserror
by David Tolnay <[email protected]>
derive(Error)
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -14754,6 +14781,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+thiserror-impl 1.0.37
+https://github.com/dtolnay/thiserror
+by David Tolnay <[email protected]>
+Implementation detail of the `thiserror` crate
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -14781,13 +14815,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-thiserror-impl 1.0.37
-https://github.com/dtolnay/thiserror
-by David Tolnay <[email protected]>
-Implementation detail of the `thiserror` crate
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -14993,33 +15020,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
typenum 1.15.0
@@ -15030,6 +15030,35 @@ Typenum is a Rust library for type-level numbers evaluated at
integers. It also provides a type-level array of type-level numbers, but its
implementation is incomplete.
License: MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE:
+
+MIT OR Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Paho Lurie-Gregg
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -15234,35 +15263,6 @@ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE:
-
-MIT OR Apache-2.0
-----------------------------------------------------
-LICENSE-MIT:
-
-The MIT License (MIT)
-
-Copyright (c) 2014 Paho Lurie-Gregg
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
====================================================
unicode-ident 1.0.4
@@ -15320,6 +15320,33 @@ shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.
+----------------------------------------------------
+LICENSE-MIT:
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -15525,6 +15552,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+====================================================
+unindent 0.1.10
+https://github.com/dtolnay/indoc
+by David Tolnay <[email protected]>
+Remove a column of leading whitespace from a string
+License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-MIT:
@@ -15552,13 +15586,6 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
-
-====================================================
-unindent 0.1.10
-https://github.com/dtolnay/indoc
-by David Tolnay <[email protected]>
-Remove a column of leading whitespace from a string
-License: MIT OR Apache-2.0
----------------------------------------------------
LICENSE-APACHE:
@@ -15764,33 +15791,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
====================================================
version_check 0.9.4
@@ -15798,6 +15798,29 @@ https://github.com/SergioBenitez/version_check
by Sergio Benitez <[email protected]>
Tiny crate to check the version of the installed/running rustc.
License: MIT/Apache-2.0
+----------------------------------------------------
+LICENSE-MIT:
+
+The MIT License (MIT)
+Copyright (c) 2017-2018 Sergio Benitez
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
----------------------------------------------------
LICENSE-APACHE:
@@ -16003,29 +16026,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------
-LICENSE-MIT:
-
-The MIT License (MIT)
-Copyright (c) 2017-2018 Sergio Benitez
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
====================================================
wasi 0.11.0+wasi-snapshot-preview1
@@ -16034,242 +16034,37 @@ by The Cranelift Project Developers
Experimental WASI API bindings for Rust
License: Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
----------------------------------------------------
-LICENSE-APACHE:
+LICENSE-Apache-2.0_WITH_LLVM-exception:
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
-1. Definitions.
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
+ 1. Definitions.
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-----------------------------------------------------
-LICENSE-Apache-2.0_WITH_LLVM-exception:
-
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
@@ -16489,6 +16284,211 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
+----------------------------------------------------
+LICENSE-APACHE:
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
====================================================
wide 0.7.4
diff --git a/roqoqo-derive/Cargo.toml b/roqoqo-derive/Cargo.toml
index 2bd08633..f4323d11 100644
--- a/roqoqo-derive/Cargo.toml
+++ b/roqoqo-derive/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo-derive"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
diff --git a/roqoqo/Cargo.toml b/roqoqo/Cargo.toml
index 6c423906..57f12fbc 100644
--- a/roqoqo/Cargo.toml
+++ b/roqoqo/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "roqoqo"
-version = "1.1.0-beta.3"
+version = "1.1.0-beta.4"
authors = ["HQS Quantum Simulations <[email protected]>"]
license = "Apache-2.0"
edition = "2021"
@@ -28,7 +28,7 @@ num-complex = { version = "0.4"}
thiserror = "1.0"
dyn-clone = {version="1.0", optional=true}
qoqo_calculator = { version="1.1"}
-roqoqo-derive = {version="1.1.0-beta.3", path="../roqoqo-derive"}
+roqoqo-derive = {version="1.1.0-beta.4", path="../roqoqo-derive"}
typetag = {version="0.2", optional=true}
nalgebra = "0.31"
# schemars = { version="0.8", features = ["num-complex", "ndarray"], optional=true }
@@ -44,6 +44,7 @@ serde_test = "1.0"
test-case = "2.0"
serde_json = "1.0"
test_qoqo_1_0 = {package= "roqoqo", version="1.0.0"}
+bincode = {version="1.3", optional=false}
[build-dependencies]
quote = "1.0"
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly
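A minimal sketch of the current inconvenience, assuming the pre-fix instance-method signatures (`to_json` appears alongside `from_json` in the diff context):

```python
# Sketch only: why an instance-method from_json is awkward.
# Assumes the qoqo package is installed.
from qoqo import Circuit

circuit = Circuit()
json_string = circuit.to_json()

# Pre-fix: from_json is an instance method, so a throwaway instance is needed.
restored = Circuit().from_json(json_string)

# Desired: call it directly on the class.
# restored = Circuit.from_json(json_string)
```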
| 2022-10-05T13:02:55 | 0.0 | [] | [] |
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-157 | ed9c86e6252c62c86db6de2497a7ef88186be9e5 | diff --git a/qoqo/src/circuit.rs b/qoqo/src/circuit.rs
index 3acefa98..d811a83f 100644
--- a/qoqo/src/circuit.rs
+++ b/qoqo/src/circuit.rs
@@ -15,6 +15,7 @@ use bincode::{deserialize, serialize};
use pyo3::exceptions::{PyIndexError, PyRuntimeError, PyTypeError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::PyByteArray;
+use pyo3::types::PyType;
use pyo3::{PyIterProtocol, PyMappingProtocol, PyNumberProtocol, PyObjectProtocol};
use roqoqo::prelude::*;
use roqoqo::{Circuit, OperationIterator, ROQOQO_VERSION};
@@ -216,6 +217,8 @@ impl CircuitWrapper {
Ok(b)
}
+ #[allow(unused_variables)]
+ #[classmethod]
/// Convert the bincode representation of the Circuit to a Circuit using the [bincode] crate.
///
/// Args:
@@ -227,12 +230,12 @@ impl CircuitWrapper {
/// Raises:
/// TypeError: Input cannot be converted to byte array.
/// ValueError: Input cannot be deserialized to Circuit.
- pub fn from_bincode(&self, input: &PyAny) -> PyResult<CircuitWrapper> {
+ pub fn from_bincode(cls: &PyType, input: &PyAny) -> PyResult<Self> {
let bytes = input
.extract::<Vec<u8>>()
.map_err(|_| PyTypeError::new_err("Input cannot be converted to byte array"))?;
- Ok(CircuitWrapper {
+ Ok(Self {
internal: deserialize(&bytes[..])
.map_err(|_| PyValueError::new_err("Input cannot be deserialized to Circuit"))?,
})
@@ -251,6 +254,8 @@ impl CircuitWrapper {
Ok(serialized)
}
+ #[allow(unused_variables)]
+ #[classmethod]
/// Convert the json representation of a Circuit to a Circuit.
///
/// Args:
@@ -261,9 +266,9 @@ impl CircuitWrapper {
///
/// Raises:
/// ValueError: Input cannot be deserialized to Circuit.
- fn from_json(&self, input: &str) -> PyResult<CircuitWrapper> {
- Ok(CircuitWrapper {
- internal: serde_json::from_str(input)
+ pub fn from_json(cls: &PyType, json_string: &str) -> PyResult<Self> {
+ Ok(Self {
+ internal: serde_json::from_str(json_string)
.map_err(|_| PyValueError::new_err("Input cannot be deserialized to Circuit"))?,
})
}
| qoqo Circuit from_ methods are not classmethods
from_json and from_bincode in qoqo.Circuit are not implemented as classmethods.
We need to change that so we can call `Circuit.from_json(...)` directly
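With the classmethod change above, both constructors can be called on the class itself. A hedged usage sketch (the `to_json`/`to_bincode` counterparts appear in the diff context):

```python
# Usage sketch for the classmethod-style API introduced by this patch.
from qoqo import Circuit

original = Circuit()

# Round-trip through JSON without building a dummy Circuit first.
restored_from_json = Circuit.from_json(original.to_json())

# Round-trip through bincode bytes the same way.
restored_from_bincode = Circuit.from_bincode(original.to_bincode())
```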
| 2021-12-16T08:28:43 | 0.0 | [] | [] |
|||
HQSquantumsimulations/qoqo | HQSquantumsimulations__qoqo-44 | 5e2498c67716bae7b16ed9cf5153c0790eb067cc | diff --git a/qoqo/src/operations/_auto_generated_operation_conversion.rs b/qoqo/src/operations/_auto_generated_operation_conversion.rs
index f8f548b1..1c766faa 100644
--- a/qoqo/src/operations/_auto_generated_operation_conversion.rs
+++ b/qoqo/src/operations/_auto_generated_operation_conversion.rs
@@ -285,6 +285,12 @@ pub fn convert_operation_to_pyobject(operation: Operation) -> PyResult<PyObject>
let pyobject: PyObject = pyref.to_object(py);
Ok(pyobject)
}
+ Operation::PhaseShiftedControlledZ(internal) => {
+ let pyref: Py<PhaseShiftedControlledZWrapper> =
+ Py::new(py, PhaseShiftedControlledZWrapper { internal }).unwrap();
+ let pyobject: PyObject = pyref.to_object(py);
+ Ok(pyobject)
+ }
Operation::MeasureQubit(internal) => {
let pyref: Py<MeasureQubitWrapper> =
Py::new(py, MeasureQubitWrapper { internal }).unwrap();
@@ -1186,6 +1192,26 @@ pub fn convert_pyany_to_operation(op: &PyAny) -> Result<Operation, QoqoError> {
.map_err(|_| QoqoError::ConversionError)?;
Ok(ComplexPMInteraction::new(control, target, t_real, t_imag).into())
}
+ "PhaseShiftedControlledZ" => {
+ let control_pyobject = op
+ .call_method0("control")
+ .map_err(|_| QoqoError::ConversionError)?;
+ let control: usize = control_pyobject
+ .extract()
+ .map_err(|_| QoqoError::ConversionError)?;
+ let target_pyobject = op
+ .call_method0("target")
+ .map_err(|_| QoqoError::ConversionError)?;
+ let target: usize = target_pyobject
+ .extract()
+ .map_err(|_| QoqoError::ConversionError)?;
+ let phi_pyobject = op
+ .call_method0("phi")
+ .map_err(|_| QoqoError::ConversionError)?;
+ let phi = convert_into_calculator_float(phi_pyobject)
+ .map_err(|_| QoqoError::ConversionError)?;
+ Ok(PhaseShiftedControlledZ::new(control, target, phi).into())
+ }
"MeasureQubit" => {
let qubit_pyobject = op
.call_method0("qubit")
diff --git a/qoqo/src/operations/mod.rs b/qoqo/src/operations/mod.rs
index 03676cb9..f2a6bce6 100644
--- a/qoqo/src/operations/mod.rs
+++ b/qoqo/src/operations/mod.rs
@@ -172,6 +172,7 @@ pub fn operations(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_class::<BogoliubovWrapper>()?;
m.add_class::<PMInteractionWrapper>()?;
m.add_class::<ComplexPMInteractionWrapper>()?;
+ m.add_class::<PhaseShiftedControlledZWrapper>()?;
Ok(())
}
diff --git a/qoqo/src/operations/two_qubit_gate_operations.rs b/qoqo/src/operations/two_qubit_gate_operations.rs
index 92551891..3a2e8b35 100644
--- a/qoqo/src/operations/two_qubit_gate_operations.rs
+++ b/qoqo/src/operations/two_qubit_gate_operations.rs
@@ -491,3 +491,30 @@ pub struct ComplexPMInteraction {
t_real: CalculatorFloat,
t_imag: CalculatorFloat,
}
+
+#[allow(clippy::upper_case_acronyms)]
+#[wrap(Operate, OperateTwoQubit, OperateGate)] //+ OperateTwoQubitGate (tbd)
+/// The phase-shifted controlled-Z gate.
+///
+/// Modified, i.e. phase-shifted ControlledPauliZ two-qubit gate (https://arxiv.org/pdf/1908.06101.pdf eq.(1)).
+///
+/// The unitary matrix representation is:
+///
+/// .. math::
+/// U = \begin{pmatrix}
+/// 1 & 0 & 0 & 0 \\\\
+/// 0 & e^{i \phi} & 0 & 0 \\\\
+/// 0 & 0 & e^{i \phi} & 0 \\\\
+/// 0 & 0 & 0 & e^{i (2\cdot\phi - \pi)}
+/// \end{pmatrix}
+///
+/// Args:
+/// control (int): The index of the most significant qubit in the unitary representation. Here, the qubit that controls the application of the phase-shift on the target qubit.
+/// target (int): The index of the least significant qubit in the unitary representation. Here, the qubit that the phase-shift is applied to.
+/// phi (CalculatorFloat): The single qubit phase $\phi$.
+///
+pub struct PhaseShiftedControlledZ {
+ control: usize,
+ target: usize,
+ phi: CalculatorFloat,
+}
diff --git a/roqoqo/src/lib.rs b/roqoqo/src/lib.rs
index af5b771c..e88925c8 100644
--- a/roqoqo/src/lib.rs
+++ b/roqoqo/src/lib.rs
@@ -165,9 +165,9 @@ pub enum RoqoqoBackendError {
CalculatorError(#[from] CalculatorError),
}
-pub mod operations;
-
+#[doc(hidden)]
mod circuit;
+pub mod operations;
pub mod prelude;
pub use circuit::*;
pub mod backends;
diff --git a/roqoqo/src/operations/_auto_generated_operations.rs b/roqoqo/src/operations/_auto_generated_operations.rs
index 7e5942dc..18d7dbe8 100644
--- a/roqoqo/src/operations/_auto_generated_operations.rs
+++ b/roqoqo/src/operations/_auto_generated_operations.rs
@@ -157,6 +157,9 @@ pub enum Operation {
#[doc = "Variant for ComplexPMInteraction"]
ComplexPMInteraction(ComplexPMInteraction),
#[allow(clippy::upper_case_acronyms)]
+ #[doc = "Variant for PhaseShiftedControlledZ"]
+ PhaseShiftedControlledZ(PhaseShiftedControlledZ),
+ #[allow(clippy::upper_case_acronyms)]
#[doc = "Variant for MeasureQubit"]
MeasureQubit(MeasureQubit),
#[allow(clippy::upper_case_acronyms)]
@@ -310,6 +313,9 @@ pub enum TwoQubitOperation {
#[allow(clippy::upper_case_acronyms)]
#[doc = "Variant for ComplexPMInteraction"]
ComplexPMInteraction(ComplexPMInteraction),
+ #[allow(clippy::upper_case_acronyms)]
+ #[doc = "Variant for PhaseShiftedControlledZ"]
+ PhaseShiftedControlledZ(PhaseShiftedControlledZ),
}
#[doc = r" Enum of all Operations implementing [OperateMultiQubit]"]
#[derive(
@@ -516,6 +522,9 @@ pub enum GateOperation {
#[allow(clippy::upper_case_acronyms)]
#[doc = "Variant for ComplexPMInteraction"]
ComplexPMInteraction(ComplexPMInteraction),
+ #[allow(clippy::upper_case_acronyms)]
+ #[doc = "Variant for PhaseShiftedControlledZ"]
+ PhaseShiftedControlledZ(PhaseShiftedControlledZ),
}
#[doc = r" Enum of all Operations implementing [Rotate]"]
#[allow(clippy::upper_case_acronyms)]
diff --git a/roqoqo/src/operations/two_qubit_gate_operations.rs b/roqoqo/src/operations/two_qubit_gate_operations.rs
index c67f8470..cb73896b 100644
--- a/roqoqo/src/operations/two_qubit_gate_operations.rs
+++ b/roqoqo/src/operations/two_qubit_gate_operations.rs
@@ -2151,3 +2151,89 @@ impl OperateTwoQubitGate for ComplexPMInteraction {
}
}
}
+
+/// Implements the phase-shifted controlled-Z gate.
+///
+/// Modified, i.e. phase-shifted ControlledPauliZ two-qubit gate (https://arxiv.org/pdf/1908.06101.pdf eq.(1)).
+/// The unitary matrix representation is:
+///
+/// $$
+/// U = \begin{pmatrix}
+/// 1 & 0 & 0 & 0 \\\\
+/// 0 & e^{i \phi} & 0 & 0 \\\\
+/// 0 & 0 & e^{i \phi} & 0 \\\\
+/// 0 & 0 & 0 & e^{i (2\cdot\phi - \pi)}
+/// \end{pmatrix}
+/// $$
+///
+#[allow(clippy::upper_case_acronyms)]
+#[derive(
+ Debug,
+ Clone,
+ PartialEq,
+ roqoqo_derive::InvolveQubits,
+ roqoqo_derive::Operate,
+ roqoqo_derive::Substitute,
+ roqoqo_derive::OperateTwoQubit,
+)]
+#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
+pub struct PhaseShiftedControlledZ {
+ /// The index of the most significant qubit in the unitary representation. Here, the qubit that controls the application of the phase-shift on the target qubit.
+ control: usize,
+ /// The index of the least significant qubit in the unitary representation. Here, the qubit that the phase-shift is applied to.
+ target: usize,
+ /// The single qubit phase $\phi$.
+ phi: CalculatorFloat,
+}
+
+#[allow(non_upper_case_globals)]
+const TAGS_PhaseShiftedControlledZ: &[&str; 4] = &[
+ "Operation",
+ "GateOperation",
+ "TwoQubitGateOperation",
+ "PhaseShiftedControlledZ",
+];
+
+/// Trait for all Operations acting with a unitary gate on a set of qubits.
+impl OperateGate for PhaseShiftedControlledZ {
+ /// Returns unitary matrix of the gate.
+ ///
+ /// # Returns
+ ///
+ /// * `Ok(Array2<Complex64>)` - The unitary matrix representation of the gate.
+ /// * `Err(RoqoqoError)` - The conversion of parameters to f64 failed.
+ fn unitary_matrix(&self) -> Result<Array2<Complex64>, RoqoqoError> {
+ // exp(i*x) = cos(x)+i*sin(x)
+ let phi: f64 = f64::try_from(self.phi.clone())?;
+ let cos: f64 = phi.cos();
+ let sin: f64 = phi.sin();
+ let cos2: f64 = (2.0 * phi - PI).cos();
+ let sin2: f64 = (2.0 * phi - PI).sin();
+ Ok(array![
+ [
+ Complex64::new(1.0, 0.0),
+ Complex64::new(0.0, 0.0),
+ Complex64::new(0.0, 0.0),
+ Complex64::new(0.0, 0.0)
+ ],
+ [
+ Complex64::new(0.0, 0.0),
+ Complex64::new(cos, sin),
+ Complex64::new(0.0, 0.0),
+ Complex64::new(0.0, 0.0)
+ ],
+ [
+ Complex64::new(0.0, 0.0),
+ Complex64::new(0.0, 0.0),
+ Complex64::new(cos, sin),
+ Complex64::new(0.0, 0.0)
+ ],
+ [
+ Complex64::new(0.0, 0.0),
+ Complex64::new(0.0, 0.0),
+ Complex64::new(0.0, 0.0),
+ Complex64::new(cos2, sin2)
+ ],
+ ])
+ }
+}
| Roqoqo documentation - modules
`circuit` appears in the list of Modules on docs.rs although lib.rs says:
```
#[doc(hidden)]
mod circuit;
```
It is intended that users access the `Circuit` struct (not the `circuit` module) to read the documentation, so the `circuit` module should be hidden from the Modules list.
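For comparison, a rough Python analogue of the same re-export pattern (module names are hypothetical): the implementation module stays private while the public class is exported from the package root, so documentation tools list only the class.

```python
# mypackage/__init__.py -- hypothetical package illustrating the pattern.
# The _circuit module is private (the leading underscore keeps it out of docs),
# while the Circuit class is the documented public entry point.
from mypackage._circuit import Circuit

__all__ = ["Circuit"]
```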
| 2021-07-08T08:10:30 | 0.0 | [] | [] |
|||
vortico/flama | vortico__flama-155 | 49d9caec3b7550953efc3f71e98f3eff74b2c4ea | diff --git a/examples/add_models.py b/examples/add_models.py
index 3bb24500..a69b863d 100644
--- a/examples/add_models.py
+++ b/examples/add_models.py
@@ -1,3 +1,5 @@
+import logging
+
import flama
from flama import Flama, Route
@@ -7,13 +9,13 @@ class AppStatus:
async def startup():
- print("\nStarting up the ML API...\n")
+ logging.info("\nStarting up the ML API...\n")
# Here, whatever action we want to be run at the startup of the application
AppStatus.loaded = True
async def shutdown():
- print("\nShutting down the ML API...\n")
+ logging.info("\nShutting down the ML API...\n")
# Here, whatever action we want to be run at the shutdown of the application
diff --git a/flama/authentication/components.py b/flama/authentication/components.py
index 0f021541..037a50b8 100644
--- a/flama/authentication/components.py
+++ b/flama/authentication/components.py
@@ -21,7 +21,6 @@ def __init__(self, secret: bytes, *, header_key: str, header_prefix: str, cookie
self.cookie_key = cookie_key
def _token_from_cookies(self, cookies: Cookies) -> bytes:
- print(f"ERROR: {cookies}")
try:
token = cookies[self.cookie_key]["value"]
except KeyError:
@@ -31,7 +30,6 @@ def _token_from_cookies(self, cookies: Cookies) -> bytes:
return token.encode()
def _token_from_header(self, headers: Headers) -> bytes:
- print(f"ERROR: {headers}")
try:
header_prefix, token = headers[self.header_key].split()
except KeyError:
diff --git a/flama/http.py b/flama/http.py
index 79705154..97909bde 100644
--- a/flama/http.py
+++ b/flama/http.py
@@ -72,14 +72,12 @@ async def __call__( # type: ignore[override]
class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, o):
- if isinstance(o, (Path, os.PathLike)):
+ if isinstance(o, (Path, os.PathLike, uuid.UUID)):
return str(o)
if isinstance(o, (bytes, bytearray)):
return o.decode("utf-8")
if isinstance(o, enum.Enum):
return o.value
- if isinstance(o, uuid.UUID):
- return str(o)
if isinstance(o, (set, frozenset)):
return list(o)
if isinstance(o, (datetime.datetime, datetime.date, datetime.time)):
diff --git a/flama/resources/crud.py b/flama/resources/crud.py
index 938c2bd7..8fd0c1c0 100644
--- a/flama/resources/crud.py
+++ b/flama/resources/crud.py
@@ -194,8 +194,8 @@ async def partial_update(
summary:
Partially update a resource
description:
- Partially update a resource in this collection. Only the specified fields will be replaced, keeping the
- rest, so no one is required.
+ Partially update a resource in this collection. Only the specified fields will be replaced, keeping the
+ rest, so no one is required.
responses:
200:
description:
diff --git a/pyproject.toml b/pyproject.toml
index 81eef2d3..d9e0f4f5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -123,7 +123,7 @@ skip_glob = [
[tool.ruff]
line-length = 120
# Enable Pyflakes and pycodestyle rules.
-select = ["E", "F"]
+select = ["C90", "E", "F", "G", "I", "W", "T"]
ignore = ["E721"]
exclude = [
".git",
| Undesired `print` statements in `BaseTokenComponent`
### Summary
We've left a couple of `print` statements that emit error messages, which can lead to confusion. These messages appear when loading cookies (a logging-based replacement is sketched after this report).
### Reproduce steps
The errors appear whenever we make use of any of the derived classes of `BaseTokenComponent`, e.g. `AccessTokenComponent` and `RefreshTokenComponent`.
### Schema library
None
### Models library
None
### Error logs
_No response_
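The patch simply drops the stray prints; where diagnostic output is still wanted, a module-level logger is the usual replacement. A minimal sketch with a hypothetical component (not flama's actual class):

```python
import logging

logger = logging.getLogger(__name__)


class TokenComponent:
    """Hypothetical stand-in; not flama's real BaseTokenComponent."""

    def _token_from_cookies(self, cookies: dict) -> bytes:
        try:
            return cookies["token"]["value"].encode()
        except KeyError:
            # Debug-level logging instead of print: silent by default,
            # visible only when the application enables verbose logs.
            logger.debug("token cookie not found; keys: %s", sorted(cookies))
            raise
```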
| 2024-11-06T19:45:22 | 0.0 | [] | [] |
|||
vortico/flama | vortico__flama-138 | 7a8e780331bfb6f3854af19b260a0c7aafed926f | diff --git a/flama/asgi.py b/flama/asgi.py
index e2dd3ba0..b4818df0 100644
--- a/flama/asgi.py
+++ b/flama/asgi.py
@@ -76,7 +76,10 @@ def resolve(self, headers: types.Headers) -> types.Cookies:
cookie = SimpleCookie()
cookie.load(headers.get("cookie", ""))
return types.Cookies(
- {str(name): {str(k): str(v) for k, v in morsel.items()} for name, morsel in cookie.items()}
+ {
+ str(name): {**{str(k): str(v) for k, v in morsel.items()}, "value": morsel.value}
+ for name, morsel in cookie.items()
+ }
)
diff --git a/flama/authentication.py b/flama/authentication.py
deleted file mode 100644
index 61789009..00000000
--- a/flama/authentication.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from starlette.authentication import (
- AuthCredentials,
- AuthenticationBackend,
- AuthenticationError,
- BaseUser,
- SimpleUser,
- UnauthenticatedUser,
- has_required_scope,
- requires,
-)
-
-__all__ = [
- "has_required_scope",
- "requires",
- "AuthenticationError",
- "AuthenticationBackend",
- "AuthCredentials",
- "BaseUser",
- "SimpleUser",
- "UnauthenticatedUser",
-]
diff --git a/flama/authentication/__init__.py b/flama/authentication/__init__.py
new file mode 100644
index 00000000..8411d86c
--- /dev/null
+++ b/flama/authentication/__init__.py
@@ -0,0 +1,3 @@
+from flama.authentication.components import * # noqa
+from flama.authentication.jwt import * # noqa
+from flama.authentication.middlewares import * # noqa
diff --git a/flama/authentication/components.py b/flama/authentication/components.py
new file mode 100644
index 00000000..7a271b3f
--- /dev/null
+++ b/flama/authentication/components.py
@@ -0,0 +1,79 @@
+import http
+import logging
+
+from flama import Component
+from flama.authentication import exceptions, jwt
+from flama.exceptions import HTTPException
+from flama.types import Headers
+from flama.types.http import Cookies
+
+logger = logging.getLogger(__name__)
+
+__all__ = ["JWTComponent"]
+
+
+class JWTComponent(Component):
+ def __init__(
+ self,
+ secret: bytes,
+ *,
+ header_key: str = "Authorization",
+ header_prefix: str = "Bearer",
+ cookie_key: str = "flama_authentication",
+ ):
+ self.secret = secret
+ self.header_key = header_key
+ self.header_prefix = header_prefix
+ self.cookie_key = cookie_key
+
+ def _token_from_cookies(self, cookies: Cookies) -> bytes:
+ try:
+ token = cookies[self.cookie_key]["value"]
+ except KeyError:
+ print(cookies)
+ logger.debug("'%s' not found in cookies", self.cookie_key)
+ raise exceptions.Unauthorized()
+
+ return token.encode()
+
+ def _token_from_header(self, headers: Headers) -> bytes:
+ try:
+ header_prefix, token = headers[self.header_key].split()
+ except KeyError:
+ logger.debug("'%s' not found in headers", self.header_key)
+ raise exceptions.Unauthorized()
+ except ValueError:
+ logger.debug("Wrong format for authorization header value")
+ raise exceptions.JWTException(
+ f"Authentication header must be '{self.header_key}: {self.header_prefix} <token>'"
+ )
+
+ if header_prefix != self.header_prefix:
+ logger.debug("Wrong prefix '%s' for authorization header, expected '%s'", header_prefix, self.header_prefix)
+ raise exceptions.JWTException(
+ f"Authentication header must be '{self.header_key}: {self.header_prefix} <token>'"
+ )
+
+ return token.encode()
+
+ def resolve(self, headers: Headers, cookies: Cookies) -> jwt.JWT:
+ try:
+ try:
+ encoded_token = self._token_from_header(headers)
+ except exceptions.Unauthorized:
+ encoded_token = self._token_from_cookies(cookies)
+ except exceptions.Unauthorized:
+ raise HTTPException(status_code=http.HTTPStatus.UNAUTHORIZED)
+ except exceptions.JWTException as e:
+ raise HTTPException(
+ status_code=http.HTTPStatus.BAD_REQUEST, detail={"error": e.__class__, "description": str(e)}
+ )
+
+ try:
+ token = jwt.JWT.decode(encoded_token, self.secret)
+ except (exceptions.JWTDecodeException, exceptions.JWTValidateException) as e:
+ raise HTTPException(
+ status_code=http.HTTPStatus.BAD_REQUEST, detail={"error": e.__class__, "description": str(e)}
+ )
+
+ return token
diff --git a/flama/authentication/exceptions.py b/flama/authentication/exceptions.py
new file mode 100644
index 00000000..916d88d7
--- /dev/null
+++ b/flama/authentication/exceptions.py
@@ -0,0 +1,38 @@
+__all__ = [
+ "AuthenticationException",
+ "Unauthorized",
+ "JWTException",
+ "JWTDecodeException",
+ "JWTValidateException",
+ "JWTClaimValidateException",
+]
+
+
+class AuthenticationException(Exception):
+ ...
+
+
+class Unauthorized(AuthenticationException):
+ ...
+
+
+class Forbidden(AuthenticationException):
+ ...
+
+
+class JWTException(AuthenticationException):
+ ...
+
+
+class JWTDecodeException(JWTException):
+ ...
+
+
+class JWTValidateException(JWTException):
+ ...
+
+
+class JWTClaimValidateException(JWTValidateException):
+ def __init__(self, claim: str) -> None:
+ self.claim = claim
+ super().__init__(f"Claim '{self.claim}' is not valid")
diff --git a/flama/authentication/jwt/__init__.py b/flama/authentication/jwt/__init__.py
new file mode 100644
index 00000000..f2d37798
--- /dev/null
+++ b/flama/authentication/jwt/__init__.py
@@ -0,0 +1,3 @@
+from flama.authentication.jwt.jwt import JWT
+
+__all__ = ["JWT"]
diff --git a/flama/authentication/jwt/algorithms.py b/flama/authentication/jwt/algorithms.py
new file mode 100644
index 00000000..c73d63e1
--- /dev/null
+++ b/flama/authentication/jwt/algorithms.py
@@ -0,0 +1,55 @@
+import abc
+import hmac
+
+__all__ = ["SignAlgorithm", "HMACAlgorithm"]
+
+
+class SignAlgorithm(abc.ABC):
+ """Abstract class for signature algorithms."""
+
+ @abc.abstractmethod
+ def sign(self, message: bytes, key: bytes) -> bytes:
+ """Sign a message using the given key.
+
+ :param message: Message to sign.
+ :param key: Key used to sign the message.
+ :return: Signature.
+ """
+ ...
+
+ @abc.abstractmethod
+ def verify(self, message: bytes, signature: bytes, key) -> bool:
+ """Verify the signature of a message.
+
+ :param message: Message to verify.
+ :param signature: Signed message.
+ :param key: Key used to sign the message.
+ :return: True if the signature is valid, False otherwise.
+ """
+ ...
+
+
+class HMACAlgorithm(SignAlgorithm):
+ """HMAC using SHA algorithms for JWS."""
+
+ def __init__(self, sha):
+ self.hash_algorithm = sha
+
+ def sign(self, message: bytes, key: bytes) -> bytes:
+ """Sign a message using the given key.
+
+ :param message: Message to sign.
+ :param key: Key used to sign the message.
+ :return: Signature.
+ """
+ return hmac.new(key, message, self.hash_algorithm).digest()
+
+ def verify(self, message: bytes, signature: bytes, key) -> bool:
+ """Verify the signature of a message.
+
+ :param message: Message to verify.
+ :param signature: Signed message.
+ :param key: Key used to sign the message.
+ :return: True if the signature is valid, False otherwise.
+ """
+ return hmac.compare_digest(signature, hmac.new(key, message, self.hash_algorithm).digest())
diff --git a/flama/authentication/jwt/claims.py b/flama/authentication/jwt/claims.py
new file mode 100644
index 00000000..9b5d1d98
--- /dev/null
+++ b/flama/authentication/jwt/claims.py
@@ -0,0 +1,150 @@
+import abc
+import time
+import typing as t
+
+from flama.authentication import exceptions
+
+if t.TYPE_CHECKING:
+ from flama.authentication.jwt.jwt import Payload
+
+__all__ = [
+ "ClaimValidator",
+ "IssValidator",
+ "SubValidator",
+ "AudValidator",
+ "ExpValidator",
+ "NbfValidator",
+ "IatValidator",
+ "JtiValidator",
+]
+
+
+class ClaimValidator(abc.ABC):
+ claim: t.ClassVar[str]
+
+ def __init__(self, payload: "Payload", claims: t.Dict[str, t.Any]) -> None:
+ self.value = claims.get(self.claim)
+ self.payload = payload
+
+ @abc.abstractmethod
+ def validate(self):
+ """Validate the claim.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ ...
+
+
+class IssValidator(ClaimValidator):
+ """Issuer claim validator."""
+
+ claim = "iss"
+
+ def validate(self):
+ """Validate the claim.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ ...
+
+
+class SubValidator(ClaimValidator):
+ """Subject claim validator."""
+
+ claim = "sub"
+
+ def validate(self):
+ """Validate the claim.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ ...
+
+
+class AudValidator(ClaimValidator):
+ """Audience claim validator."""
+
+ claim = "aud"
+
+ def validate(self):
+ """Validate the claim.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ ...
+
+
+class ExpValidator(ClaimValidator):
+ """Expiration time claim validator.
+
+ The value of the claim must be a number representing the expiration time of the token in seconds since the epoch
+ (UTC). The expiration time must be after the current time.
+ """
+
+ claim = "exp"
+
+ def validate(self):
+ """Validate the claim.
+
+ The value of the claim must be a number representing the expiration time of the token in seconds since the
+ epoch (UTC). The expiration time must be after the current time.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ if self.payload.exp is not None and self.payload.exp < int(time.time()):
+ raise exceptions.JWTClaimValidateException("exp")
+
+
+class NbfValidator(ClaimValidator):
+ """Not before claim validator.
+
+ The value of the claim must be a number representing the time before which the token must not be accepted for
+ processing in seconds since the epoch (UTC). The time must be before the current time.
+ """
+
+ claim = "nbf"
+
+ def validate(self):
+ """Validate the claim.
+
+ The value of the claim must be a number representing the time before which the token must not be accepted for
+ processing in seconds since the epoch (UTC). The time must be before the current time.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ if self.payload.nbf is not None and self.payload.nbf > int(time.time()):
+ raise exceptions.JWTClaimValidateException("nbf")
+
+
+class IatValidator(ClaimValidator):
+ """Issued at claim validator.
+
+ The value of the claim must be a number representing the time at which the JWT was issued in seconds since the
+ epoch (UTC). The time must be before the current time.
+ """
+
+ claim = "iat"
+
+ def validate(self):
+ """Validate the claim.
+
+ The value of the claim must be a number representing the time at which the JWT was issued in seconds since the
+ epoch (UTC). The time must be before the current time.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ if self.payload.iat is not None and self.payload.iat > int(time.time()):
+ raise exceptions.JWTClaimValidateException("iat")
+
+
+class JtiValidator(ClaimValidator):
+ """JWT ID claim validator."""
+
+ claim = "jti"
+
+ def validate(self):
+ """Validate the claim.
+
+ :raises JWTClaimValidateException: if the claim is not valid.
+ """
+ ...
diff --git a/flama/authentication/jwt/jws.py b/flama/authentication/jwt/jws.py
new file mode 100644
index 00000000..e83aa823
--- /dev/null
+++ b/flama/authentication/jwt/jws.py
@@ -0,0 +1,104 @@
+import base64
+import hashlib
+import json
+import typing as t
+
+from flama.authentication import exceptions
+from flama.authentication.jwt.algorithms import HMACAlgorithm
+
+if t.TYPE_CHECKING:
+ from flama.authentication.jwt.algorithms import SignAlgorithm
+
+__all__ = ["JWS"]
+
+
+class JWS:
+ """JSON Web Signature (JWS) implementation.
+
+ It is used to create and decode signed JWT tokens, and to validate the signature of the token. The token is signed
+ using the algorithm specified in the header. The supported algorithms are:
+ - HMAC with SHA-256
+ - HMAC with SHA-384
+ - HMAC with SHA-512
+ """
+
+ ALGORITHMS = {
+ "HS256": HMACAlgorithm(hashlib.sha256),
+ "HS384": HMACAlgorithm(hashlib.sha384),
+ "HS512": HMACAlgorithm(hashlib.sha512),
+ }
+
+ @classmethod
+ def _get_algorithm(cls, header: t.Dict[str, t.Any]) -> "SignAlgorithm":
+ """Get the algorithm to sign the token.
+
+ It gets the algorithm from the header, and it returns the corresponding algorithm implementation.
+
+ :param header: JWT header.
+ :return: Algorithm implementation.
+ """
+ if "alg" not in header:
+ raise exceptions.JWTDecodeException("Missing algorithm in header")
+
+ if header["alg"] not in cls.ALGORITHMS:
+ raise exceptions.JWTDecodeException(f"Unsupported algorithm '{header['alg']}'")
+
+ return cls.ALGORITHMS[header["alg"]]
+
+ @classmethod
+ def encode(cls, header: t.Dict[str, t.Any], payload: t.Dict[str, t.Any], key: bytes) -> bytes:
+ """Encode a JWS token.
+
+ It generates a signed token using the given key. The result is a JWT token with a format of:
+ <header>.<payload>.<signature>
+
+ :param header: JWT header.
+ :param payload: JWT payload.
+ :param key: Key used to sign the token.
+ :return: Encoded token.
+ """
+ header_segment = base64.urlsafe_b64encode(json.dumps(header).encode())
+ payload_segment = base64.urlsafe_b64encode(json.dumps(payload).encode())
+
+ algorithm = cls._get_algorithm(header)
+
+ signing_input = b".".join([header_segment, payload_segment])
+ signature = base64.urlsafe_b64encode(algorithm.sign(signing_input, key))
+ return b".".join([header_segment, payload_segment, signature])
+
+ @classmethod
+ def decode(cls, token: bytes, key: bytes) -> t.Tuple[t.Dict[str, t.Any], t.Dict[str, t.Any], bytes]:
+ """Decode a JWS token.
+
+ It decode and validate the signature of the token. The token format must be: <header>.<payload>.<signature>
+
+ The header, payload and signature are constructed from the decoded token.
+
+ :param token: Token to decode.
+ :param key: Key used to sign the token.
+ :return: A tuple with the header, payload and signature of the token.
+ :raises JWTDecodeException: If the token format is not correct.
+ :raises JWTValidateException: If the token is not valid.
+ """
+ try:
+ signing_input, signature = token.rsplit(b".", 1)
+ header_segment, payload_segment = signing_input.split(b".", 1)
+ except ValueError:
+ raise exceptions.JWTDecodeException("Not enough segments")
+
+ try:
+ header = json.loads(base64.urlsafe_b64decode(header_segment))
+ except ValueError:
+ raise exceptions.JWTDecodeException("Wrong header format")
+
+ try:
+ payload = json.loads(base64.urlsafe_b64decode(payload_segment))
+ except ValueError:
+ raise exceptions.JWTDecodeException("Wrong payload format")
+
+ algorithm = cls._get_algorithm(header)
+
+ if not algorithm.verify(signing_input, base64.urlsafe_b64decode(signature), key):
+ raise exceptions.JWTValidateException(f"Signature verification failed for token '{token.decode()}'")
+
+ return header, payload, signature
diff --git a/flama/authentication/jwt/jwt.py b/flama/authentication/jwt/jwt.py
new file mode 100644
index 00000000..00d25ecd
--- /dev/null
+++ b/flama/authentication/jwt/jwt.py
@@ -0,0 +1,228 @@
+import dataclasses
+import logging
+import time
+import typing as t
+
+from flama.authentication import exceptions
+from flama.authentication.jwt import claims
+from flama.authentication.jwt.jws import JWS
+
+logger = logging.getLogger(__name__)
+
+
+__all__ = ["JWT"]
+
+VALIDATORS = [
+ claims.IssValidator,
+ claims.SubValidator,
+ claims.AudValidator,
+ claims.ExpValidator,
+ claims.NbfValidator,
+ claims.IatValidator,
+ claims.JtiValidator,
+]
+
+
[email protected](frozen=True)
+class Header:
+ """JWT header.
+
+ It contains the metadata of the token. The header is represented as a dictionary, and it is
+ validated when the token is decoded. The header must contain the algorithm used to sign the token.
+
+ Additional information about the header can be found in the RFC 7519: https://tools.ietf.org/html/rfc7519
+ """
+
+ typ: str = "JWT"
+ alg: t.Optional[str] = None
+ cty: t.Optional[str] = None
+
+ def asdict(self) -> t.Dict[str, t.Any]:
+ """Return the header as a dictionary.
+
+ The fields are sorted alphabetically and the None values are removed.
+
+ :return: Header as a dictionary.
+ """
+ return dataclasses.asdict(
+ self, dict_factory=lambda x: {k: v for k, v in sorted(x, key=lambda y: y[0]) if v is not None}
+ )
+
+
[email protected](frozen=True)
+class Payload:
+ """JWT payload.
+
+ It contains the claims of the token. The claims are the statements about an entity (typically, the user) and
+ additional data. The claims are represented as a dictionary, and they are validated when the token is decoded.
+
+ Additional information about the claims can be found in the RFC 7519: https://tools.ietf.org/html/rfc7519
+
+ The user data is stored in the `data` field, and it is encoded as a dictionary. This field is not part of the JWT
+ standard and it is not validated when the token is decoded.
+ """
+
+ data: t.Dict[str, t.Any]
+ iss: t.Optional[str] = None
+ sub: t.Optional[str] = None
+ aud: t.Optional[str] = None
+ exp: t.Optional[int] = None
+ nbf: t.Optional[int] = None
+ iat: t.Optional[int] = None
+ jti: t.Optional[str] = None
+
+ def __init__(
+ self,
+ data: t.Optional[t.Dict[str, t.Any]] = None,
+ iss: t.Optional[str] = None,
+ sub: t.Optional[str] = None,
+ aud: t.Optional[str] = None,
+ exp: t.Optional[int] = None,
+ nbf: t.Optional[int] = None,
+ iat: t.Optional[int] = None,
+ jti: t.Optional[str] = None,
+ **kwargs: t.Any,
+ ) -> None:
+ """Initialize the payload.
+
+ It contains the claims of the token. The claims are the statements about an entity (typically, the user) and
+ additional data. The claims are represented as a dictionary, and they are validated when the token is decoded.
+
+ Additional information about the claims can be found in the RFC 7519: https://tools.ietf.org/html/rfc7519
+
+ The user data is stored in the `data` field, and it is encoded as a dictionary. This field is not part of the
+ JWT standard and it is not validated when the token is decoded. The user data can be passed as a dictionary or
+ as keyword arguments.
+
+ :param data: User data.
+ :param iss: Issuer.
+ :param sub: Subject.
+ :param aud: Audience.
+ :param exp: Expiration time.
+ :param nbf: Not before.
+ :param iat: Issued at.
+ :param jti: JWT ID.
+ :param kwargs: User data.
+ """
+ object.__setattr__(self, "iss", iss)
+ object.__setattr__(self, "sub", sub)
+ object.__setattr__(self, "aud", aud)
+ object.__setattr__(self, "exp", exp)
+ object.__setattr__(self, "nbf", nbf)
+ object.__setattr__(self, "iat", iat if iat is not None else int(time.time()))
+ object.__setattr__(self, "jti", jti)
+ object.__setattr__(self, "data", {**(data or {}), **kwargs})
+
+ def asdict(self) -> t.Dict[str, t.Any]:
+ """Return the payload as a dictionary.
+
+ The fields are sorted alphabetically and the None values are removed.
+
+ :return: Payload as a dictionary.
+ """
+ return dataclasses.asdict(
+ self, dict_factory=lambda x: {k: v for k, v in sorted(x, key=lambda y: y[0]) if v is not None}
+ )
+
+
[email protected](frozen=True)
+class JWT:
+ """JSON Web Token (JWT) implementation.
+
+ This is a convenience wrapper over the JWS implementation in this package. It is used to create and decode JWT
+ tokens, and to validate the signature of the token.
+
+ The token is signed using JWS, and the signature is validated using the algorithm specified in the header.
+ """
+
+ header: Header
+ payload: Payload
+
+ def __init__(self, header: t.Dict[str, t.Any], payload: t.Dict[str, t.Any]) -> None:
+ object.__setattr__(self, "header", Header(**header))
+ object.__setattr__(self, "payload", Payload(**payload))
+
+ def encode(self, key: bytes) -> bytes:
+ """Encode a JWT token.
+
+ The token is signed using the given secret. The result is a JWT token with a format of:
+ <header>.<payload>.<signature>
+
+ :param key: Secret used to sign the token.
+ :return: Encoded token.
+ """
+ return JWS.encode(
+ header=dataclasses.asdict(
+ self.header, dict_factory=lambda x: {k: v for k, v in sorted(x, key=lambda y: y[0]) if v is not None}
+ ),
+ payload=dataclasses.asdict(
+ self.payload, dict_factory=lambda x: {k: v for k, v in sorted(x, key=lambda y: y[0]) if v is not None}
+ ),
+ key=key,
+ )
+
+ @classmethod
+ def decode(cls, token: bytes, key: bytes) -> "JWT":
+ """Decode a JWT token.
+
+ The token format must be: <header>.<payload>.<signature>
+
+ :param token: Token to decode.
+ :param key: Key used to sign the token.
+ :return: An instance of JWT with the decoded token.
+ :raises JWTDecodeException: If the token format is not correct.
+ :raises JWTValidateException: If the token is not valid.
+ """
+ try:
+ header, payload, _ = JWS.decode(token, key)
+ decoded_token = cls(header=header, payload=payload)
+ decoded_token.validate()
+ except exceptions.JWTDecodeException:
+ logger.debug("Error decoding token")
+ raise
+ except exceptions.JWTValidateException as e:
+ logger.debug("Error validating token: %s", e)
+ raise
+ else:
+ logger.debug("Decoded token: %s", decoded_token)
+
+ return decoded_token
+
+ def validate(self, validators: t.Optional[t.List[claims.ClaimValidator]] = None, **claims: t.Any) -> None:
+ """Validate the token claims.
+
+ It validates all the default claims in the payload in the following order:
+ - Issuer (iss)
+ - Subject (sub)
+ - Audience (aud)
+ - Expiration time (exp)
+ - Not before (nbf)
+ - Issued at (iat)
+ - JWT ID (jti)
+
+ Once all the default claims are validated, it runs the custom validators.
+
+ If any of the claims is not valid, an exception is raised.
+
+ :param validators: Custom validators to run.
+ :param claims: Claims values used to validate.
+ :raises JWTValidateException: If any of the claims is not valid.
+ """
+ invalid_claims = []
+
+ for validator in [*VALIDATORS, *(validators or [])]:
+ try:
+ validator(self.payload, claims).validate()
+ except exceptions.JWTClaimValidateException as e:
+ logger.debug("Claim '%s' is not valid", e.claim)
+ invalid_claims.append(e.claim)
+
+ if invalid_claims:
+ raise exceptions.JWTValidateException(f"Invalid claims ({', '.join(invalid_claims)})")
+
+ def asdict(self) -> t.Dict[str, t.Any]:
+ """Return the JWT as a dictionary.
+
+ :return: JWT as a dictionary.
+ """
+ return {"header": self.header.asdict(), "payload": self.payload.asdict()}
diff --git a/flama/authentication/middlewares.py b/flama/authentication/middlewares.py
new file mode 100644
index 00000000..6d9552e2
--- /dev/null
+++ b/flama/authentication/middlewares.py
@@ -0,0 +1,59 @@
+import http
+import logging
+import typing as t
+
+from flama.authentication.jwt.jwt import JWT
+from flama.exceptions import HTTPException
+from flama.http import APIErrorResponse, Request
+
+if t.TYPE_CHECKING:
+ from flama import Flama, types
+ from flama.http import Response
+ from flama.routing import BaseRoute
+
+__all__ = ["AuthenticationMiddleware"]
+
+
+logger = logging.getLogger(__name__)
+
+
+class AuthenticationMiddleware:
+ def __init__(self, app: "types.App"):
+ self.app: "Flama" = t.cast("Flama", app)
+
+ async def __call__(self, scope: "types.Scope", receive: "types.Receive", send: "types.Send") -> None:
+ if scope["type"] not in ("http", "websocket"):
+ await self.app(scope, receive, send)
+ return
+
+ response = await self._get_response(scope, receive)
+
+ await response(scope, receive, send)
+
+ def _get_permissions(self, route: "BaseRoute") -> t.Set[str]:
+ return set(route.tags.get("permissions", []))
+
+ async def _get_response(self, scope: "types.Scope", receive: "types.Receive") -> t.Union["Response", "Flama"]:
+ app: "Flama" = scope["app"]
+
+ route, _ = app.router.resolve_route(scope)
+
+ required_permissions = self._get_permissions(route)
+
+ if not required_permissions:
+ return self.app
+
+ try:
+ token: JWT = await app.injector.resolve(JWT).value({"request": Request(scope, receive=receive)})
+ except HTTPException as e:
+ logger.debug("JWT error: %s", e.detail)
+ return APIErrorResponse(status_code=e.status_code, detail=e.detail)
+
+ user_permissions = set(token.payload.data.get("permissions", [])) | {
+ y for x in token.payload.data.get("roles", {}).values() for y in x
+ }
+ if not (user_permissions >= required_permissions):
+ logger.debug("User does not have the required permissions: %s", required_permissions)
+ return APIErrorResponse(status_code=http.HTTPStatus.FORBIDDEN, detail="Insufficient permissions")
+
+ return self.app
diff --git a/flama/injection/injector.py b/flama/injection/injector.py
index 7ad052f2..b2134fb6 100644
--- a/flama/injection/injector.py
+++ b/flama/injection/injector.py
@@ -69,19 +69,19 @@ def resolver(self):
self._resolver = None
@t.overload
- def resolve(self, annotation: t.Any):
+ def resolve(self, annotation: t.Any) -> "ResolutionTree":
...
@t.overload
- def resolve(self, annotation: t.Any, *, name: str):
+ def resolve(self, annotation: t.Any, *, name: str) -> "ResolutionTree":
...
@t.overload
- def resolve(self, annotation: t.Any, *, default: t.Any):
+ def resolve(self, annotation: t.Any, *, default: t.Any) -> "ResolutionTree":
...
@t.overload
- def resolve(self, annotation: t.Any, *, name: str, default: t.Any):
+ def resolve(self, annotation: t.Any, *, name: str, default: t.Any) -> "ResolutionTree":
...
def resolve(
| Authentication mechanism
## Summary
Provide a set of tools for implementing an authentication mechanism in Flama applications.
## Motivation
Authentication is a commonly used feature for all server-side applications.
## Proposed changes
Create a set of tools for working with the basics of an authentication workflow, such as a mechanism for encoding and decoding [JWT](https://jwt.io/), a `Component` for injecting it into a function, and a `Middleware` for checking the permissions necessary for a `Route` against a JWT.
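A minimal sketch of the resulting API, based on the classes introduced in this patch (the `HS256` algorithm name and signing key are illustrative assumptions, not part of the proposal):
```python
# Hypothetical usage of the JWT class added by this PR.
from flama.authentication.jwt.jwt import JWT

key = b"secret"  # assumption: a symmetric key for the chosen algorithm

# Encode a token carrying user permissions in the non-standard `data` field
token = JWT(header={"alg": "HS256"}, payload={"data": {"permissions": ["puppy:read"]}}).encode(key)

# Decoding verifies the signature and validates the registered claims (iss, sub, aud, exp, nbf, iat, jti)
decoded = JWT.decode(token, key)
```
The `AuthenticationMiddleware` then compares the `permissions` and `roles` found in the token's `data` field against the permissions tagged on the resolved `Route`, returning a 403 response when they are insufficient.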
| 2023-10-27T13:33:00 | 0.0 | [] | [] |
|||
vortico/flama | vortico__flama-136 | eda5df444b3d7128e59ca8936d80f7612c532ef3 | diff --git a/flama/concurrency.py b/flama/concurrency.py
index e008d18e..3706d0f4 100644
--- a/flama/concurrency.py
+++ b/flama/concurrency.py
@@ -90,12 +90,12 @@ async def run_task_group(*tasks: t.Coroutine[t.Any, t.Any, t.Any]) -> t.List[asy
class AsyncProcess(multiprocessing.Process):
"""Multiprocessing Process class whose target is an async function."""
+ _target: t.Optional[t.Callable[..., t.Union[t.Any, t.Coroutine]]]
+ _args: t.List[t.Any]
+ _kwargs: t.Dict[str, t.Any]
+
def run(self):
- if self._target: # type: ignore
- task = self._target(*self._args, **self._kwargs) # type: ignore
-
- if is_async(self._target): # type: ignore
- policy = asyncio.get_event_loop_policy()
- loop = policy.new_event_loop()
- policy.set_event_loop(loop)
- loop.run_until_complete(task)
+ if self._target:
+ result_or_task = self._target(*self._args, **self._kwargs)
+
+ return asyncio.run(result_or_task) if is_async(self._target) else result_or_task
diff --git a/flama/ddd/workers.py b/flama/ddd/workers.py
index f3c84df3..ef6a53a3 100644
--- a/flama/ddd/workers.py
+++ b/flama/ddd/workers.py
@@ -6,6 +6,7 @@
from flama.ddd import types
from flama.ddd.repositories import AbstractRepository, SQLAlchemyRepository
+from flama.exceptions import ApplicationError
if t.TYPE_CHECKING:
from sqlalchemy.ext.asyncio import AsyncConnection
@@ -17,95 +18,181 @@
class WorkerType(abc.ABCMeta):
+ """Metaclass for workers.
+
+ It will gather all the repositories defined in the class as class attributes as a single dictionary under the name
+ `_repositories`.
+ """
+
def __new__(mcs, name: str, bases: t.Tuple[type], namespace: t.Dict[str, t.Any]):
- namespace["_repositories"] = types.Repositories(
- {
- k: v
- for k, v in namespace.get("__annotations__", {}).items()
- if inspect.isclass(v) and issubclass(v, AbstractRepository)
+ if mcs._is_abstract_worker(namespace) and "__annotations__" in namespace:
+ namespace["_repositories"] = types.Repositories(
+ {
+ k: v
+ for k, v in namespace["__annotations__"].items()
+ if inspect.isclass(v) and issubclass(v, AbstractRepository)
+ }
+ )
+
+ namespace["__annotations__"] = {
+ k: v for k, v in namespace["__annotations__"].items() if k not in namespace["_repositories"]
}
- )
return super().__new__(mcs, name, bases, namespace)
+ @staticmethod
+ def _is_abstract_worker(namespace: t.Dict[str, t.Any]) -> bool:
+ return namespace.get("__module__") != "flama.ddd.workers" or namespace.get("__qualname__") != "AbstractWorker"
+
+
+class AbstractWorker(abc.ABC, metaclass=WorkerType):
+ """Abstract class for workers.
+
+ It will be used to define the workers for the application. A worker consists of a set of repositories that will be
+ used to interact with entities and a mechanism for isolate a single unit of work.
+ """
-class AbstractWorker(abc.ABC):
_repositories: t.ClassVar[t.Dict[str, t.Type[AbstractRepository]]]
def __init__(self, app: t.Optional["Flama"] = None):
+ """Initialize the worker.
+
+ It will receive the application instance as a parameter.
+
+ :param app: Application instance.
+ """
self._app = app
@property
def app(self) -> "Flama":
- assert self._app, "Worker not initialized"
+ """Application instance.
+
+ :return: Application instance.
+ """
+ if not self._app:
+ raise ApplicationError("Worker not initialized")
+
return self._app
@app.setter
- def app(self, app: "Flama"):
+ def app(self, app: "Flama") -> None:
+ """Set the application instance.
+
+ :param app: Application instance.
+ """
self._app = app
@app.deleter
- def app(self):
+ def app(self) -> None:
+ """Delete the application instance."""
self._app = None
@abc.abstractmethod
- async def __aenter__(self) -> "AbstractWorker":
+ async def begin(self) -> None:
+ """Start a unit of work."""
...
@abc.abstractmethod
- async def __aexit__(self, exc_type, exc_val, exc_tb):
+ async def end(self, *, rollback: bool = False) -> None:
+ """End a unit of work.
+
+ :param rollback: If the unit of work should be rolled back.
+ """
...
+ async def __aenter__(self) -> "AbstractWorker":
+ """Start a unit of work."""
+ await self.begin()
+ return self
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ """End a unit of work."""
+ await self.end(rollback=exc_type is not None)
+
@abc.abstractmethod
async def commit(self) -> None:
+ """Commit the unit of work."""
...
@abc.abstractmethod
async def rollback(self) -> None:
+ """Rollback the unit of work."""
...
-class SQLAlchemyWorker(AbstractWorker, metaclass=WorkerType):
+class SQLAlchemyWorker(AbstractWorker):
_repositories: t.ClassVar[t.Dict[str, t.Type[SQLAlchemyRepository]]]
_connection: "AsyncConnection"
_transaction: "AsyncTransaction"
@property
def connection(self) -> "AsyncConnection":
+ """Connection to the database.
+
+ :return: Connection to the database.
+ :raises AttributeError: If the connection is not initialized.
+ """
try:
return self._connection
except AttributeError:
raise AttributeError("Connection not initialized")
- async def begin(self):
- self._connection = self.app.sqlalchemy.engine.connect()
- await self._connection.__aenter__()
- self._transaction = self._connection.begin()
- await self._transaction
+ @property
+ def transaction(self) -> "AsyncTransaction":
+ """Database transaction.
- async def close(self):
- if hasattr(self, "_transaction"):
- await self._transaction.__aexit__(None, None, None)
- del self._transaction
+ :return: Database transaction.
+ :raises AttributeError: If the transaction is not started.
+ """
+ try:
+ return self._transaction
+ except AttributeError:
+ raise AttributeError("Transaction not started")
- if hasattr(self, "_connection"):
- await self._connection.__aexit__(None, None, None)
- del self._connection
+ async def begin_transaction(self) -> None:
+ """Open a connection and begin a transaction."""
+
+ self._connection = await self.app.sqlalchemy.open_connection()
+ self._transaction = await self.app.sqlalchemy.begin_transaction(self._connection)
+
+ async def end_transaction(self, *, rollback: bool = False) -> None:
+ """End a transaction and close the connection.
+
+ :param rollback: If the transaction should be rolled back.
+ :raises AttributeError: If the connection is not initialized or the transaction is not started.
+ """
+ await self.app.sqlalchemy.end_transaction(self.transaction, rollback=rollback)
+ del self._transaction
+
+ await self.app.sqlalchemy.close_connection(self.connection)
+ del self._connection
+
+ async def begin(self) -> None:
+ """Start a unit of work.
+
+ Initialize the connection, begin a transaction, and create the repositories.
+ """
+ await self.begin_transaction()
- async def __aenter__(self):
- await self.begin()
for repository, repository_class in self._repositories.items():
setattr(self, repository, repository_class(self.connection))
- return self
- async def __aexit__(self, exc_type, exc_val, exc_tb):
- await self.close()
+ async def end(self, *, rollback: bool = False) -> None:
+ """End a unit of work.
+
+ Close the connection, commit or rollback the transaction, and delete the repositories.
+
+ :param rollback: If the unit of work should be rolled back.
+ """
+ await self.end_transaction(rollback=rollback)
for repository in self._repositories.keys():
delattr(self, repository)
async def commit(self):
+ """Commit the unit of work."""
await self.connection.commit()
async def rollback(self):
+ """Rollback the unit of work."""
await self.connection.rollback()
diff --git a/flama/exceptions.py b/flama/exceptions.py
index d20f91cc..babbdf72 100644
--- a/flama/exceptions.py
+++ b/flama/exceptions.py
@@ -19,6 +19,10 @@ class ApplicationError(Exception):
...
+class SQLAlchemyError(ApplicationError):
+ ...
+
+
class DecodeError(Exception):
"""
Raised by a Codec when `decode` fails due to malformed syntax.
diff --git a/flama/resources/workers.py b/flama/resources/workers.py
index 45b5d21e..6f7b311b 100644
--- a/flama/resources/workers.py
+++ b/flama/resources/workers.py
@@ -1,6 +1,7 @@
import typing as t
from flama.ddd import SQLAlchemyWorker
+from flama.exceptions import ApplicationError
if t.TYPE_CHECKING:
from flama import Flama
@@ -8,24 +9,26 @@
class FlamaWorker(SQLAlchemyWorker):
- _repositories: t.ClassVar[t.Dict[str, t.Type["SQLAlchemyTableRepository"]]]
+ _repositories: t.Dict[str, t.Type["SQLAlchemyTableRepository"]]
def __init__(self, app: t.Optional["Flama"] = None):
super().__init__(app)
+ self._repositories = {}
self._init_repositories: t.Optional[t.Dict[str, "SQLAlchemyTableRepository"]] = None
@property
def repositories(self) -> t.Dict[str, "SQLAlchemyTableRepository"]:
- assert self._init_repositories, "Repositories not initialized"
+ if not self._init_repositories:
+ raise ApplicationError("Repositories not initialized")
+
return self._init_repositories
- async def __aenter__(self):
- await self.begin()
+ async def begin(self) -> None:
+ await self.begin_transaction()
self._init_repositories = {r: cls(self.connection) for r, cls in self._repositories.items()}
- return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
- await self.close()
+ await self.end_transaction()
del self._init_repositories
def add_repository(self, name: str, cls: t.Type["SQLAlchemyTableRepository"]) -> None:
diff --git a/flama/sqlalchemy.py b/flama/sqlalchemy.py
index 13470458..5bb8febf 100644
--- a/flama/sqlalchemy.py
+++ b/flama/sqlalchemy.py
@@ -1,11 +1,16 @@
+import abc
import typing as t
+from flama import exceptions
from flama.modules import Module
try:
import sqlalchemy
from sqlalchemy import MetaData
- from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+ from sqlalchemy.ext.asyncio import create_async_engine
+
+ if t.TYPE_CHECKING:
+ from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, AsyncTransaction
metadata = MetaData()
except Exception: # pragma: no cover
@@ -15,39 +20,354 @@
__all__ = ["metadata", "SQLAlchemyModule"]
+class ConnectionManager(abc.ABC):
+ """Abstract class for connection managers.
+
+ It will be used to manage the connections and transactions.
+ """
+
+ def __init__(self, engine: "AsyncEngine") -> None:
+ """Initialize the connection manager.
+
+ :param engine: SQLAlchemy engine.
+ """
+ self._engine = engine
+
+ @abc.abstractmethod
+ async def open(self) -> "AsyncConnection":
+ """Open a new connection to the database.
+
+ :return: Database connection.
+ """
+ ...
+
+ @abc.abstractmethod
+ async def close(self, connection: "AsyncConnection") -> None:
+ """Close the connection to the database.
+
+ :param connection: Database connection.
+ """
+ ...
+
+ @abc.abstractmethod
+ async def begin(self, connection: "AsyncConnection") -> "AsyncTransaction":
+ """Begin a new transaction.
+
+ :param connection: Database connection to use for the transaction.
+ :return: Database transaction.
+ """
+ ...
+
+ @abc.abstractmethod
+ async def end(self, transaction: "AsyncTransaction", *, rollback: bool = False) -> None:
+ """End a transaction.
+
+ :param transaction: Database transaction.
+ :param rollback: If the transaction should be rolled back.
+ """
+ ...
+
+
+class SingleConnectionManager(ConnectionManager):
+ """Connection manager that uses a single connection and transaction.
+
+ A single connection is opened when requested, and subsequent requests will share this same connection. Once all
+ clients finish with the connection, it will be closed.
+
+ Transactions are handled similarly: the first request creates the transaction, and subsequent requests will
+ generate nested transactions.
+ """
+
+ def __init__(self, engine: "AsyncEngine") -> None:
+ """Initialize the connection manager.
+
+ :param engine: SQLAlchemy engine.
+ """
+ super().__init__(engine)
+ self._connection: t.Optional["AsyncConnection"] = None
+ self._transaction: t.Optional["AsyncTransaction"] = None
+ self._clients = 0
+
+ @property
+ def connection(self) -> "AsyncConnection":
+ """Connection to the database.
+
+ :return: Connection to the database.
+ :raises SQLAlchemyError: If the connection is not initialized.
+ """
+ if not self._connection:
+ raise exceptions.SQLAlchemyError("Connection not initialized")
+
+ return self._connection
+
+ @property
+ def transaction(self) -> "AsyncTransaction":
+ """Transaction to the database.
+
+ :return: Transaction to the database.
+ :raises SQLAlchemyError: If the transaction is not initialized.
+ """
+ if not self._transaction:
+ raise exceptions.SQLAlchemyError("Transaction not started")
+
+ return self._transaction
+
+ async def open(self) -> "AsyncConnection":
+ """Open a new connection to the database.
+
+ The first client will open a new connection, and subsequent clients will share this same connection.
+
+ :return: Database connection.
+ """
+ try:
+ connection = self.connection
+ except exceptions.SQLAlchemyError:
+ self._connection = connection = self._engine.connect()
+ await self._connection.__aenter__()
+
+ self._clients += 1
+ return connection
+
+ async def close(self, connection: "AsyncConnection") -> None:
+ """Close the connection to the database.
+
+ If this is the last client, the connection will be closed.
+
+ :param connection: Database connection.
+ :raises SQLAlchemyError: If the connection is a different connection from the one opened.
+ """
+ if connection != self.connection:
+ raise exceptions.SQLAlchemyError("Wrong connection")
+
+ self._clients -= 1
+
+ if self._clients == 0:
+ await connection.__aexit__(None, None, None)
+ self._connection = None
+
+ async def begin(self, connection: "AsyncConnection") -> "AsyncTransaction":
+ """Begin a new transaction.
+
+ If no transaction is started, a new transaction will be created. If a transaction is already started, a nested
+ transaction will be created.
+
+ :return: Database transaction.
+ :raises SQLAlchemyError: If the connection is a different connection from the one opened.
+ """
+ if connection != self.connection:
+ raise exceptions.SQLAlchemyError("Wrong connection")
+
+ if self._transaction is None:
+ self._transaction = transaction = connection.begin()
+ else:
+ transaction = connection.begin_nested()
+
+ await transaction
+ return transaction
+
+ async def end(self, transaction: "AsyncTransaction", *, rollback: bool = False) -> None:
+ """End a transaction.
+
+ :param transaction: Database transaction.
+ :param rollback: If the transaction should be rolled back.
+ """
+ if rollback:
+ await transaction.rollback()
+ else:
+ await transaction.commit()
+
+ if transaction == self.transaction:
+ self._transaction = None
+
+
+class MultipleConnectionManager(ConnectionManager):
+ """Connection manager that handlers several connections and transactions."""
+
+ def __init__(self, engine: "AsyncEngine") -> None:
+ """Initialize the connection manager.
+
+ This manager keeps track of the connections and transactions, and it will close the connections when requested.
+ If a connection is closed, all its transactions will be committed and finished too.
+
+ :param engine: SQLAlchemy engine.
+ """
+ super().__init__(engine)
+ self._connections: t.Set["AsyncConnection"] = set()
+ self._transactions: t.Dict["AsyncConnection", "AsyncTransaction"] = {}
+
+ async def open(self) -> "AsyncConnection":
+ """Open a new connection to the database.
+
+ :return: Database connection.
+ """
+ connection = self._engine.connect()
+ await connection.__aenter__()
+ self._connections.add(connection)
+ return connection
+
+ async def close(self, connection: "AsyncConnection") -> None:
+ """Close the connection to the database.
+
+ :param connection: Database connection.
+ :raises SQLAlchemyError: If the connection is not initialized.
+ """
+ if connection not in self._connections:
+ raise exceptions.SQLAlchemyError("Connection not initialized")
+
+ if connection in self._transactions:
+ await self.end(self._transactions[connection])
+
+ await connection.__aexit__(None, None, None)
+ self._connections.remove(connection)
+
+ async def begin(self, connection: "AsyncConnection") -> "AsyncTransaction":
+ """Begin a new transaction.
+
+ :param connection: Database connection to use for the transaction.
+ :return: Database transaction.
+ :raises SQLAlchemyError: If the connection is not initialized.
+ """
+ if connection not in self._connections:
+ raise exceptions.SQLAlchemyError("Connection not initialized")
+
+ if connection in self._transactions:
+ raise exceptions.SQLAlchemyError("Transaction already started in this connection")
+
+ transaction = await connection.begin()
+ self._transactions[connection] = transaction
+ return transaction
+
+ async def end(self, transaction: "AsyncTransaction", *, rollback: bool = False) -> None:
+ """End a transaction.
+
+ :param transaction: Database transaction.
+ :param rollback: If the transaction should be rolled back.
+ :raises SQLAlchemyError: If the transaction is not started.
+ """
+ if transaction.connection not in self._transactions:
+ raise exceptions.SQLAlchemyError("Transaction not started")
+
+ if rollback:
+ await transaction.rollback()
+ else:
+ await transaction.commit()
+
+ del self._transactions[transaction.connection]
+
+
class SQLAlchemyModule(Module):
+ """SQLAlchemy module.
+
+ It will initialize the SQLAlchemy engine and connection manager. It will also provide an interface to handle
+ connections and transactions.
+
+ Referring to how connections and transactions are managed, it can work in two modes: single connection and multiple
+ connections.
+ * Multiple connections (default): It will open a new connection and transaction for each request. It will keep
+ track of the connections, and it will close them when requested. Multiple transactions using the same connection are
+ also supported.
+ * Single connection: It will open a single connection and transaction, and all requests will share this connection
+ and transaction. The connection will be closed when all clients finish. It will create a single transaction and
+ new transactions requested will be nested from this one.
+ """
+
name = "sqlalchemy"
- def __init__(self, database: t.Optional[str] = None, engine_args: t.Optional[t.Dict[str, t.Any]] = None):
+ def __init__(
+ self, database: str, single_connection: bool = False, engine_args: t.Optional[t.Dict[str, t.Any]] = None
+ ):
+ """Initialize the SQLAlchemy module.
+
+ Referring to how connections and transactions are managed, it can work in two modes: single connection and
+ multiple connections.
+ * Multiple connections (default): It will open a new connection and transaction for each request. It will keep
+ track of the connections, and it will close them when requested. Multiple transactions using the same connection
+ are also supported.
+ * Single connection: It will open a single connection and transaction, and all requests will share this
+ connection and transaction. The connection will be closed when all clients finish. It will create a single
+ transaction and new transactions requested will be nested from this one.
+
+ :param database: Database connection string.
+ :param single_connection: If the module should work in single connection mode.
+ :param engine_args: Arguments to pass to the SQLAlchemy engine.
+ :raises ApplicationError: If SQLAlchemy is not installed.
+ """
+ if sqlalchemy is None:
+ raise exceptions.ApplicationError("sqlalchemy[asyncio] must be installed to use SQLAlchemyModule")
+
+ if not database:
+ raise exceptions.ApplicationError("Database connection string must be provided")
+
super().__init__()
self.database = database
+ self.metadata: "MetaData" = metadata # type: ignore[assignment]
self._engine: t.Optional["AsyncEngine"] = None
self._engine_args = engine_args or {}
- self._metadata: t.Optional["MetaData"] = metadata
+ self._connection_manager: t.Optional["ConnectionManager"] = None
+ self._manager_cls: t.Type["ConnectionManager"] = (
+ SingleConnectionManager if single_connection else MultipleConnectionManager
+ )
@property
- def engine(self) -> t.Optional["AsyncEngine"]:
- assert sqlalchemy is not None, "sqlalchemy[asyncio] must be installed to use SQLAlchemyModule."
+ def engine(self) -> "AsyncEngine":
+ """SQLAlchemy engine.
+
+ :return: SQLAlchemy engine.
+ :raises ApplicationError: If SQLAlchemyModule is not initialized.
+ """
+ if self._engine is None:
+ raise exceptions.ApplicationError("SQLAlchemyModule not initialized")
return self._engine
- @engine.setter
- def engine(self, value: "AsyncEngine"):
- self._engine = value
+ @property
+ def connection_manager(self) -> "ConnectionManager":
+ """Connection manager.
+
+ :return: Connection manager.
+ :raises ApplicationError: If SQLAlchemyModule is not initialized.
+ """
+ if self._connection_manager is None:
+ raise exceptions.ApplicationError("SQLAlchemyModule not initialized")
+ return self._connection_manager
- @engine.deleter
- def engine(self):
- self._engine = None
+ async def open_connection(self) -> "AsyncConnection":
+ """Open a new connection to the database.
- @property
- def metadata(self) -> t.Optional["MetaData"]:
- assert sqlalchemy is not None, "sqlalchemy[asyncio] must be installed to use SQLAlchemyModule."
- return self._metadata
+ :return: Database connection.
+ """
+ return await self.connection_manager.open()
+
+ async def close_connection(self, connection: "AsyncConnection") -> None:
+ """Close the connection to the database.
+
+ :param connection: Database connection.
+ """
+ return await self.connection_manager.close(connection)
+
+ async def begin_transaction(self, connection: "AsyncConnection") -> "AsyncTransaction":
+ """Begin a new transaction.
+
+ :param connection: Database connection to use for the transaction.
+ :return: Database transaction.
+ """
+ return await self.connection_manager.begin(connection)
+
+ async def end_transaction(self, transaction: "AsyncTransaction", *, rollback: bool = False) -> None:
+ """End a transaction.
+
+ :param transaction: Database transaction.
+ :param rollback: If the transaction should be rolled back.
+ """
+ return await self.connection_manager.end(transaction, rollback=rollback)
async def on_startup(self):
- if self.database:
- self.engine = create_async_engine(self.database, **self._engine_args)
+ """Initialize the SQLAlchemy engine and connection manager."""
+ self._engine = create_async_engine(self.database, **self._engine_args)
+ self._connection_manager = self._manager_cls(self._engine)
async def on_shutdown(self):
- if self.engine:
- await self.engine.dispose()
+ """Close the SQLAlchemy engine and connection manager."""
+ await self.engine.dispose()
+ self._engine = None
+ self._connection_manager = None
| Connections manager for SQLAlchemy module
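A rough sketch of the interface this introduces, for context (illustrative only; it assumes `sqlalchemy[asyncio]` is installed and that the module's `on_startup` has already run, so the engine and connection manager exist):
```python
# Method names match the SQLAlchemyModule API added in the diff above.
from flama.sqlalchemy import SQLAlchemyModule

module = SQLAlchemyModule("sqlite+aiosqlite://", single_connection=True)

async def unit_of_work():
    connection = await module.open_connection()
    transaction = await module.begin_transaction(connection)  # nested if one is already open
    ...  # run queries on `connection`
    await module.end_transaction(transaction)  # commits; pass rollback=True to roll back
    await module.close_connection(connection)  # closed once the last client releases it
```
With `single_connection=True` concurrent units of work share one connection and nest their transactions; with the default `MultipleConnectionManager` each unit gets its own connection and transaction.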
| 2023-10-23T12:04:52 | 0.0 | [] | [] |
|||
vortico/flama | vortico__flama-125 | 283a658bf4caedbe65f2f0f31a5440f82255a8fe | diff --git a/flama/client.py b/flama/client.py
index 42ab49d5..fcc05a60 100644
--- a/flama/client.py
+++ b/flama/client.py
@@ -102,7 +102,7 @@ def __init__(
if models:
app = Flama() if not app else app
- for (name, url, path) in models:
+ for name, url, path in models:
app.models.add_model(url, path, name)
self.models = {m[0]: m[1] for m in models or {}}
diff --git a/flama/ddd/repositories.py b/flama/ddd/repositories.py
index f111a976..7fbe1d86 100644
--- a/flama/ddd/repositories.py
+++ b/flama/ddd/repositories.py
@@ -48,9 +48,9 @@ def __eq__(self, other):
@property
def primary_key(self) -> sqlalchemy.Column:
- """Returns the primary key of the model.
+ """Returns the primary key of the table.
- :return: sqlalchemy.Column: The primary key of the model.
+ :return: sqlalchemy.Column: The primary key of the table.
:raises: exceptions.IntegrityError: If the model has a composed primary key.
"""
@@ -61,8 +61,8 @@ def primary_key(self) -> sqlalchemy.Column:
return model_pk_columns[0]
- async def create(self, data: t.Union[t.Dict[str, t.Any], types.Schema]) -> t.Optional[t.Tuple[t.Any, ...]]:
- """Creates a new element in the repository.
+ async def create(self, *data: t.Union[t.Dict[str, t.Any], types.Schema]) -> t.Optional[t.List[t.Tuple[t.Any, ...]]]:
+ """Creates new elements in the table.
If the element already exists, it raises an `exceptions.IntegrityError`. If the element is created, it returns
the primary key of the element.
@@ -72,13 +72,13 @@ async def create(self, data: t.Union[t.Dict[str, t.Any], types.Schema]) -> t.Opt
:raises: exceptions.IntegrityError: If the element already exists.
"""
try:
- result = await self._connection.execute(sqlalchemy.insert(self.table).values(**data))
+ result = await self._connection.execute(sqlalchemy.insert(self.table), data)
except sqlalchemy.exc.IntegrityError as e:
raise exceptions.IntegrityError(str(e))
- return tuple(result.inserted_primary_key) if result.inserted_primary_key else None
+ return [tuple(x) for x in result.inserted_primary_key_rows] if result.inserted_primary_key_rows else None
async def retrieve(self, id: t.Any) -> types.Schema:
- """Retrieves an element from the repository.
+ """Retrieves an element from the table.
If the element does not exist, it raises a `NotFoundError`.
@@ -98,7 +98,7 @@ async def retrieve(self, id: t.Any) -> types.Schema:
return types.Schema(element._asdict())
async def update(self, id: t.Any, data: t.Union[t.Dict[str, t.Any], types.Schema]) -> types.Schema:
- """Updates an element in the repository.
+ """Updates an element in the table.
If the element does not exist, it raises a `NotFoundError`. If the element is updated, it returns the updated
element.
@@ -119,7 +119,7 @@ async def update(self, id: t.Any, data: t.Union[t.Dict[str, t.Any], types.Schema
return types.Schema({pk.name: id, **data})
async def delete(self, id: t.Any) -> None:
- """Deletes an element from the repository.
+ """Deletes an element from the table.
If the element does not exist, it raises a `NotFoundError`.
@@ -134,7 +134,7 @@ async def delete(self, id: t.Any) -> None:
raise exceptions.NotFoundError(id)
async def list(self, *clauses, **filters) -> t.List[types.Schema]:
- """Lists all the elements in the repository.
+ """Lists all the elements in the table.
If no elements are found, it returns an empty list. If no clauses or filters are given, it returns all the
elements in the repository.
@@ -157,14 +157,29 @@ async def list(self, *clauses, **filters) -> t.List[types.Schema]:
return [types.Schema(row._asdict()) async for row in await self._connection.stream(query)]
- async def drop(self) -> int:
- """Drops all the elements in the repository.
+ async def drop(self, *clauses, **filters) -> int:
+ """Drops elements in the table.
- Returns the number of elements dropped.
+ Returns the number of elements dropped. If no clauses or filters are given, it deletes all the elements in the
+ table.
+
+ Clauses are used to filter the elements using sqlalchemy clauses. Filters are used to filter the elements using
+ exact values to specific columns. Clauses and filters can be combined.
+ Clause example: `table.c["id"].in_((1, 2, 3))`
+ Filter example: `id=1`
+
+ :param clauses: Clauses to filter the elements.
+ :param filters: Filters to filter the elements.
:return: The number of elements dropped.
"""
- result = await self._connection.execute(sqlalchemy.delete(self.table))
+ query = sqlalchemy.delete(self.table)
+
+ where_clauses = tuple(clauses) + tuple(self.table.c[k] == v for k, v in filters.items())
+ if where_clauses:
+ query = query.where(sqlalchemy.and_(*where_clauses))
+
+ result = await self._connection.execute(query)
return result.rowcount
@@ -178,8 +193,8 @@ def __init__(self, connection: "AsyncConnection"):
def __eq__(self, other):
return isinstance(other, SQLAlchemyTableRepository) and self._table == other._table and super().__eq__(other)
- async def create(self, data: t.Union[t.Dict[str, t.Any], types.Schema]) -> t.Optional[t.Tuple[t.Any, ...]]:
- """Creates a new element in the repository.
+ async def create(self, *data: t.Union[t.Dict[str, t.Any], types.Schema]) -> t.Optional[t.List[t.Tuple[t.Any, ...]]]:
+ """Creates new elements in the repository.
If the element already exists, it raises an `exceptions.IntegrityError`. If the element is created, it returns
the primary key of the element.
@@ -188,7 +203,7 @@ async def create(self, data: t.Union[t.Dict[str, t.Any], types.Schema]) -> t.Opt
:return: The primary key of the created element.
:raises: exceptions.IntegrityError: If the element already exists.
"""
- return await self._table_manager.create(data)
+ return await self._table_manager.create(*data)
async def retrieve(self, id: t.Any) -> types.Schema:
"""Retrieves an element from the repository.
@@ -242,11 +257,20 @@ async def list(self, *clauses, **filters) -> t.List[types.Schema]:
"""
return await self._table_manager.list(*clauses, **filters)
- async def drop(self) -> int:
- """Drops all the elements in the repository.
+ async def drop(self, *clauses, **filters) -> int:
+ """Drops elements in the repository.
+
+ Returns the number of elements dropped. If no clauses or filters are given, it deletes all the elements in the
+ repository.
+
+ Clauses are used to filter the elements using sqlalchemy clauses. Filters are used to filter the elements using
+ exact values to specific columns. Clauses and filters can be combined.
- Returns the number of elements dropped.
+ Clause example: `table.c["id"].in_((1, 2, 3))`
+ Filter example: `id=1`
+ :param clauses: Clauses to filter the elements.
+ :param filters: Filters to filter the elements.
:return: The number of elements dropped.
"""
return await self._table_manager.drop(*clauses, **filters)
diff --git a/flama/resources/crud.py b/flama/resources/crud.py
index aa70f6b2..cd0635e5 100644
--- a/flama/resources/crud.py
+++ b/flama/resources/crud.py
@@ -44,7 +44,7 @@ async def create(
return http.APIResponse( # type: ignore[return-value]
schema=rest_schemas.output.schema,
- content={**element, **dict(zip([x.name for x in self.model.primary_key], result or []))},
+ content={**element, **dict(zip([x.name for x in self.model.primary_key], result[0] if result else []))},
status_code=201,
)
| Allow bulk creation and deletion on SQLAlchemy repositories
### Summary
SQLAlchemy provides a way to perform bulk operations so we could enhance our `SQLAlchemyTableManager` to allow bulk inserts and bulk deletes.
Currently SQLAlchemy implements a mechanism for retrieving primary keys from a bulk insert operation, but [it only works on some dialects](https://docs.sqlalchemy.org/en/14/core/connections.html#sqlalchemy.engine.CursorResult.inserted_primary_key_rows); in any case, this won't affect single-row inserts.
### Motivation
To make our interface for interacting with SQLAlchemy tables a bit more flexible, by providing more performant operations on commonly used actions.
### Proposed changes
Given the following table
```python
import sqlalchemy
metadata = sqlalchemy.MetaData()

table = sqlalchemy.Table(
    "Puppy",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True, autoincrement=True),
    sqlalchemy.Column("name", sqlalchemy.String(255), nullable=False),
)
```
And a manager like:
```python
from flama.ddd import SQLAlchemyTableManager
manager = SQLAlchemyTableManager(table, connection=...)
```
The interface should be:
```python
## Single insert
primary_keys = manager.create({"name": "Canna"})
# primary_keys = [(1,)]
## Bulk insert
primary_keys = manager.create({"name": "Canna"}, {"name": "Sandy"})
# primary_keys = [(1,), (2,)]
## Single delete (by ID)
manager.drop(1)
# return value is None
## Bulk delete (through filtering)
drop_count = manager.drop(table.c["name"].in_(["Canna", "Sandy"]))
# drop_count = 2
```
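As implemented in the patch above, `drop` ends up taking SQLAlchemy clauses and exact-value keyword filters rather than a positional ID, and the manager methods are coroutines; a hedged sketch of that final interface, reusing `table` and `manager` from the snippet above:
```python
# Sketch of the implemented drop() signature (async, unlike the proposal's pseudo-code).
dropped = await manager.drop(table.c["name"].in_(["Canna", "Sandy"]))  # clause-based filtering
dropped = await manager.drop(name="Canna")  # exact-value keyword filter
dropped = await manager.drop()  # no clauses or filters: drops every row
```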
### Impact
The changes won't break current compatibility.
| 2023-10-07T11:32:47 | 0.0 | [] | [] |
|||
vortico/flama | vortico__flama-110 | 7c9f1210bc767b88f9dd9e85cbcdbe80063e4e8f | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bed99040..8a173101 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -44,20 +44,13 @@ repos:
exclude: "make"
- id: ruff
name: Ruff - Code Linter
- entry: ./scripts/ruff
+ entry: ./scripts/ruff --fix
language: system
types: [file, python]
exclude: "make"
- - id: mypy
- name: Mypy - Static types check
- entry: ./scripts/mypy
+ - id: pyright
+ name: Pyright - Static types check
+ entry: ./scripts/pyright
language: system
types: [file, python]
exclude: "(make|tests/|examples/)"
- - id: pytest
- name: Pytest - Unit tests
- entry: ./scripts/test
- language: system
- types: [python]
- exclude: "make"
- pass_filenames: false
diff --git a/Makefile b/Makefile
index 40d25e5b..28d2c1ab 100644
--- a/Makefile
+++ b/Makefile
@@ -36,14 +36,14 @@ black: ## Runs black on Flama
ruff: ## Runs ruff on Flama
@./scripts/ruff .
-mypy: ## Runs mypy on Flama
- @./scripts/mypy .
+pyright: ## Runs pyright on Flama
+ @./scripts/pyright
-docker_push: ## Runs mypy on Flama
+docker_push: ## Push docker images to registry
@./scripts/docker_push .
-.PHONY: help check clean install build lint tests publish version isort black ruff mypy docker_push
+.PHONY: help check clean install build lint tests publish version isort black ruff pyright docker_push
.DEFAULT_GOAL := help
help:
- @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
\ No newline at end of file
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/flama/__init__.py b/flama/__init__.py
index e811a815..31ab8970 100644
--- a/flama/__init__.py
+++ b/flama/__init__.py
@@ -1,5 +1,3 @@
-from starlette.config import Config # noqa
-
from flama.applications import * # noqa
from flama.background import * # noqa
from flama.cli import * # noqa
diff --git a/flama/applications.py b/flama/applications.py
index eec19a94..32a6d8d2 100644
--- a/flama/applications.py
+++ b/flama/applications.py
@@ -23,11 +23,13 @@ class Flama:
def __init__(
self,
routes: t.Optional[t.Sequence[t.Union["BaseRoute", "Mount"]]] = None,
- components: t.Optional[t.Sequence[injection.Component]] = None,
- modules: t.Optional[t.Set["Module"]] = None,
+ components: t.Optional[t.Union[t.Sequence[injection.Component], t.Set[injection.Component]]] = None,
+ modules: t.Optional[t.Union[t.Sequence["Module"], t.Set["Module"]]] = None,
middleware: t.Optional[t.Sequence["Middleware"]] = None,
debug: bool = False,
- events: t.Optional[t.Union[t.Dict[str, t.List[t.Callable]], Events]] = None,
+ events: t.Optional[
+ t.Union[t.Dict[str, t.List[t.Callable[..., t.Coroutine[t.Any, t.Any, None]]]], Events]
+ ] = None,
lifespan: t.Optional[t.Callable[[t.Optional["Flama"]], t.AsyncContextManager]] = None,
title: str = "Flama",
version: str = "0.1.0",
@@ -53,6 +55,8 @@ def __init__(
:param schema_library: Schema library to use.
"""
self._debug = debug
+ self._status = types.AppStatus.NOT_INITIALIZED
+ self._shutdown = False
# Create Dependency Injector
self._injector = injection.Injector(
diff --git a/flama/background.py b/flama/background.py
index 5df2f609..d4857a9e 100644
--- a/flama/background.py
+++ b/flama/background.py
@@ -1,9 +1,7 @@
-import asyncio
import enum
import functools
import sys
import typing as t
-from multiprocessing import Process
import starlette.background
@@ -12,11 +10,20 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import ParamSpec
- t.ParamSpec = ParamSpec
+ t.ParamSpec = ParamSpec # type: ignore
__all__ = ["BackgroundTask", "BackgroundTasks", "Concurrency", "BackgroundThreadTask", "BackgroundProcessTask"]
-P = t.ParamSpec("P")
+P = t.ParamSpec("P") # type: ignore # PORT: Remove this comment when stop supporting 3.9
+
+
+class task_wrapper:
+ def __init__(self, target: t.Callable[P, t.Union[None, t.Awaitable[None]]]):
+ self.target = target
+ functools.update_wrapper(self, target)
+
+ async def __call__(self, *args, **kwargs):
+ await concurrency.run(self.target, *args, **kwargs)
class Concurrency(enum.Enum):
@@ -26,41 +33,20 @@ class Concurrency(enum.Enum):
class BackgroundTask(starlette.background.BackgroundTask):
def __init__(
- self, concurrency: t.Union[Concurrency, str], func: t.Callable[P, t.Any], *args: P.args, **kwargs: P.kwargs
+ self,
+ concurrency: t.Union[Concurrency, str],
+ func: t.Callable[P, t.Union[None, t.Awaitable[None]]],
+ *args: P.args,
+ **kwargs: P.kwargs
) -> None:
- self.func = self._create_task_function(func)
+ self.func = task_wrapper(func)
self.args = args
self.kwargs = kwargs
self.concurrency = Concurrency[concurrency] if isinstance(concurrency, str) else concurrency
- def _create_task_function(self, func: t.Callable[P, t.Any]) -> t.Callable[P, t.Any]:
- if asyncio.iscoroutinefunction(func):
-
- @functools.wraps(func)
- async def _inner(*args, **kwargs):
- await func(*args, **kwargs)
-
- else:
-
- @functools.wraps(func)
- async def _inner(*args, **kwargs):
- await concurrency.run(func, *args, **kwargs)
-
- return _inner
-
- def _create_process_target(self, func: t.Callable[P, t.Any]):
- @functools.wraps(func)
- def process_target(*args: P.args, **kwargs: P.kwargs): # pragma: no cover
- policy = asyncio.get_event_loop_policy()
- loop = policy.new_event_loop()
- policy.set_event_loop(loop)
- loop.run_until_complete(func(*args, **kwargs))
-
- return process_target
-
async def __call__(self):
if self.concurrency == Concurrency.process:
- Process(target=self._create_process_target(self.func), args=self.args, kwargs=self.kwargs).start()
+ concurrency.AsyncProcess(target=self.func, args=self.args, kwargs=self.kwargs).start()
else:
await self.func(*self.args, **self.kwargs)
@@ -72,8 +58,7 @@ def __init__(self, tasks: t.Optional[t.Sequence[BackgroundTask]] = None):
def add_task(
self, concurrency: t.Union[Concurrency, str], func: t.Callable[P, t.Any], *args: P.args, **kwargs: P.kwargs
) -> None:
- task = BackgroundTask(concurrency, func, *args, **kwargs)
- self.tasks.append(task)
+ self.tasks.append(BackgroundTask(concurrency, func, *args, **kwargs))
async def __call__(self) -> None:
for task in self.tasks:
diff --git a/flama/client.py b/flama/client.py
new file mode 100644
index 00000000..82deca7f
--- /dev/null
+++ b/flama/client.py
@@ -0,0 +1,200 @@
+import asyncio
+import contextlib
+import functools
+import importlib.metadata
+import logging
+import typing as t
+from types import TracebackType
+
+import httpx
+
+from flama import types
+from flama.applications import Flama
+
+__all__ = ["Client", "AsyncClient", "LifespanContextManager"]
+
+logger = logging.getLogger(__name__)
+
+
+class LifespanContextManager:
+ def __init__(self, app: Flama, timeout: float = 60.0):
+ self.app = app
+ self.timeout = timeout
+ self._startup_complete = asyncio.Event()
+ self._shutdown_complete = asyncio.Event()
+ self._receive_queue = asyncio.Queue(maxsize=2)
+ self._exception: t.Optional[BaseException] = None
+ self._task: t.Optional[asyncio.Task] = None
+
+ async def _startup(self) -> None:
+ await self._receive_queue.put(types.Message({"type": "lifespan.startup"}))
+ await asyncio.wait_for(self._startup_complete.wait(), timeout=self.timeout)
+ if self._exception:
+ raise self._exception
+
+ async def _shutdown(self) -> None:
+ await self._receive_queue.put(types.Message({"type": "lifespan.shutdown"}))
+ await asyncio.wait_for(self._shutdown_complete.wait(), timeout=self.timeout)
+
+ async def _receive(self) -> types.Message:
+ return await self._receive_queue.get()
+
+ async def _send(self, message: types.Message) -> None:
+ if message["type"] == "lifespan.startup.complete":
+ self._startup_complete.set()
+ elif message["type"] == "lifespan.shutdown.complete":
+ self._shutdown_complete.set()
+
+ async def _app_task(self) -> None:
+ with contextlib.suppress(asyncio.CancelledError):
+ scope = types.Scope({"type": "lifespan"})
+
+ try:
+ await self.app(scope, self._receive, self._send)
+ except BaseException as exc:
+ self._exception = exc
+ self._startup_complete.set()
+ self._shutdown_complete.set()
+
+ raise
+
+ def _run_app(self) -> None:
+ self._task = asyncio.get_event_loop().create_task(self._app_task())
+
+ async def _stop_app(self) -> None:
+ assert self._task is not None
+
+ if not self._task.done():
+ self._task.cancel()
+
+ await self._task
+
+ async def __aenter__(self) -> "LifespanContextManager":
+ self._run_app()
+
+ try:
+ await self._startup()
+ except BaseException:
+ await self._stop_app()
+ raise
+
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]] = None,
+ exc_value: t.Optional[BaseException] = None,
+ traceback: t.Optional[TracebackType] = None,
+ ):
+ await self._shutdown()
+ await self._stop_app()
+
+
+class _BaseClient:
+ def __init__(
+ self,
+ /,
+ app: t.Optional[Flama] = None,
+ models: t.Optional[t.Sequence[t.Tuple[str, str, str]]] = None,
+ **kwargs,
+ ):
+ self.models: t.Optional[t.Dict[str, str]] = None
+
+ if models:
+ app = Flama() if not app else app
+ for (name, url, path) in models:
+ app.models.add_model(url, path, name)
+
+ self.models = {m[0]: m[1] for m in models or {}}
+
+ self.lifespan = LifespanContextManager(app) if app else None
+
+ kwargs["app"] = app
+ kwargs.setdefault("base_url", "http://localapp")
+ kwargs["headers"] = {"user-agent": f"flama/{importlib.metadata.version('flama')}", **kwargs.get("headers", {})}
+
+ super().__init__(**kwargs)
+
+
+class Client(_BaseClient, httpx.Client):
+ """A client for interacting with a Flama application either remote or local.
+
+ This client can handle a local python object:
+ >>> client = Client(app=Flama())
+
+ Or connect to a remote API:
+ >>> client = Client(base_url="https://foo.bar")
+
+ Or generate a Flama application based on a set of flm model files:
+ >>> client = Client(models=[("foo", "/foo/", "model_foo.flm"), ("bar", "/bar/", "model_bar.flm")])
+
+ For initializing the application it's required to use it as a context manager:
+ >>> with Client(app=Flama()) as client:
+ >>> client.post(...)
+ """
+
+ def __enter__(self) -> "Client":
+ super().__enter__()
+ if self.lifespan:
+ asyncio.get_event_loop().run_until_complete(self.lifespan.__aenter__())
+
+ return self
+
+ def __exit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]] = None,
+ exc_value: t.Optional[BaseException] = None,
+ traceback: t.Optional[TracebackType] = None,
+ ):
+ if self.lifespan:
+ asyncio.get_event_loop().run_until_complete(self.lifespan.__aexit__(exc_type, exc_value, traceback))
+ super().__exit__(exc_type, exc_value, traceback)
+
+ def model_request(self, model: str, method: str, url: str, **kwargs) -> httpx.Response:
+ assert self.models, "No models found for request."
+ return self.request(method, f"{self.models[model].rstrip('/')}{url}", **kwargs)
+
+ model_inspect = functools.partialmethod(model_request, method="GET", url="/")
+ model_predict = functools.partialmethod(model_request, method="POST", url="/predict/")
+
+
+class AsyncClient(_BaseClient, httpx.AsyncClient):
+ """An async client for interacting with a Flama application either remote or local.
+
+ This client can handle a local python object:
+ >>> client = AsyncClient(app=Flama())
+
+ Or connect to a remote API:
+ >>> client = AsyncClient(base_url="https://foo.bar")
+
+ Or generate a Flama application based on a set of flm model files:
+ >>> client = AsyncClient(models=[("foo", "/foo/", "model_foo.flm"), ("bar", "/bar/", "model_bar.flm")])
+
+ For initializing the application it's required to use it as an async context manager:
+ >>> async with AsyncClient(app=Flama()) as client:
+ >>> client.post(...)
+ """
+
+ async def __aenter__(self) -> "AsyncClient":
+ await super().__aenter__()
+ if self.lifespan:
+ await self.lifespan.__aenter__()
+
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: t.Optional[t.Type[BaseException]] = None,
+ exc_value: t.Optional[BaseException] = None,
+ traceback: t.Optional[TracebackType] = None,
+ ):
+ if self.lifespan:
+ await self.lifespan.__aexit__(exc_type, exc_value, traceback)
+ await super().__aexit__(exc_type, exc_value, traceback)
+
+ async def model_request(self, model: str, method: str, url: str, **kwargs) -> t.Awaitable[httpx.Response]:
+ assert self.models, "No models found for request."
+ return self.request(method, f"{self.models[model].rstrip('/')}{url}", **kwargs)
+
+ model_inspect = functools.partialmethod(model_request, method="GET", url="/")
+ model_predict = functools.partialmethod(model_request, method="POST", url="/predict/")
diff --git a/flama/concurrency.py b/flama/concurrency.py
index 1bd36765..fb6f9a66 100644
--- a/flama/concurrency.py
+++ b/flama/concurrency.py
@@ -1,23 +1,36 @@
import asyncio
import functools
+import multiprocessing
import sys
import typing as t
-from starlette.concurrency import run_in_threadpool
+if sys.version_info < (3, 9): # PORT: Remove when stop supporting 3.8 # pragma: no cover
+ import contextvars
+
+ async def to_thread(func, /, *args, **kwargs):
+ return await asyncio.get_running_loop().run_in_executor(
+ None, functools.partial(contextvars.copy_context().run, func, *args, **kwargs)
+ )
+
+ asyncio.to_thread = to_thread # pyright: ignore
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import ParamSpec, TypeGuard
- t.TypeGuard = TypeGuard
- t.ParamSpec = ParamSpec
+ t.TypeGuard = TypeGuard # type: ignore
+ t.ParamSpec = ParamSpec # type: ignore
-__all__ = ["is_async", "run"]
+__all__ = ["is_async", "run", "run_task_group", "AsyncProcess"]
-T = t.TypeVar("T", covariant=True)
-P = t.ParamSpec("P")
+R = t.TypeVar("R", covariant=True)
+P = t.ParamSpec("P") # type: ignore # PORT: Remove this comment when stop supporting 3.9
-def is_async(obj: t.Any) -> t.TypeGuard[t.Callable[..., t.Awaitable]]:
+def is_async(
+ obj: t.Any,
+) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9
+ t.Callable[..., t.Awaitable[t.Any]]
+]:
"""Check if given object is an async function, callable or partialised function.
:param obj: Object to check.
@@ -26,13 +39,15 @@ def is_async(obj: t.Any) -> t.TypeGuard[t.Callable[..., t.Awaitable]]:
while isinstance(obj, functools.partial):
obj = obj.func
- return asyncio.iscoroutinefunction(obj) or (
- callable(obj) and asyncio.iscoroutinefunction(obj.__call__) # type: ignore[operator]
- )
+ return asyncio.iscoroutinefunction(obj) or (callable(obj) and asyncio.iscoroutinefunction(obj.__call__))
-async def run(func: t.Callable[P, t.Union[T, t.Awaitable[T]]], *args: P.args, **kwargs: P.kwargs) -> T:
- """Run a function either as asyncio awaiting it if it's an async function or running it in a threadpool if it's a
+async def run(
+ func: t.Union[t.Callable[P, R], t.Callable[P, t.Awaitable[R]]],
+ *args: P.args,
+ **kwargs: P.kwargs,
+) -> R:
+ """Run a function either as asyncio awaiting it if it's an async function or running it in a thread if it's a
sync function.
:param func: Function to run.
@@ -41,6 +56,44 @@ async def run(func: t.Callable[P, t.Union[T, t.Awaitable[T]]], *args: P.args, **
:return: Function returned value.
"""
if is_async(func):
- return await func(*args, **kwargs) # type: ignore[no-any-return]
+ return await func(*args, **kwargs)
+
+ return await asyncio.to_thread(func, *args, **kwargs) # type: ignore
+
+
+if sys.version_info < (3, 11): # PORT: Remove when stop supporting 3.10 # pragma: no cover
+
+ async def run_task_group(*tasks: t.Coroutine[t.Any, t.Any, t.Any]) -> t.List[asyncio.Task]:
+ """Run a group of tasks.
+
+ :param tasks: Tasks to run.
+ :result: Finished tasks.
+ """
+ tasks_list = [asyncio.create_task(task) for task in tasks]
+ await asyncio.wait(tasks_list)
+ return tasks_list
+
+else:
+
+ async def run_task_group(*tasks: t.Coroutine[t.Any, t.Any, t.Any]) -> t.List[asyncio.Task]:
+ """Run a group of tasks.
+
+ :param tasks: Tasks to run.
+ :result: Finished tasks.
+ """
+ async with asyncio.TaskGroup() as task_group:
+ return [task_group.create_task(task) for task in tasks]
+
+
+class AsyncProcess(multiprocessing.Process):
+ """Multiprocessing Process class whose target is an async function."""
+
+ def run(self):
+ if self._target: # type: ignore
+ task = self._target(*self._args, **self._kwargs) # type: ignore
+ if is_async(self._target): # type: ignore
+ policy = asyncio.get_event_loop_policy()
+ loop = policy.new_event_loop()
+ policy.set_event_loop(loop)
+ loop.run_until_complete(task)
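
A minimal usage sketch for the concurrency helpers introduced above, assuming the patch is applied; the example functions are illustrative, not part of the diff:

    import asyncio

    from flama import concurrency

    def sync_add(a: int, b: int) -> int:
        return a + b

    async def async_add(a: int, b: int) -> int:
        return a + b

    async def main() -> None:
        # `run` awaits async callables directly and offloads sync ones to a thread.
        print(await concurrency.run(sync_add, 1, 2))   # 3, via asyncio.to_thread
        print(await concurrency.run(async_add, 3, 4))  # 7, awaited in place

        # `run_task_group` schedules all coroutines and waits until they finish.
        tasks = await concurrency.run_task_group(async_add(1, 1), async_add(2, 2))
        print([task.result() for task in tasks])       # [2, 4]

    asyncio.run(main())
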
diff --git a/flama/config/__init__.py b/flama/config/__init__.py
new file mode 100644
index 00000000..3794bee6
--- /dev/null
+++ b/flama/config/__init__.py
@@ -0,0 +1,2 @@
+from flama.config.config import * # noqa
+from flama.config.fields import * # noqa
diff --git a/flama/config/config.py b/flama/config/config.py
new file mode 100644
index 00000000..9c38fbe1
--- /dev/null
+++ b/flama/config/config.py
@@ -0,0 +1,133 @@
+import dataclasses
+import functools
+import inspect
+import json
+import os
+import typing as t
+
+from flama.config import exceptions, types
+from flama.config.data_structures import FileDict
+
+__all__ = ["Config"]
+
+R = t.TypeVar("R")
+Unknown = t.NewType("Unknown", str)
+
+
+class Config:
+ """Tool for retrieving config parameters from a config file or environment variables.
+
+ This class can handle several config file formats, some examples:
+ >>> config = Config(".env", "ini")
+ >>> config_json = Config("config.json", "json")
+ >>> config_yaml = Config("config.yaml", "yaml")
+ >>> config_toml = Config("config.toml", "toml")
+
+    Once the config object is created, it allows getting config parameters using the following syntax:
+ >>> FOO = config("FOO", default="bar")
+
+    The value of the config parameter will be looked up in the following order:
+ 1. Environment variable
+ 2. Config file
+ 3. Explicit default value
+
+    It is possible to convert config parameters into Python types by specifying the type as part of the call. In this
+ case an environment variable contains the values 'true' or 'false':
+ >>> DEBUG = config("DEBUG", type=bool)
+
+    Also, a more complex conversion is possible by using custom functions; let's take the example of a variable where
+    zero means False and any value other than zero means True:
+ >>> DEBUG = config("DEBUG", type=lambda x: x != 0)
+
+    Finally, there is a case when the config parameter is a valid JSON value; in this case it is possible to convert
+ it into a dataclass:
+ >>> @dataclasses.dataclass
+ ... class Puppy:
+ ... name: str
+ ... age: int
+ >>> PUPPY = config("PUPPY", type=Puppy)
+ """
+
+ def __init__(
+ self,
+ config_file: t.Optional[t.Union[str, os.PathLike]],
+ format: t.Union[str, types.FileFormat] = types.FileFormat.INI,
+ ) -> None:
+ """Tool for retrieving config parameters from a config file or environment variables.
+
+ :param config_file: Config file path.
+ :param format: Config file format.
+ """
+ self.config_file = FileDict(config_file, format) if config_file else None
+
+ def _get_item_from_environment(self, key: str) -> t.Any:
+ return os.environ[key]
+
+ def _get_item_from_config_file(self, key: str) -> t.Any:
+ return functools.reduce(lambda x, k: x[k], key.split("."), self.config_file or {})
+
+ def _get_item(self, key: str, default: R = Unknown) -> R:
+ try:
+ return self._get_item_from_environment(key)
+ except KeyError:
+ ...
+
+ try:
+ return self._get_item_from_config_file(key)
+ except KeyError:
+ ...
+
+ if default is not Unknown:
+ return t.cast(R, default)
+
+ raise KeyError(key)
+
+ def _build_dataclass(self, data: t.Any, type: t.Type[R]) -> R:
+ if isinstance(data, str):
+ try:
+ data = json.loads(data)
+ except Exception as e:
+ raise exceptions.ConfigError("Cannot parse value as json for config dataclass") from e
+
+ if not isinstance(data, dict):
+ raise exceptions.ConfigError("Wrong value for config dataclass")
+
+ try:
+ return type(**(json.loads(data) if isinstance(data, str) else data))
+ except Exception as e:
+ raise exceptions.ConfigError("Cannot create config dataclass") from e
+
+ def __call__(
+ self, key: str, /, default: R = Unknown, type: t.Optional[t.Union[t.Type[R], t.Callable[[t.Any], R]]] = None
+ ) -> R:
+ """Get config parameter value.
+
+ :param key: Config parameter name.
+ :param default: Default value if config parameter is not found.
+ :param type: Type to convert config parameter value.
+ :return: Config parameter value.
+ :raises KeyError: If config parameter is not found and no default value is specified.
+ :raises ConfigError: If config parameter cannot be converted to the specified type.
+
+ Examples:
+ >>> config = Config(".env", "ini")
+ >>> FOO = config("FOO", default="bar") # Default value if FOO is not found
+ >>> DEBUG = config("DEBUG", type=bool) # Convert value to boolean
+ >>> @dataclasses.dataclass
+ ... class Puppy:
+ ... name: str
+ ... age: int
+ >>> PUPPY = config("PUPPY", type=Puppy) # Parse json value and convert it to a dataclass
+ """
+ value = self._get_item(key, default)
+
+ if type is None:
+ return value
+
+ if dataclasses.is_dataclass(type) and inspect.isclass(type):
+ return self._build_dataclass(data=value, type=type)
+
+ try:
+ return t.cast(t.Callable[[t.Any], R], type)(value)
+ except Exception as e:
+ raise exceptions.ConfigError("Cannot create config type") from e
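
A hedged sketch of the new Config tool in use; the ".env" file and the variable names are assumptions for illustration:

    import dataclasses

    from flama.config import Config

    @dataclasses.dataclass
    class Database:
        host: str
        port: int

    config = Config(".env", "ini")

    # Environment variables win over the config file, which wins over the default.
    DEBUG = config("DEBUG", default=False, type=lambda x: str(x).lower() == "true")
    DATABASE = config("DATABASE", type=Database)  # parses a JSON string into the dataclass
    SECRET = config("SECRET", default=None)       # omitting default would raise KeyError when unset
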
diff --git a/flama/config/data_structures.py b/flama/config/data_structures.py
new file mode 100644
index 00000000..d8a17b0d
--- /dev/null
+++ b/flama/config/data_structures.py
@@ -0,0 +1,48 @@
+import os
+import typing as t
+
+from flama.config import exceptions, loaders, types
+
+__all__ = ["FileDict"]
+
+
+class FileDict(t.Mapping[str, t.Any]):
+ """A dictionary that loads its data from a file. Supports JSON, TOML, YAML and INI files."""
+
+ _LOADERS: t.Dict[types.FileFormat, loaders.FileLoader] = {
+ types.FileFormat.JSON: loaders.JSONFileLoader(),
+ types.FileFormat.TOML: loaders.TOMLFileLoader(),
+ types.FileFormat.YAML: loaders.YAMLFileLoader(),
+ types.FileFormat.INI: loaders.ConfigFileLoader(),
+ }
+
+ def __init__(self, config_file: t.Union[str, os.PathLike], format: t.Union[str, types.FileFormat]):
+ """A dictionary that loads its data from a file. Supports JSON, TOML, YAML and INI files.
+
+ :param config_file: Config file path.
+ :param format: Config file format.
+ """
+ try:
+ self._loader = self._LOADERS[types.FileFormat[format.upper()] if isinstance(format, str) else format]
+ except KeyError:
+ raise exceptions.ConfigError("Wrong config file format")
+
+ try:
+ self._data = self._loader.load(config_file)
+ except Exception:
+ raise exceptions.ConfigError("Config file cannot be loaded")
+
+ def __getitem__(self, key: str) -> t.Any:
+ return self._data.__getitem__(key)
+
+ def __eq__(self, other: object) -> bool:
+ return self._data.__eq__(other)
+
+ def __iter__(self) -> t.Iterator:
+ return self._data.__iter__()
+
+ def __len__(self) -> int:
+ return self._data.__len__()
+
+ def __repr__(self) -> str:
+ return f"FileDict({self._data.__repr__()})"
diff --git a/flama/config/exceptions.py b/flama/config/exceptions.py
new file mode 100644
index 00000000..41a452c2
--- /dev/null
+++ b/flama/config/exceptions.py
@@ -0,0 +1,7 @@
+__all__ = ["ConfigError"]
+
+
+class ConfigError(Exception):
+ """Exception raised when there is a problem with the config."""
+
+ ...
diff --git a/flama/config/fields.py b/flama/config/fields.py
new file mode 100644
index 00000000..95198dce
--- /dev/null
+++ b/flama/config/fields.py
@@ -0,0 +1,87 @@
+import dataclasses
+import typing as t
+import urllib.parse
+
+__all__ = ["Secret", "URL"]
+
+
+class Secret:
+ """Secret object.
+
+ It is used to hide sensitive data in logs and exceptions. It is recommended to use this class for all sensitive
+ data. For example: passwords, tokens, etc.
+
+ This class is not meant to be used for encryption or security purposes.
+ """
+
+ def __init__(self, value: str):
+ """Secret object.
+
+ :param value: Sensitive data.
+ """
+ self._value = value
+
+ def __repr__(self) -> str:
+ return "Secret('*****')"
+
+ def __str__(self) -> str:
+ return self._value
+
+ def __bool__(self) -> bool:
+ return bool(self._value)
+
+ def __eq__(self, other: t.Any) -> bool:
+ return self._value == other
+
+
[email protected](frozen=True)
+class URL:
+ """URL object. It is used to parse and build URLs."""
+
+ scheme: str
+ netloc: str
+ path: str
+ params: str
+ query: str
+ fragment: str
+
+ def __init__(self, url: str):
+ """URL object.
+
+ :param url: URL string to be parsed.
+        The string is split into scheme, netloc, path, params, query and fragment components.
+ """
+ parsed_url = urllib.parse.urlparse(url)
+ object.__setattr__(self, "scheme", parsed_url.scheme)
+ object.__setattr__(self, "netloc", parsed_url.netloc)
+ object.__setattr__(self, "path", parsed_url.path)
+ object.__setattr__(self, "params", parsed_url.params)
+ object.__setattr__(self, "query", parsed_url.query)
+ object.__setattr__(self, "fragment", parsed_url.fragment)
+
+ @property
+ def components(self) -> t.Dict[str, t.Optional[str]]:
+ """URL components map.
+
+ :return: Components.
+ """
+ return dataclasses.asdict(self)
+
+ @property
+ def url(self) -> str:
+ """Build URL string.
+
+ :return: URL string.
+ """
+ return str(urllib.parse.urlunparse(tuple(self.components.values())))
+
+ def __str__(self) -> str:
+ return self.url
+
+ def __repr__(self) -> str:
+ return f"URL('{self.url}')"
+
+ def __eq__(self, other: t.Any) -> bool:
+ return (isinstance(other, URL) and self.components == other.components) or (
+ isinstance(other, str) and self.url == other
+ )
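
The two field helpers in action; a short sketch grounded in the methods defined above:

    from flama.config.fields import URL, Secret

    token = Secret("s3cr3t")
    print(repr(token))         # Secret('*****') -- the value never leaks into logs
    print(token == "s3cr3t")   # True, comparisons still work

    url = URL("https://example.com/path?q=1")
    print(url.scheme, url.netloc, url.path)        # https example.com /path
    print(url == "https://example.com/path?q=1")   # True, str comparison uses the rebuilt URL
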
diff --git a/flama/config/loaders.py b/flama/config/loaders.py
new file mode 100644
index 00000000..a438e1df
--- /dev/null
+++ b/flama/config/loaders.py
@@ -0,0 +1,94 @@
+import abc
+import configparser
+import json
+import os
+import sys
+import typing as t
+
+import yaml
+
+if sys.version_info < (3, 11): # PORT: Remove when stop supporting 3.10 # pragma: no cover
+ try:
+ import tomli
+
+ tomllib = tomli
+ except ModuleNotFoundError:
+ tomllib = None
+else:
+ import tomllib
+
+__all__ = ["FileLoader", "ConfigFileLoader", "JSONFileLoader", "YAMLFileLoader", "TOMLFileLoader"]
+
+
+class FileLoader(abc.ABC):
+ """Common interface for loading a file."""
+
+ @abc.abstractmethod
+ def load(self, f: t.Union[str, os.PathLike]) -> t.Dict[str, t.Any]:
+ """Loads a file into a dict.
+
+ :param f: File path.
+ :return: Dict with the file contents.
+ """
+ ...
+
+
+class ConfigFileLoader(FileLoader):
+ """Loads an ini formatted file into a dict."""
+
+ def load(self, f: t.Union[str, os.PathLike]) -> t.Dict[str, t.Any]:
+ """Loads a file into a dict.
+
+ :param f: File path.
+ :return: Dict with the file contents.
+ """
+ parser = configparser.ConfigParser()
+ try:
+ with open(f) as fs:
+ parser.read_file(fs)
+ return {section: dict(parser[section].items()) for section in parser.sections()}
+ except configparser.MissingSectionHeaderError:
+ with open(f) as fs:
+ parser.read_string("[fake_section]\n" + fs.read())
+ return dict(parser["fake_section"].items())
+
+
+class JSONFileLoader(FileLoader):
+ """Loads a json formatted file into a dict."""
+
+ def load(self, f: t.Union[str, os.PathLike]) -> t.Dict[str, t.Any]:
+ """Loads a file into a dict.
+
+ :param f: File path.
+ :return: Dict with the file contents.
+ """
+ with open(f) as fs:
+ return json.load(fs)
+
+
+class YAMLFileLoader(FileLoader):
+ """Loads a yaml formatted file into a dict."""
+
+ def load(self, f: t.Union[str, os.PathLike]) -> t.Dict[str, t.Any]:
+ """Loads a file into a dict.
+
+ :param f: File path.
+ :return: Dict with the file contents.
+ """
+ with open(f) as fs:
+ return yaml.safe_load(fs)
+
+
+class TOMLFileLoader(FileLoader):
+ """Loads a toml formatted file into a dict."""
+
+ def load(self, f: t.Union[str, os.PathLike]) -> t.Dict[str, t.Any]:
+ """Loads a file into a dict.
+
+ :param f: File path.
+ :return: Dict with the file contents.
+ """
+ assert tomllib is not None, "`tomli` must be installed to use TOMLFileLoader in Python versions older than 3.11"
+
+ with open(f, "rb") as fs:
+ return tomllib.load(fs)
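
Each loader exposes the same load() interface; a sketch with assumed file names:

    from flama.config import loaders

    ini_data = loaders.ConfigFileLoader().load("config.ini")    # {section: {key: value}}
    json_data = loaders.JSONFileLoader().load("config.json")
    yaml_data = loaders.YAMLFileLoader().load("config.yaml")
    toml_data = loaders.TOMLFileLoader().load("config.toml")    # needs tomli on Python < 3.11
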
diff --git a/flama/config/types.py b/flama/config/types.py
new file mode 100644
index 00000000..0b807a04
--- /dev/null
+++ b/flama/config/types.py
@@ -0,0 +1,12 @@
+import enum
+
+__all__ = ["FileFormat"]
+
+
+class FileFormat(enum.Enum):
+ """Config file format."""
+
+ INI = enum.auto()
+ JSON = enum.auto()
+ YAML = enum.auto()
+ TOML = enum.auto()
diff --git a/flama/debug/middleware.py b/flama/debug/middleware.py
index 9258b570..8a7ed44d 100644
--- a/flama/debug/middleware.py
+++ b/flama/debug/middleware.py
@@ -136,7 +136,7 @@ async def process_exception(
response = await concurrency.run(handler, scope, receive, send, exc)
- if response:
+ if response and concurrency.is_async(response):
await response(scope, receive, send)
def http_exception_handler(
diff --git a/flama/debug/types.py b/flama/debug/types.py
index 63f50726..d57423ce 100644
--- a/flama/debug/types.py
+++ b/flama/debug/types.py
@@ -8,5 +8,5 @@
HandlerException = t.TypeVar("HandlerException", bound=Exception)
Handler = t.Callable[
[types.Scope, types.Receive, types.Send, HandlerException],
- t.Union[t.Optional["http.Response"], t.Awaitable[None], t.Awaitable[t.Optional["http.Response"]]],
+ t.Union[t.Optional["http.Response"], t.Awaitable[t.Optional["http.Response"]]],
]
diff --git a/flama/events.py b/flama/events.py
index adc9d9fd..bb666364 100644
--- a/flama/events.py
+++ b/flama/events.py
@@ -6,8 +6,8 @@
class Events:
"""Application events register."""
- startup: t.List[t.Callable] = dataclasses.field(default_factory=list)
- shutdown: t.List[t.Callable] = dataclasses.field(default_factory=list)
+ startup: t.List[t.Callable[..., t.Coroutine[t.Any, t.Any, None]]] = dataclasses.field(default_factory=list)
+ shutdown: t.List[t.Callable[..., t.Coroutine[t.Any, t.Any, None]]] = dataclasses.field(default_factory=list)
def register(self, event: str, handler: t.Callable) -> None:
"""Register a new event.
@@ -19,7 +19,7 @@ def register(self, event: str, handler: t.Callable) -> None:
getattr(self, event).append(handler)
@classmethod
- def build(cls, **events: t.List[t.Callable]) -> "Events":
+ def build(cls, **events: t.List[t.Callable[..., t.Coroutine[t.Any, t.Any, None]]]) -> "Events":
"""Build events register from dict.
:param events: Events to register.
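
Since handlers are now typed as coroutine functions, a registration sketch (the handler names are illustrative):

    from flama.events import Events

    async def open_pool() -> None: ...
    async def close_pool() -> None: ...

    events = Events.build(startup=[open_pool], shutdown=[close_pool])
    events.register("shutdown", close_pool)  # appends to the matching handler list
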
diff --git a/flama/exceptions.py b/flama/exceptions.py
index ed4989ec..b27e54bb 100644
--- a/flama/exceptions.py
+++ b/flama/exceptions.py
@@ -3,8 +3,6 @@
import starlette.exceptions
-import flama.schemas.exceptions
-
__all__ = [
"DecodeError",
"HTTPException",
@@ -16,7 +14,9 @@
"MethodNotAllowedException",
]
-__all__ += flama.schemas.exceptions.__all__
+
+class ApplicationError(Exception):
+ ...
class DecodeError(Exception):
diff --git a/flama/http.py b/flama/http.py
index b2a13d73..c129790d 100644
--- a/flama/http.py
+++ b/flama/http.py
@@ -16,9 +16,6 @@
from flama import schemas, types
from flama.exceptions import HTTPException, SerializationError
-if t.TYPE_CHECKING:
- from flama.types.schema import _T_Schema
-
__all__ = [
"Method",
"Request",
@@ -114,7 +111,7 @@ class FileResponse(starlette.responses.FileResponse, Response):
class APIResponse(JSONResponse):
media_type = "application/json"
- def __init__(self, content: t.Any = None, schema: t.Optional["_T_Schema"] = None, *args, **kwargs):
+ def __init__(self, content: t.Any = None, schema: t.Optional["schemas.Schema"] = None, *args, **kwargs):
self.schema = schema
super().__init__(content, *args, **kwargs)
diff --git a/flama/injection/components.py b/flama/injection/components.py
index 74a2ca5c..56bcc657 100644
--- a/flama/injection/components.py
+++ b/flama/injection/components.py
@@ -1,3 +1,4 @@
+import abc
import asyncio
import inspect
import typing as t
@@ -8,7 +9,7 @@
__all__ = ["Component", "Components"]
-class Component:
+class Component(metaclass=abc.ABCMeta):
def identity(self, parameter: Parameter) -> str:
"""Each component needs a unique identifier string that we use for lookups from the `state` dictionary when we
run the dependency injection.
@@ -22,8 +23,7 @@ def identity(self, parameter: Parameter) -> str:
parameter_type = parameter.type.__class__.__name__
component_id = f"{id(parameter.type)}:{parameter_type}"
- # If `resolve_parameter` includes `Parameter` then we use an identifier that is additionally parameterized by
- # the parameter name.
+ # If `resolve` includes `Parameter` then use an id that is additionally parameterized by the parameter name.
args = inspect.signature(self.resolve).parameters.values() # type: ignore[attr-defined]
if Parameter in [arg.annotation for arg in args]:
component_id += f":{parameter.name.lower()}"
@@ -65,17 +65,21 @@ async def __call__(self, *args, **kwargs):
:param kwargs: Resolve keyword arguments.
:return: Resolve result.
"""
- if asyncio.iscoroutinefunction(self.resolve):
- return await self.resolve(*args, **kwargs)
+ if asyncio.iscoroutinefunction(self.resolve): # type: ignore[attr-defined]
+ return await self.resolve(*args, **kwargs) # type: ignore[attr-defined]
- return self.resolve(*args, **kwargs)
+ return self.resolve(*args, **kwargs) # type: ignore[attr-defined]
def __str__(self) -> str:
return str(self.__class__.__name__)
+ @abc.abstractmethod
+ def resolve(self, *args, **kwargs) -> t.Any:
+ ...
+
class Components(t.Tuple[Component, ...]):
- def __new__(cls, components=None):
+ def __new__(cls, components: t.Optional[t.Union[t.Sequence[Component], t.Set[Component]]] = None):
return super().__new__(cls, components or [])
def __eq__(self, other: t.Any) -> bool:
diff --git a/flama/lifespan.py b/flama/lifespan.py
index 7d053a29..08e9ac80 100644
--- a/flama/lifespan.py
+++ b/flama/lifespan.py
@@ -1,40 +1,14 @@
-import contextlib
+import logging
import typing as t
-import anyio
-
-from flama import types
+from flama import concurrency, exceptions, types
if t.TYPE_CHECKING:
from flama import Flama
__all__ = ["Lifespan"]
-
-class Context(t.AsyncContextManager):
- def __init__(
- self,
- app: "Flama",
- lifespan: t.Optional[t.Callable[[t.Optional["Flama"]], t.AsyncContextManager]] = None,
- ):
- self.app = app
- self.lifespan = lifespan(app) if lifespan else contextlib.AsyncExitStack()
-
- async def __aenter__(self) -> None:
- async with anyio.create_task_group() as tg:
- for handler in self.app.events.startup:
- tg.start_soon(handler)
-
- await self.lifespan.__aenter__()
-
- async def __aexit__(
- self, exc_type: t.Optional[t.Type[BaseException]], exc_val: t.Optional[BaseException], exc_tb
- ) -> None:
- await self.lifespan.__aexit__(exc_type, exc_val, exc_tb)
-
- async with anyio.create_task_group() as tg:
- for handler in self.app.events.shutdown:
- tg.start_soon(handler)
+logger = logging.getLogger(__name__)
class Lifespan(types.AppClass):
@@ -42,25 +16,49 @@ def __init__(self, lifespan: t.Optional[t.Callable[[t.Optional["Flama"]], t.Asyn
self.lifespan = lifespan
async def __call__(self, scope: types.Scope, receive: types.Receive, send: types.Send) -> None:
- """Handles a lifespan request by initializing all mo
+ """Handles a lifespan request by initialising and finalising all modules and running a user defined lifespan.
:param scope: ASGI request.
:param receive: ASGI receive.
:param send: ASGI send.
"""
- started = False
- await receive()
- try:
- async with Context(scope["app"], self.lifespan):
- await send(types.Message({"type": "lifespan.startup.complete"}))
- started = True
- await receive()
- except BaseException as e:
- await send(
- types.Message(
- {"type": "lifespan.shutdown.failed" if started else "lifespan.startup.failed", "message": str(e)}
- )
- )
- raise
- else:
- await send(types.Message({"type": "lifespan.shutdown.complete"}))
+ app = scope["app"]
+ while True:
+ message = await receive()
+ if message["type"] == "lifespan.startup":
+ try:
+ logger.info("Application starting")
+ app._status = types.AppStatus.STARTING
+ await self._startup(app)
+ await send(types.Message({"type": "lifespan.startup.complete"}))
+ app._status = types.AppStatus.READY
+ logger.info("Application ready")
+ except BaseException as e:
+ logger.exception("Application start failed")
+ app._status = types.AppStatus.FAILED
+ await send(types.Message({"type": "lifespan.startup.failed", "message": str(e)}))
+ raise exceptions.ApplicationError("Lifespan startup failed") from e
+ elif message["type"] == "lifespan.shutdown":
+ try:
+ logger.info("Application shutting down")
+ app._status = types.AppStatus.SHUTTING_DOWN
+ await self._shutdown(app)
+ await send(types.Message({"type": "lifespan.shutdown.complete"}))
+ app._status = types.AppStatus.SHUT_DOWN
+ logger.info("Application shut down")
+ return
+ except BaseException as e:
+ await send(types.Message({"type": "lifespan.shutdown.failed", "message": str(e)}))
+ app._status = types.AppStatus.FAILED
+ logger.exception("Application shutdown failed")
+ raise exceptions.ApplicationError("Lifespan shutdown failed") from e
+
+ async def _startup(self, app: "Flama") -> None:
+ await concurrency.run_task_group(*(f() for f in app.events.startup))
+ if self.lifespan:
+ await self.lifespan(app).__aenter__()
+
+ async def _shutdown(self, app: "Flama") -> None:
+ if self.lifespan:
+ await self.lifespan(app).__aexit__(None, None, None)
+ await concurrency.run_task_group(*(f() for f in app.events.shutdown))
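
The rewritten Lifespan drives startup handlers, then the user lifespan, and the reverse order on shutdown. A hedged sketch of a user-defined lifespan as consumed here; wiring it into the application is assumed from the wider codebase:

    import contextlib
    import typing as t

    @contextlib.asynccontextmanager
    async def lifespan(app) -> t.AsyncIterator[None]:
        print("entered after all startup event handlers")   # see _startup above
        yield
        print("exited before the shutdown event handlers")  # see _shutdown above
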
diff --git a/flama/middleware.py b/flama/middleware.py
index 0e4bc9fb..904064ec 100644
--- a/flama/middleware.py
+++ b/flama/middleware.py
@@ -9,6 +9,7 @@
from starlette.middleware.httpsredirect import HTTPSRedirectMiddleware
from starlette.middleware.trustedhost import TrustedHostMiddleware
+from flama import concurrency
from flama.debug.middleware import ExceptionMiddleware, ServerErrorMiddleware
try:
@@ -39,7 +40,9 @@ def __init__(self, middleware: "types.Middleware", **kwargs: t.Any) -> None:
self.middleware = middleware
self.kwargs = kwargs
- def __call__(self, app: "types.App"):
+ def __call__(
+ self, app: "types.App"
+ ) -> t.Union["types.App", t.Awaitable["types.App"], "types.MiddlewareClass", "types.MiddlewareAsyncClass"]:
return self.middleware(app, **self.kwargs)
def __repr__(self) -> str:
@@ -59,13 +62,17 @@ def __init__(self, app: "types.App", middleware: t.Sequence[Middleware], debug:
self._exception_handlers: t.Dict[
t.Union[int, t.Type[Exception]], t.Callable[["Request", Exception], "Response"]
] = {}
- self._stack: t.Optional["types.App"] = None
+ self._stack: t.Optional[
+ t.Union["types.App", t.Awaitable["types.App"], "types.MiddlewareClass", "types.MiddlewareAsyncClass"]
+ ] = None
@property
- def stack(self) -> "types.App":
+ def stack(
+ self,
+ ) -> t.Union["types.App", t.Awaitable["types.App"], "types.MiddlewareClass", "types.MiddlewareAsyncClass"]:
if self._stack is None:
self._stack = functools.reduce(
- lambda app, middleware: middleware(app=app),
+ lambda app, middleware: middleware(app=app), # type: ignore
[
Middleware(ExceptionMiddleware, handlers=self._exception_handlers, debug=self.debug),
*self.middleware,
@@ -100,4 +107,4 @@ def add_middleware(self, middleware: Middleware):
del self.stack
async def __call__(self, scope: "types.Scope", receive: "types.Receive", send: "types.Send") -> None:
- await self.stack(scope, receive, send)
+ await concurrency.run(self.stack, scope, receive, send) # type: ignore
diff --git a/flama/models/base.py b/flama/models/base.py
index fb82386e..ad8ba952 100644
--- a/flama/models/base.py
+++ b/flama/models/base.py
@@ -8,7 +8,7 @@
class Model:
- def __init__(self, model: t.Any, meta: "Metadata", artifacts: "Artifacts"):
+ def __init__(self, model: t.Any, meta: "Metadata", artifacts: t.Optional["Artifacts"]):
self.model = model
self.meta = meta
self.artifacts = artifacts
diff --git a/flama/models/models/tensorflow.py b/flama/models/models/tensorflow.py
index eaa2e91b..fedd602e 100644
--- a/flama/models/models/tensorflow.py
+++ b/flama/models/models/tensorflow.py
@@ -4,16 +4,16 @@
from flama.models.base import Model
try:
- import tensorflow
+ import tensorflow as tf
except Exception: # pragma: no cover
- tensorflow = None # type: ignore
+ tf = None
class TensorFlowModel(Model):
def predict(self, x: t.List[t.List[t.Any]]) -> t.Any:
- assert tensorflow is not None, "`tensorflow` must be installed to use TensorFlowModel."
+ assert tf is not None, "`tensorflow` must be installed to use TensorFlowModel."
try:
return self.model.predict(x).tolist()
- except (tensorflow.errors.OpError, ValueError):
+ except (tf.errors.OpError, ValueError): # type: ignore
raise exceptions.HTTPException(status_code=400)
diff --git a/flama/models/resource.py b/flama/models/resource.py
index 2f4b3399..819a0eb3 100644
--- a/flama/models/resource.py
+++ b/flama/models/resource.py
@@ -19,7 +19,7 @@
class InspectMixin:
@classmethod
def _add_inspect(
- mcs, name: str, verbose_name: str, model_model_type: t.Type["Model"], **kwargs
+ cls, name: str, verbose_name: str, model_model_type: t.Type["Model"], **kwargs
) -> t.Dict[str, t.Any]:
@resource_method("/", methods=["GET"], name=f"{name}-inspect")
async def inspect(self, model: model_model_type): # type: ignore[valid-type]
@@ -44,7 +44,7 @@ async def inspect(self, model: model_model_type): # type: ignore[valid-type]
class PredictMixin:
@classmethod
def _add_predict(
- mcs, name: str, verbose_name: str, model_model_type: t.Type["Model"], **kwargs
+ cls, name: str, verbose_name: str, model_model_type: t.Type["Model"], **kwargs
) -> t.Dict[str, t.Any]:
@resource_method("/predict/", methods=["POST"], name=f"{name}-predict")
async def predict(
@@ -108,16 +108,16 @@ def __new__(mcs, name: str, bases: t.Tuple[type], namespace: t.Dict[str, t.Any])
return super().__new__(mcs, name, bases, namespace)
@classmethod
- def _get_model_component(mcs, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]) -> "ModelComponent":
+ def _get_model_component(cls, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]) -> "ModelComponent":
try:
- component: "ModelComponent" = mcs._get_attribute("component", bases, namespace, metadata_namespace="model")
+ component: "ModelComponent" = cls._get_attribute("component", bases, namespace, metadata_namespace="model")
return component
except AttributeError:
...
try:
return ModelComponentBuilder.load(
- mcs._get_attribute("model_path", bases, namespace, metadata_namespace="model")
+ cls._get_attribute("model_path", bases, namespace, metadata_namespace="model")
)
except AttributeError:
...
diff --git a/flama/modules.py b/flama/modules.py
index ba3545d9..7217b9dc 100644
--- a/flama/modules.py
+++ b/flama/modules.py
@@ -14,10 +14,10 @@ class _BaseModule:
def __init__(self) -> None:
self.app: "Flama"
- async def on_startup(self):
+ async def on_startup(self) -> None:
...
- async def on_shutdown(self):
+ async def on_shutdown(self) -> None:
...
@@ -33,7 +33,7 @@ class Module(_BaseModule, metaclass=_ModuleMeta):
class Modules(t.Dict[str, Module]):
- def __init__(self, app: "Flama", modules: t.Optional[t.Set[Module]]):
+ def __init__(self, app: "Flama", modules: t.Optional[t.Union[t.Sequence[Module], t.Set[Module]]]):
modules_map: t.Dict[str, t.List[Module]] = defaultdict(list)
for module in modules or []:
module.app = app
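
A sketch of a custom Module using the now explicitly typed hooks; the `name` attribute and the cache are assumptions for illustration:

    from flama.modules import Module

    class CacheModule(Module):
        name = "cache"

        async def on_startup(self) -> None:
            self.cache: dict = {}

        async def on_shutdown(self) -> None:
            self.cache.clear()
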
diff --git a/flama/pagination/__init__.py b/flama/pagination/__init__.py
index eaf00977..f7c3b712 100644
--- a/flama/pagination/__init__.py
+++ b/flama/pagination/__init__.py
@@ -1,12 +1,1 @@
-from flama.pagination.limit_offset import LimitOffsetMixin
-from flama.pagination.page_number import PageNumberMixin
-
-__all__ = ["paginator"]
-
-
-class Paginator(LimitOffsetMixin, PageNumberMixin):
- def __init__(self):
- self.schemas = {}
-
-
-paginator = Paginator()
+from flama.pagination.paginator import paginator # noqa
diff --git a/flama/pagination/decorators.py b/flama/pagination/decorators.py
new file mode 100644
index 00000000..ce86afde
--- /dev/null
+++ b/flama/pagination/decorators.py
@@ -0,0 +1,40 @@
+import abc
+import asyncio
+import inspect
+import typing as t
+
+from flama import schemas, types
+
+
+class PaginationDecoratorFactory:
+ PARAMETERS: t.List[inspect.Parameter]
+
+ @classmethod
+ def decorate(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ func_signature = inspect.signature(func)
+ if "kwargs" not in func_signature.parameters:
+ raise TypeError("Paginated views must define **kwargs param")
+
+ decorated_func = (
+ cls._decorate_async(func, schema) if asyncio.iscoroutinefunction(func) else cls._decorate_sync(func, schema)
+ )
+
+ decorated_func.__signature__ = inspect.Signature( # type: ignore
+ parameters=[
+ *[v for k, v in func_signature.parameters.items() if k != "kwargs"],
+ *cls.PARAMETERS,
+ ],
+ return_annotation=types.Schema[schema], # type: ignore
+ )
+
+ return decorated_func
+
+ @classmethod
+ @abc.abstractmethod
+ def _decorate_async(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ ...
+
+ @classmethod
+ @abc.abstractmethod
+ def _decorate_sync(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ ...
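
The factory rejects views without a **kwargs parameter before dispatching to the sync or async decorator; the check can be reproduced standalone:

    import inspect

    def view(a: int) -> None: ...

    "kwargs" in inspect.signature(view).parameters  # False -> decorate() raises TypeError
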
diff --git a/flama/pagination/limit_offset.py b/flama/pagination/limit_offset.py
deleted file mode 100644
index d466dfee..00000000
--- a/flama/pagination/limit_offset.py
+++ /dev/null
@@ -1,128 +0,0 @@
-import asyncio
-import functools
-import inspect
-import typing as t
-
-from flama import http, schemas, types
-
-try:
- import forge
-except Exception: # pragma: no cover
- forge = None # type: ignore
-
-__all__ = ["LimitOffsetMixin", "LimitOffsetResponse"]
-
-
-class LimitOffsetResponse(http.APIResponse):
- """
- Response paginated based on a limit of elements and an offset.
-
- First 10 elements:
- /resource?offset=0&limit=10
- Elements 20-30:
- /resource?offset=20&limit=10
- """
-
- default_limit = 10
-
- def __init__(
- self,
- schema: types.schema._T_Schema,
- offset: t.Optional[t.Union[int, str]] = None,
- limit: t.Optional[t.Union[int, str]] = None,
- count: t.Optional[bool] = True,
- **kwargs
- ):
- self.offset = int(offset) if offset is not None else 0
- self.limit = int(limit) if limit is not None else self.default_limit
- self.count = count
- super().__init__(schema=schema, **kwargs)
-
- def render(self, content: t.Sequence[t.Any]):
- init = self.offset
- end = self.offset + self.limit
- return super().render(
- {
- "meta": {"limit": self.limit, "offset": self.offset, "count": len(content) if self.count else None},
- "data": content[init:end],
- }
- )
-
-
-class LimitOffsetMixin:
- def limit_offset(self, schema_name: str):
- """
- Decorator for adding pagination behavior to a view. That decorator produces a view based on limit-offset and
- it adds three query parameters to control the pagination: limit, offset and count. Offset has a default value of
- zero to start with the first element of the collection, limit default value is defined in
- :class:`LimitOffsetResponse` and count defines if the response will
- define the total number of elements.
-
- The output field is also modified by :class:`LimitOffsetSchema`,
- creating a new field based on it but using the old output field as the content of its data field.
-
- :param schema_name: Name used for output field.
- :return: Decorated view.
- """
-
- def _inner(func: t.Callable):
- assert forge is not None, "`python-forge` must be installed to use Paginator."
-
- resource_schema = schemas.Schema.from_type(inspect.signature(func).return_annotation).unique_schema
- paginated_schema_name = "LimitOffsetPaginated" + schema_name
- schema = schemas.Schema.build(
- paginated_schema_name,
- schema=schemas.schemas.LimitOffset,
- fields=[schemas.Field("data", resource_schema, multiple=True)],
- ).unique_schema
-
- forge_revision_list = (
- forge.copy(func),
- forge.insert(forge.arg("limit", default=None, type=t.Optional[int]), index=-1),
- forge.insert(forge.arg("offset", default=None, type=t.Optional[int]), index=-1),
- forge.insert(forge.arg("count", default=True, type=bool), index=-1),
- forge.delete("kwargs"),
- forge.returns(types.Schema[schema]), # type: ignore[index,valid-type]
- )
-
- try:
- if asyncio.iscoroutinefunction(func):
-
- @forge.compose(*forge_revision_list)
- @functools.wraps(func)
- async def decorator(
- *args,
- limit: t.Optional[int] = None,
- offset: t.Optional[int] = None,
- count: bool = True,
- **kwargs
- ):
- return LimitOffsetResponse(
- schema=schema, limit=limit, offset=offset, count=count, content=await func(*args, **kwargs)
- )
-
- else:
-
- @forge.compose(*forge_revision_list)
- @functools.wraps(func)
- def decorator(
- *args,
- limit: t.Optional[int] = None,
- offset: t.Optional[int] = None,
- count: bool = True,
- **kwargs
- ):
- return LimitOffsetResponse(
- schema=schema, limit=limit, offset=offset, count=count, content=func(*args, **kwargs)
- )
-
- except ValueError as e:
- raise TypeError("Paginated views must define **kwargs param") from e
- else:
- self.schemas.update( # type: ignore[attr-defined]
- {schema_name: resource_schema, paginated_schema_name: schema}
- )
-
- return decorator
-
- return _inner
diff --git a/flama/pagination/mixins/__init__.py b/flama/pagination/mixins/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/flama/pagination/mixins/limit_offset.py b/flama/pagination/mixins/limit_offset.py
new file mode 100644
index 00000000..103f728b
--- /dev/null
+++ b/flama/pagination/mixins/limit_offset.py
@@ -0,0 +1,131 @@
+import functools
+import inspect
+import typing as t
+
+from flama import http, schemas
+
+__all__ = ["LimitOffsetMixin", "LimitOffsetResponse"]
+
+from flama.pagination.decorators import PaginationDecoratorFactory
+
+
+class LimitOffsetResponse(http.APIResponse):
+ """
+ Response paginated based on a limit of elements and an offset.
+
+ First 10 elements:
+ /resource?offset=0&limit=10
+ Elements 20-30:
+ /resource?offset=20&limit=10
+ """
+
+ default_limit = 10
+
+ def __init__(
+ self,
+ schema: schemas.Schema,
+ offset: t.Optional[t.Union[int, str]] = None,
+ limit: t.Optional[t.Union[int, str]] = None,
+ count: t.Optional[bool] = True,
+ **kwargs,
+ ):
+ self.offset = int(offset) if offset is not None else 0
+ self.limit = int(limit) if limit is not None else self.default_limit
+ self.count = count
+ super().__init__(schema=schema, **kwargs)
+
+ def render(self, content: t.Sequence[t.Any]):
+ init = self.offset
+ end = self.offset + self.limit
+ return super().render(
+ {
+ "meta": {"limit": self.limit, "offset": self.offset, "count": len(content) if self.count else None},
+ "data": content[init:end],
+ }
+ )
+
+
+class LimitOffsetDecoratorFactory(PaginationDecoratorFactory):
+ PARAMETERS = [
+ inspect.Parameter(
+ name="limit", default=None, annotation=t.Optional[int], kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
+ ),
+ inspect.Parameter(
+ name="offset", default=None, annotation=t.Optional[int], kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
+ ),
+ inspect.Parameter(
+ name="count", default=False, annotation=t.Optional[bool], kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
+ ),
+ ]
+
+ @classmethod
+ def _decorate_async(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ @functools.wraps(func)
+ async def decorator(
+ *args,
+ limit: t.Optional[int] = None,
+ offset: t.Optional[int] = None,
+ count: t.Optional[bool] = False,
+ **kwargs,
+ ):
+ return LimitOffsetResponse(
+ schema=schema, limit=limit, offset=offset, count=count, content=await func(*args, **kwargs)
+ )
+
+ return decorator
+
+ @classmethod
+ def _decorate_sync(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ @functools.wraps(func)
+ def decorator(
+ *args,
+ limit: t.Optional[int] = None,
+ offset: t.Optional[int] = None,
+ count: t.Optional[bool] = False,
+ **kwargs,
+ ):
+ return LimitOffsetResponse(
+ schema=schema, limit=limit, offset=offset, count=count, content=func(*args, **kwargs)
+ )
+
+ return decorator
+
+
+class LimitOffsetMixin:
+ def limit_offset(self, schema_name: str):
+ """
+        Decorator for adding pagination behavior to a view. The decorator produces a limit-offset based view and
+        adds three query parameters to control the pagination: limit, offset and count. Offset defaults to zero,
+        so pagination starts at the first element of the collection; the default limit is defined in
+        :class:`LimitOffsetResponse`; and count defines whether the response
+        includes the total number of elements.
+
+ The output field is also modified by :class:`LimitOffsetSchema`,
+ creating a new field based on it but using the old output field as the content of its data field.
+
+ :param schema_name: Name used for output field.
+ :return: Decorated view.
+ """
+
+ def _inner(func: t.Callable) -> t.Callable:
+ resource_schema = schemas.Schema.from_type(inspect.signature(func).return_annotation).unique_schema
+ try:
+ schema_module, schema_class = schema_name.rsplit(".", 1)
+ paginated_schema_name = f"{schema_module}.LimitOffsetPaginated{schema_class}"
+ except ValueError:
+ paginated_schema_name = f"LimitOffsetPaginated{schema_name}"
+ schema = schemas.Schema.build(
+ paginated_schema_name,
+ schema=schemas.schemas.LimitOffset,
+ fields=[schemas.Field("data", resource_schema, multiple=True)],
+ ).unique_schema
+
+ decorator = LimitOffsetDecoratorFactory.decorate(func, schema)
+
+ self.schemas.update( # type: ignore[attr-defined]
+ {schema_name: resource_schema, paginated_schema_name: schema}
+ )
+
+ return decorator
+
+ return _inner
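
LimitOffsetResponse.render slices content with [offset : offset + limit]; a minimal stand-in reproducing that arithmetic:

    content = list(range(100))
    offset, limit = 20, 10
    data = content[offset:offset + limit]   # elements 20..29
    meta = {"limit": limit, "offset": offset, "count": len(content)}
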
diff --git a/flama/pagination/mixins/page_number.py b/flama/pagination/mixins/page_number.py
new file mode 100644
index 00000000..693fe051
--- /dev/null
+++ b/flama/pagination/mixins/page_number.py
@@ -0,0 +1,137 @@
+import functools
+import inspect
+import typing as t
+
+from flama import http, schemas
+
+__all__ = ["PageNumberMixin", "PageNumberResponse"]
+
+from flama.pagination.decorators import PaginationDecoratorFactory
+
+
+class PageNumberResponse(http.APIResponse):
+ """
+ Response paginated based on a page number and a page size.
+
+ First 10 elements:
+ /resource?page=1
+ Third 10 elements:
+ /resource?page=3
+ First 20 elements:
+ /resource?page=1&page_size=20
+ """
+
+ default_page_size = 10
+
+ def __init__(
+ self,
+ schema: schemas.Schema,
+ page: t.Optional[t.Union[int, str]] = None,
+ page_size: t.Optional[t.Union[int, str]] = None,
+ count: t.Optional[bool] = True,
+ **kwargs,
+ ):
+ self.page_number = int(page) if page is not None else 1
+ self.page_size = int(page_size) if page_size is not None else self.default_page_size
+ self.count = count
+ super().__init__(schema=schema, **kwargs)
+
+ def render(self, content: t.Sequence[t.Any]):
+ init = (self.page_number - 1) * self.page_size
+ end = self.page_number * self.page_size
+
+ return super().render(
+ {
+ "meta": {
+ "page": self.page_number,
+ "page_size": self.page_size,
+ "count": len(content) if self.count else None,
+ },
+ "data": content[init:end],
+ }
+ )
+
+
+class PageNumberDecoratorFactory(PaginationDecoratorFactory):
+ PARAMETERS = [
+ inspect.Parameter(
+ name="page", default=None, annotation=t.Optional[int], kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
+ ),
+ inspect.Parameter(
+ name="page_size", default=None, annotation=t.Optional[int], kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
+ ),
+ inspect.Parameter(
+ name="count", default=False, annotation=t.Optional[bool], kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
+ ),
+ ]
+
+ @classmethod
+ def _decorate_async(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ @functools.wraps(func)
+ async def decorator(
+ *args,
+ page: t.Optional[int] = None,
+ page_size: t.Optional[int] = None,
+ count: t.Optional[bool] = False,
+ **kwargs,
+ ):
+ return PageNumberResponse(
+ schema=schema, page=page, page_size=page_size, count=count, content=await func(*args, **kwargs)
+ )
+
+ return decorator
+
+ @classmethod
+ def _decorate_sync(cls, func: t.Callable, schema: schemas.Schema) -> t.Callable:
+ @functools.wraps(func)
+ def decorator(
+ *args,
+ page: t.Optional[int] = None,
+ page_size: t.Optional[int] = None,
+ count: t.Optional[bool] = False,
+ **kwargs,
+ ):
+ return PageNumberResponse(
+ schema=schema, page=page, page_size=page_size, count=count, content=func(*args, **kwargs)
+ )
+
+ return decorator
+
+
+class PageNumberMixin:
+ def page_number(self, schema_name: str) -> t.Callable:
+ """
+        Decorator for adding pagination behavior to a view. The decorator produces a page-numbering based view and
+        adds three query parameters to control the pagination: page, page_size and count. Page defaults to the
+        first page, the default page_size is defined in
+        :class:`PageNumberResponse` and count defines whether the response
+        includes the total number of elements.
+
+ The output field is also modified by :class:`PageNumberSchema`, creating
+ a new field based on it but using the old output field as the content of its data field.
+
+ :param schema_name: Name used for output field.
+ :return: Decorated view.
+ """
+
+ def _inner(func: t.Callable) -> t.Callable:
+ resource_schema = schemas.Schema.from_type(inspect.signature(func).return_annotation).unique_schema
+ try:
+ schema_module, schema_class = schema_name.rsplit(".", 1)
+ paginated_schema_name = f"{schema_module}.PageNumberPaginated{schema_class}"
+ except ValueError:
+ paginated_schema_name = f"PageNumberPaginated{schema_name}"
+ schema = schemas.Schema.build(
+ paginated_schema_name,
+ schema=schemas.schemas.PageNumber,
+ fields=[schemas.Field("data", resource_schema, multiple=True)],
+ ).unique_schema
+
+ decorator = PageNumberDecoratorFactory.decorate(func, schema)
+
+ self.schemas.update( # type: ignore[attr-defined]
+ {schema_name: resource_schema, paginated_schema_name: schema}
+ )
+ return decorator
+
+ return _inner
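
PageNumberResponse.render uses 1-based pages; the same arithmetic in isolation:

    content = list(range(95))
    page, page_size = 3, 10
    init, end = (page - 1) * page_size, page * page_size
    data = content[init:end]   # elements 20..29
    meta = {"page": page, "page_size": page_size, "count": len(content)}
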
diff --git a/flama/pagination/page_number.py b/flama/pagination/page_number.py
deleted file mode 100644
index 8cde4af8..00000000
--- a/flama/pagination/page_number.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import asyncio
-import functools
-import inspect
-import typing as t
-
-from flama import http, schemas, types
-
-try:
- import forge
-except Exception: # pragma: no cover
- forge = None # type: ignore
-
-__all__ = ["PageNumberMixin", "PageNumberResponse"]
-
-
-class PageNumberResponse(http.APIResponse):
- """
- Response paginated based on a page number and a page size.
-
- First 10 elements:
- /resource?page=1
- Third 10 elements:
- /resource?page=3
- First 20 elements:
- /resource?page=1&page_size=20
- """
-
- default_page_size = 10
-
- def __init__(
- self,
- schema: types.schema._T_Schema,
- page: t.Optional[t.Union[int, str]] = None,
- page_size: t.Optional[t.Union[int, str]] = None,
- count: t.Optional[bool] = True,
- **kwargs
- ):
- self.page_number = int(page) if page is not None else 1
- self.page_size = int(page_size) if page_size is not None else self.default_page_size
- self.count = count
- super().__init__(schema=schema, **kwargs)
-
- def render(self, content: t.Sequence[t.Any]):
- init = (self.page_number - 1) * self.page_size
- end = self.page_number * self.page_size
-
- return super().render(
- {
- "meta": {
- "page": self.page_number,
- "page_size": self.page_size,
- "count": len(content) if self.count else None,
- },
- "data": content[init:end],
- }
- )
-
-
-class PageNumberMixin:
- def page_number(self, schema_name: str):
- """
- Decorator for adding pagination behavior to a view. That decorator produces a view based on page numbering and
- it adds three query parameters to control the pagination: page, page_size and count. Page has a default value of
- first page, page_size default value is defined in
- :class:`PageNumberResponse` and count defines if the response will define
- the total number of elements.
-
- The output field is also modified by :class:`PageNumberSchema`, creating
- a new field based on it but using the old output field as the content of its data field.
-
- :param schema_name: Name used for output field.
- :return: Decorated view.
- """
-
- def _inner(func: t.Callable):
- assert forge is not None, "`python-forge` must be installed to use Paginator."
-
- resource_schema = schemas.Schema.from_type(inspect.signature(func).return_annotation).unique_schema
- paginated_schema_name = "PageNumberPaginated" + schema_name
- schema = schemas.Schema.build(
- paginated_schema_name,
- schema=schemas.schemas.PageNumber,
- fields=[schemas.Field("data", resource_schema, multiple=True)],
- ).unique_schema
-
- forge_revision_list = (
- forge.copy(func),
- forge.insert(forge.arg("page", default=None, type=t.Optional[int]), index=-1),
- forge.insert(forge.arg("page_size", default=None, type=t.Optional[int]), index=-1),
- forge.insert(forge.arg("count", default=True, type=bool), index=-1),
- forge.delete("kwargs"),
- forge.returns(types.Schema[schema]), # type: ignore[index,valid-type]
- )
-
- try:
- if asyncio.iscoroutinefunction(func):
-
- @forge.compose(*forge_revision_list)
- @functools.wraps(func)
- async def decorator(
- *args,
- page: t.Optional[int] = None,
- page_size: t.Optional[int] = None,
- count: bool = True,
- **kwargs
- ):
- return PageNumberResponse(
- schema=schema,
- page=page,
- page_size=page_size,
- count=count,
- content=await func(*args, **kwargs),
- )
-
- else:
-
- @forge.compose(*forge_revision_list)
- @functools.wraps(func)
- def decorator(
- *args,
- page: t.Optional[int] = None,
- page_size: t.Optional[int] = None,
- count: bool = True,
- **kwargs
- ):
- return PageNumberResponse(
- schema=schema, page=page, page_size=page_size, count=count, content=func(*args, **kwargs)
- )
-
- except ValueError as e:
- raise TypeError("Paginated views must define **kwargs param") from e
- else:
- self.schemas.update( # type: ignore[attr-defined]
- {schema_name: resource_schema, paginated_schema_name: schema}
- )
- return decorator
-
- return _inner
diff --git a/flama/pagination/paginator.py b/flama/pagination/paginator.py
new file mode 100644
index 00000000..97e4acb3
--- /dev/null
+++ b/flama/pagination/paginator.py
@@ -0,0 +1,12 @@
+from flama.pagination.mixins.limit_offset import LimitOffsetMixin
+from flama.pagination.mixins.page_number import PageNumberMixin
+
+__all__ = ["paginator"]
+
+
+class Paginator(LimitOffsetMixin, PageNumberMixin):
+ def __init__(self):
+ self.schemas = {}
+
+
+paginator = Paginator()
diff --git a/flama/resources/crud.py b/flama/resources/crud.py
index 0618ae99..f6b8c123 100644
--- a/flama/resources/crud.py
+++ b/flama/resources/crud.py
@@ -30,7 +30,7 @@
class CreateMixin:
@classmethod
def _add_create(
- mcs,
+ cls,
name: str,
verbose_name: str,
rest_schemas: data_structures.Schemas,
@@ -77,7 +77,7 @@ async def create(
class RetrieveMixin:
@classmethod
def _add_retrieve(
- mcs,
+ cls,
name: str,
verbose_name: str,
rest_schemas: data_structures.Schemas,
@@ -122,7 +122,7 @@ async def retrieve(
class UpdateMixin:
@classmethod
def _add_update(
- mcs,
+ cls,
name: str,
verbose_name: str,
rest_schemas: data_structures.Schemas,
@@ -183,7 +183,7 @@ async def update(
class DeleteMixin:
@classmethod
def _add_delete(
- mcs, name: str, verbose_name: str, rest_model: data_structures.Model, **kwargs
+ cls, name: str, verbose_name: str, rest_model: data_structures.Model, **kwargs
) -> t.Dict[str, t.Any]:
@resource_method("/{element_id}/", methods=["DELETE"], name=f"{name}-delete")
async def delete(
@@ -229,7 +229,7 @@ async def delete(
class ListMixin:
@classmethod
def _add_list(
- mcs, name: str, verbose_name: str, rest_schemas: data_structures.Schemas, **kwargs
+ cls, name: str, verbose_name: str, rest_schemas: data_structures.Schemas, **kwargs
) -> t.Dict[str, t.Any]:
async def filter(self, app, *clauses, **filters) -> t.List[t.Dict]:
async with app.sqlalchemy.engine.begin() as connection:
@@ -269,7 +269,7 @@ async def list(
class DropMixin:
@classmethod
- def _add_drop(mcs, name: str, verbose_name: str, **kwargs) -> t.Dict[str, t.Any]:
+ def _add_drop(cls, name: str, verbose_name: str, **kwargs) -> t.Dict[str, t.Any]:
@resource_method("/", methods=["DELETE"], name=f"{name}-drop")
async def drop(
self, scope: types.Scope
diff --git a/flama/resources/data_structures.py b/flama/resources/data_structures.py
index e31464fe..99cf58f5 100644
--- a/flama/resources/data_structures.py
+++ b/flama/resources/data_structures.py
@@ -4,7 +4,7 @@
try:
from sqlalchemy import Table
except Exception: # pragma: no cover
- Table = typing.Any # type: ignore[assignment,misc]
+ Table = typing.Any
__all__ = ["Model", "PrimaryKey", "Schema", "Metadata", "MethodMetadata"]
@@ -17,7 +17,7 @@ class PrimaryKey:
@dataclasses.dataclass
class Model:
- table: Table
+ table: Table # type: ignore
primary_key: PrimaryKey
diff --git a/flama/resources/modules.py b/flama/resources/modules.py
index 7bccb705..d817262a 100644
--- a/flama/resources/modules.py
+++ b/flama/resources/modules.py
@@ -2,11 +2,11 @@
import typing as t
from flama.modules import Module
+from flama.resources.resource import BaseResource
from flama.resources.routing import ResourceRoute
if t.TYPE_CHECKING:
from flama import types
- from flama.resources.resource import BaseResource
__all__ = ["ResourcesModule"]
@@ -17,7 +17,7 @@ class ResourcesModule(Module):
def add_resource(
self,
path: str,
- resource: t.Union["BaseResource", t.Type["BaseResource"]],
+ resource: t.Union[BaseResource, t.Type[BaseResource]],
tags: t.Optional[t.Dict[str, t.Dict[str, "types.Tag"]]] = None,
*args,
**kwargs
@@ -28,8 +28,12 @@ def add_resource(
:param tags: Tags to add to the resource.
:param resource: Resource class.
"""
- # Handle class or instance objects
- resource_instance: "BaseResource" = resource(*args, **kwargs) if inspect.isclass(resource) else resource
+ if inspect.isclass(resource) and issubclass(resource, BaseResource):
+ resource_instance = resource(*args, **kwargs)
+ elif isinstance(resource, BaseResource):
+ resource_instance = resource
+ else:
+ raise ValueError("Wrong resource")
self.app.mount(mount=ResourceRoute(path, resource_instance, tags))
@@ -45,7 +49,7 @@ def resource(
:return: Decorated resource class.
"""
- def decorator(resource: t.Type["BaseResource"]) -> t.Type["BaseResource"]:
+ def decorator(resource: t.Type[BaseResource]) -> t.Type[BaseResource]:
self.add_resource(path, resource, tags, *args, **kwargs)
return resource
diff --git a/flama/resources/resource.py b/flama/resources/resource.py
index 1f300efa..75184313 100644
--- a/flama/resources/resource.py
+++ b/flama/resources/resource.py
@@ -44,21 +44,19 @@ def __new__(mcs, name: str, bases: t.Tuple[type], namespace: t.Dict[str, t.Any])
return super().__new__(mcs, name, bases, namespace)
@classmethod
- def _get_mro(mcs, *classes: type) -> t.List[t.Type]:
+ def _get_mro(cls, *classes: type) -> t.List[t.Type]:
"""Generate the MRO list for given base class or list of base classes.
:param classes: Base classes.
:return: MRO list.
"""
return list(
- dict.fromkeys(
- [y for x in [[cls.__mro__[0]] + mcs._get_mro(*cls.__mro__[1:]) for cls in classes] for y in x]
- )
+ dict.fromkeys([y for x in [[c.__mro__[0]] + cls._get_mro(*c.__mro__[1:]) for c in classes] for y in x])
)
@classmethod
def _get_attribute(
- mcs,
+ cls,
attribute: str,
bases: t.Sequence[t.Any],
namespace: t.Dict[str, t.Any],
@@ -74,7 +72,7 @@ def _get_attribute(
try:
return namespace.pop(attribute)
except KeyError:
- for base in mcs._get_mro(*bases):
+ for base in cls._get_mro(*bases):
if hasattr(base, "_meta"):
if attribute in base._meta.namespaces.get(metadata_namespace, {}):
return base._meta.namespaces[metadata_namespace][attribute]
@@ -88,7 +86,7 @@ def _get_attribute(
raise AttributeError(ResourceAttributeError.ATTRIBUTE_NOT_FOUND.format(attribute=attribute))
@classmethod
- def _get_resource_name(mcs, name: str, namespace: t.Dict[str, t.Any]) -> t.Tuple[str, str]:
+ def _get_resource_name(cls, name: str, namespace: t.Dict[str, t.Any]) -> t.Tuple[str, str]:
"""Look for a resource name in namespace and check it's a valid name.
:param name: Class name.
@@ -104,7 +102,7 @@ def _get_resource_name(mcs, name: str, namespace: t.Dict[str, t.Any]) -> t.Tuple
return resource_name, namespace.pop("verbose_name", resource_name)
@classmethod
- def _build_routes(mcs, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable]:
+ def _build_routes(cls, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable]:
"""Builds the routes' descriptor.
:param namespace: Variables namespace used to create the class.
@@ -116,7 +114,7 @@ def _build_routes(mcs, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable]
}
@classmethod
- def _build_methods(mcs, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable]:
+ def _build_methods(cls, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable]:
"""Builds a namespace containing all resource methods. Look for all methods listed in METHODS attribute and
named '_add_[method]'.
@@ -124,7 +122,7 @@ def _build_methods(mcs, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable
:return: Methods namespace.
"""
# Get available methods
- methods = [getattr(mcs, f"_add_{method}") for method in mcs.METHODS if hasattr(mcs, f"_add_{method}")]
+ methods = [getattr(cls, f"_add_{method}") for method in cls.METHODS if hasattr(cls, f"_add_{method}")]
# Generate methods
methods_namespace = {
@@ -135,7 +133,7 @@ def _build_methods(mcs, namespace: t.Dict[str, t.Any]) -> t.Dict[str, t.Callable
# Preserve already defined methods
methods_namespace.update(
- {method: methods_namespace[f"_{method}"] for method in mcs.METHODS if method not in namespace}
+ {method: methods_namespace[f"_{method}"] for method in cls.METHODS if method not in namespace}
)
return methods_namespace
diff --git a/flama/resources/rest.py b/flama/resources/rest.py
index d549a8df..0771eeaf 100644
--- a/flama/resources/rest.py
+++ b/flama/resources/rest.py
@@ -14,7 +14,7 @@
__all__ = ["RESTResource", "RESTResourceType"]
-PK_MAPPING = {
+PK_MAPPING: t.Dict[t.Any, t.Any] = {
sqlalchemy.Integer: int,
sqlalchemy.String: str,
sqlalchemy.Date: datetime.date,
@@ -60,14 +60,14 @@ def __new__(mcs, name: str, bases: t.Tuple[type], namespace: t.Dict[str, t.Any])
return super().__new__(mcs, name, bases, namespace)
@classmethod
- def _get_model(mcs, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]) -> data_structures.Model:
+ def _get_model(cls, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]) -> data_structures.Model:
"""Look for the resource model and checks if a primary key is defined with a valid type.
:param bases: List of superclasses.
:param namespace: Variables namespace used to create the class.
:return: Resource model.
"""
- model = mcs._get_attribute("model", bases, namespace, metadata_namespace="rest")
+ model = cls._get_attribute("model", bases, namespace, metadata_namespace="rest")
# Already defined model probably because resource inheritance, so no need to create it
if isinstance(model, data_structures.Model):
@@ -99,7 +99,7 @@ def _get_model(mcs, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]) ->
@classmethod
def _get_schemas(
- mcs, name: str, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]
+ cls, name: str, bases: t.Sequence[t.Any], namespace: t.Dict[str, t.Any]
) -> data_structures.Schemas:
"""Look for the resource schema or the pair of input and output schemas.
@@ -112,11 +112,11 @@ def _get_schemas(
return data_structures.Schemas(
input=data_structures.Schema(
name="Input" + name,
- schema=mcs._get_attribute("input_schema", bases, namespace, metadata_namespace="rest"),
+ schema=cls._get_attribute("input_schema", bases, namespace, metadata_namespace="rest"),
),
output=data_structures.Schema(
name="Output" + name,
- schema=mcs._get_attribute("output_schema", bases, namespace, metadata_namespace="rest"),
+ schema=cls._get_attribute("output_schema", bases, namespace, metadata_namespace="rest"),
),
)
except AttributeError:
@@ -124,14 +124,14 @@ def _get_schemas(
try:
schema = data_structures.Schema(
- name=name, schema=mcs._get_attribute("schema", bases, namespace, metadata_namespace="rest")
+ name=name, schema=cls._get_attribute("schema", bases, namespace, metadata_namespace="rest")
)
return data_structures.Schemas(input=schema, output=schema)
except AttributeError:
...
try:
- schemas: data_structures.Schemas = mcs._get_attribute(
+ schemas: data_structures.Schemas = cls._get_attribute(
"schemas", bases, namespace, metadata_namespace="rest"
)
return schemas
diff --git a/flama/resources/routing.py b/flama/resources/routing.py
index d40968cf..7d770ae7 100644
--- a/flama/resources/routing.py
+++ b/flama/resources/routing.py
@@ -23,7 +23,9 @@ def __init__(
# Handle class or instance objects
self.resource = resource() if inspect.isclass(resource) else resource
- assert set(self.resource.routes.keys()) >= set(tags.keys()), "Tags must be defined only for existing routes."
+ assert set(self.resource.routes.keys()) >= set( # type: ignore
+ tags.keys()
+ ), "Tags must be defined only for existing routes."
routes = [
Route(
@@ -33,10 +35,10 @@ def __init__(
name=route._meta.name or route.__name__,
tags=tags.get(name, route._meta.tags),
)
- for name, route in self.resource.routes.items()
+ for name, route in self.resource.routes.items() # type: ignore
]
- super().__init__(path=path, routes=routes, name=self.resource._meta.name)
+ super().__init__(path=path, routes=routes, name=self.resource._meta.name) # type: ignore
def resource_method(
@@ -56,7 +58,7 @@ def resource_method(
def wrapper(func):
func._meta = data_structures.MethodMetadata(
- path=path, methods=methods if methods is not None else {"GET"}, name=name, tags=tags or {}
+ path=path, methods=set(methods) if methods is not None else {"GET"}, name=name, tags=tags or {}
)
return func
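
The `set(methods)` change above lets callers pass any iterable of HTTP methods while the stored metadata is always a set. A minimal sketch of that decorator pattern (`MethodMetadata` here is a stand-in dataclass, not flama's actual type):

```python
import dataclasses
import typing as t

@dataclasses.dataclass
class MethodMetadata:
    path: str
    methods: t.Set[str]

def resource_method(path: str, methods: t.Optional[t.Iterable[str]] = None):
    def wrapper(func):
        # Normalize to a set so duplicates collapse and membership is O(1).
        func._meta = MethodMetadata(
            path=path,
            methods=set(methods) if methods is not None else {"GET"},
        )
        return func
    return wrapper

@resource_method("/", methods=["GET", "POST", "POST"])
def handler():
    ...

assert handler._meta.methods == {"GET", "POST"}
```
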
diff --git a/flama/resources/types.py b/flama/resources/types.py
new file mode 100644
index 00000000..e69de29b
diff --git a/flama/routing.py b/flama/routing.py
index 44e44e22..7e89b089 100644
--- a/flama/routing.py
+++ b/flama/routing.py
@@ -13,7 +13,7 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
if t.TYPE_CHECKING:
from flama.applications import Flama
@@ -49,12 +49,13 @@ def __init__(
self.handler = handler
functools.update_wrapper(self, handler)
- self.call_function: types.App = {
+ decorator_select: t.Dict[t.Tuple[_EndpointType, bool], types.App] = {
(self.type.http, False): self._http_function,
(self.type.http, True): self._http_endpoint,
(self.type.websocket, False): self._websocket_function,
(self.type.websocket, True): self._websocket_endpoint,
- }[(endpoint_type, inspect.isclass(self.handler))]
+ }
+ self.call_function: types.App = decorator_select[(endpoint_type, inspect.isclass(self.handler))]
def __get__(self, instance, owner):
return functools.partial(self.__call__, instance)
@@ -335,7 +336,9 @@ def __repr__(self) -> str:
@staticmethod
def is_endpoint(
x: t.Union[t.Callable, t.Type[endpoints.HTTPEndpoint]]
- ) -> t.TypeGuard[t.Type[endpoints.HTTPEndpoint]]:
+ ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9
+ t.Type[endpoints.HTTPEndpoint]
+ ]:
return inspect.isclass(x) and issubclass(x, endpoints.HTTPEndpoint)
def endpoint_handlers(self) -> t.Dict[str, t.Callable]:
@@ -411,7 +414,9 @@ def __eq__(self, other: t.Any) -> bool:
@staticmethod
def is_endpoint(
x: t.Union[t.Callable, t.Type[endpoints.WebSocketEndpoint]]
- ) -> t.TypeGuard[t.Type[endpoints.WebSocketEndpoint]]:
+ ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9
+ t.Type[endpoints.WebSocketEndpoint]
+ ]:
return inspect.isclass(x) and issubclass(x, endpoints.WebSocketEndpoint)
def endpoint_handlers(self) -> t.Dict[str, t.Callable]:
@@ -558,7 +563,7 @@ def __init__(
self,
routes: t.Optional[t.Sequence[BaseRoute]] = None,
*,
- components: t.Optional[t.Sequence["Component"]] = None,
+ components: t.Optional[t.Union[t.Sequence["Component"], t.Set["Component"]]] = None,
lifespan: t.Optional[t.Callable[[t.Optional["Flama"]], t.AsyncContextManager]] = None,
root: t.Optional["Flama"] = None,
):
@@ -580,8 +585,12 @@ def __eq__(self, other: t.Any) -> bool:
return isinstance(other, Router) and self.routes == other.routes
async def __call__(self, scope: types.Scope, receive: types.Receive, send: types.Send) -> None:
+ logger.debug("Request: %s", str(scope))
assert scope["type"] in ("http", "websocket", "lifespan")
+ if "app" in scope and scope["app"]._status != types.AppStatus.READY and scope["type"] != "lifespan":
+ raise exceptions.ApplicationError("Application is not ready to process requests yet.")
+
if "router" not in scope:
scope["router"] = self
diff --git a/flama/schemas/_libs/marshmallow/adapter.py b/flama/schemas/_libs/marshmallow/adapter.py
index a0181c05..d1ee7a44 100644
--- a/flama/schemas/_libs/marshmallow/adapter.py
+++ b/flama/schemas/_libs/marshmallow/adapter.py
@@ -7,7 +7,7 @@
from apispec.ext.marshmallow import MarshmallowPlugin, resolve_schema_cls
from flama.injection import Parameter
-from flama.schemas._libs.marshmallow.fields import MAPPING
+from flama.schemas._libs.marshmallow.fields import MAPPING, MAPPING_TYPES
from flama.schemas.adapter import Adapter
from flama.schemas.exceptions import SchemaGenerationError, SchemaValidationError
from flama.types import JSONSchema
@@ -15,7 +15,7 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
if t.TYPE_CHECKING:
from apispec.ext.marshmallow import OpenAPIConverter
@@ -35,7 +35,7 @@ def build_field(
required: bool = True,
default: t.Any = None,
multiple: bool = False,
- **kwargs
+ **kwargs,
) -> Field:
field_args = {
"required": required,
@@ -66,31 +66,23 @@ def build_schema(
)
def validate(self, schema: t.Union[t.Type[Schema], Schema], values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
- schema_instance = schema() if inspect.isclass(schema) else schema
-
try:
- data: t.Dict[str, t.Any] = schema_instance.load(values, unknown=marshmallow.EXCLUDE)
+ return self._schema_instance(schema).load(values, unknown=marshmallow.EXCLUDE) # type: ignore
except marshmallow.ValidationError as exc:
raise SchemaValidationError(errors=exc.normalized_messages())
- return data
-
def load(self, schema: t.Union[t.Type[Schema], Schema], value: t.Dict[str, t.Any]) -> Schema:
- schema_instance = schema() if inspect.isclass(schema) else schema
-
- load_schema: Schema = schema_instance.load(value)
-
- return load_schema
+ return self._schema_instance(schema).load(value) # type: ignore
def dump(self, schema: t.Union[t.Type[Schema], Schema], value: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
- schema_instance = schema() if inspect.isclass(schema) else schema
-
try:
- data: t.Dict[str, t.Any] = schema_instance.dump(value)
+ return self._schema_instance(schema).dump(value) # type: ignore
except Exception as exc:
raise SchemaValidationError(errors=str(exc))
- return data
+ def name(self, schema: t.Union[Schema, t.Type[Schema]]) -> str:
+ s = self.unique_schema(schema)
+ return s.__qualname__ if s.__module__ == "builtins" else f"{s.__module__}.{s.__qualname__}"
def to_json_schema(self, schema: t.Union[t.Type[Schema], t.Type[Field], Schema, Field]) -> JSONSchema:
json_schema: t.Dict[str, t.Any]
@@ -127,8 +119,38 @@ def unique_schema(self, schema: t.Union[Schema, t.Type[Schema]]) -> t.Type[Schem
return schema
- def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Union[Schema, t.Type[Schema]]]:
+ def _get_field_type(self, field: Field) -> t.Union[Schema, t.Type]:
+ if isinstance(field, marshmallow.fields.Nested):
+ return field.schema
+
+ if isinstance(field, marshmallow.fields.List):
+ return self._get_field_type(field.inner) # type: ignore
+
+ if isinstance(field, marshmallow.fields.Dict):
+ return self._get_field_type(field.value_field) # type: ignore
+
+ try:
+ return MAPPING_TYPES[field.__class__]
+ except KeyError:
+ return None
+
+ def schema_fields(
+ self, schema: t.Union[Schema, t.Type[Schema]]
+ ) -> t.Dict[str, t.Tuple[t.Union[t.Type, Schema], Field]]:
+ return {
+ name: (self._get_field_type(field), field) for name, field in self._schema_instance(schema).fields.items()
+ }
+
+ def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Union[Schema, t.Type[Schema]]]: # type: ignore
return isinstance(obj, Schema) or (inspect.isclass(obj) and issubclass(obj, Schema))
- def is_field(self, obj: t.Any) -> t.TypeGuard[t.Union[Field, t.Type[Field]]]:
+ def is_field(self, obj: t.Any) -> t.TypeGuard[t.Union[Field, t.Type[Field]]]: # type: ignore
return isinstance(obj, Field) or (inspect.isclass(obj) and issubclass(obj, Field))
+
+ def _schema_instance(self, schema: t.Union[t.Type[Schema], Schema]) -> Schema:
+ if inspect.isclass(schema) and issubclass(schema, Schema):
+ return schema()
+ elif isinstance(schema, Schema):
+ return schema
+ else:
+ raise ValueError("Wrong schema")
diff --git a/flama/schemas/_libs/marshmallow/fields.py b/flama/schemas/_libs/marshmallow/fields.py
index abda4972..362159fb 100644
--- a/flama/schemas/_libs/marshmallow/fields.py
+++ b/flama/schemas/_libs/marshmallow/fields.py
@@ -1,12 +1,11 @@
# ruff: noqa
import datetime
-import typing
+import typing as t
import uuid
-import marshmallow.fields
from marshmallow.fields import *
-MAPPING: typing.Dict[typing.Optional[typing.Type], typing.Type[marshmallow.fields.Field]] = {
+MAPPING: t.Dict[t.Union[t.Type, None], t.Type[Field]] = {
None: Field,
int: Integer,
float: Float,
@@ -19,3 +18,5 @@
datetime.datetime: DateTime,
datetime.time: Time,
}
+
+MAPPING_TYPES = {v: k for k, v in MAPPING.items()}
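
`MAPPING_TYPES` is a plain dict inversion; it is only faithful because every field class appears exactly once as a value. If two Python types ever mapped to the same field class, the inversion would silently keep the last one:

```python
import datetime

# Illustrative stand-in values; the real MAPPING maps types to field classes.
MAPPING = {None: "Field", int: "Integer", float: "Float", datetime.date: "Date"}
MAPPING_TYPES = {v: k for k, v in MAPPING.items()}

assert MAPPING_TYPES["Integer"] is int
assert MAPPING_TYPES["Date"] is datetime.date
```
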
diff --git a/flama/schemas/_libs/marshmallow/schemas.py b/flama/schemas/_libs/marshmallow/schemas.py
index 34c67be2..5b4736e2 100644
--- a/flama/schemas/_libs/marshmallow/schemas.py
+++ b/flama/schemas/_libs/marshmallow/schemas.py
@@ -78,12 +78,12 @@ class MLModelOutput(marshmallow.Schema):
SCHEMAS = {
- "APIError": APIError,
- "DropCollection": DropCollection,
- "LimitOffsetMeta": LimitOffsetMeta,
- "LimitOffset": LimitOffset,
- "PageNumberMeta": PageNumberMeta,
- "PageNumber": PageNumber,
- "MLModelInput": MLModelInput,
- "MLModelOutput": MLModelOutput,
+ "flama.APIError": APIError,
+ "flama.DropCollection": DropCollection,
+ "flama.LimitOffsetMeta": LimitOffsetMeta,
+ "flama.LimitOffset": LimitOffset,
+ "flama.PageNumberMeta": PageNumberMeta,
+ "flama.PageNumber": PageNumber,
+ "flama.MLModelInput": MLModelInput,
+ "flama.MLModelOutput": MLModelOutput,
}
diff --git a/flama/schemas/_libs/pydantic/adapter.py b/flama/schemas/_libs/pydantic/adapter.py
index fe4e255a..613f7ce4 100644
--- a/flama/schemas/_libs/pydantic/adapter.py
+++ b/flama/schemas/_libs/pydantic/adapter.py
@@ -3,8 +3,8 @@
import typing as t
import pydantic
-from pydantic.fields import ModelField
-from pydantic.schema import field_schema, model_schema
+from pydantic.fields import FieldInfo
+from pydantic.json_schema import model_json_schema
from flama.injection import Parameter
from flama.schemas.adapter import Adapter
@@ -14,12 +14,12 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
__all__ = ["PydanticAdapter"]
Schema = pydantic.BaseModel
-Field = ModelField
+Field = FieldInfo
class PydanticAdapter(Adapter[Schema, Field]):
@@ -44,13 +44,12 @@ def build_field(
if nullable:
annotation = t.Optional[annotation]
- return ModelField.infer(
- name=name,
- annotation=annotation,
- value=pydantic.Field(**kwargs),
- class_validators=None,
- config=pydantic.BaseConfig,
- )
+ if default is Parameter.empty:
+ field = FieldInfo.from_annotation(annotation)
+ else:
+ field = FieldInfo.from_annotated_attribute(annotation, default)
+
+ return field
def build_schema(
self,
@@ -59,26 +58,26 @@ def build_schema(
schema: t.Optional[t.Union[Schema, t.Type[Schema]]] = None,
fields: t.Optional[t.Dict[str, Field]] = None,
) -> t.Type[Schema]:
- return pydantic.create_model( # type: ignore
+ return pydantic.create_model(
name or self.DEFAULT_SCHEMA_NAME,
**{
**(
{
- name: (field.annotation, field.field_info)
- for name, field in self.unique_schema(schema).__fields__.items()
+ name: (field_info.annotation, field_info)
+ for name, field_info in self.unique_schema(schema).model_fields.items()
}
- if schema
+ if self.is_schema(schema)
else {}
),
- **({name: (field.annotation, field.field_info) for name, field in fields.items()} if fields else {}),
- },
+ **({name: (field.annotation, field) for name, field in fields.items()} if fields else {}),
+ }, # type: ignore
)
def validate(self, schema: t.Union[Schema, t.Type[Schema]], values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
schema_cls = self.unique_schema(schema)
try:
- return schema_cls(**values).dict()
+ return schema_cls(**values).model_dump()
except pydantic.ValidationError as errors:
raise SchemaValidationError(errors={str(error["loc"][0]): error for error in errors.errors()})
@@ -92,15 +91,25 @@ def dump(self, schema: t.Union[Schema, t.Type[Schema]], value: t.Dict[str, t.Any
return self.validate(schema_cls, value)
+ def name(self, schema: t.Union[Schema, t.Type[Schema]]) -> str:
+ s = self.unique_schema(schema)
+ return s.__qualname__ if s.__module__ == "builtins" else f"{s.__module__}.{s.__qualname__}"
+
def to_json_schema(self, schema: t.Union[Schema, t.Type[Schema], Field]) -> JSONSchema:
try:
if self.is_schema(schema):
- json_schema = model_schema(schema, ref_prefix="#/components/schemas/")
+ json_schema = model_json_schema(schema, ref_template="#/components/schemas/{model}")
+ if "$defs" in json_schema:
+ del json_schema["$defs"]
elif self.is_field(schema):
- json_schema = field_schema(schema, ref_prefix="#/components/schemas/", model_name_map={})[0]
- if schema.allow_none:
- types = [json_schema["type"]] if isinstance(json_schema["type"], str) else json_schema["type"]
- json_schema["type"] = list(dict.fromkeys(types + ["null"]))
+ json_schema = model_json_schema(
+ self.build_schema(fields={"x": schema}), ref_template="#/components/schemas/{model}"
+ )["properties"]["x"]
+ if not schema.title: # Pydantic is introducing a default title, so we drop it
+ del json_schema["title"]
+ if "anyOf" in json_schema: # Just simplifying type definition from anyOf to a list of types
+ json_schema["type"] = [x["type"] for x in json_schema["anyOf"]]
+ del json_schema["anyOf"]
else:
raise TypeError("Not a valid schema class or field")
@@ -111,8 +120,37 @@ def to_json_schema(self, schema: t.Union[Schema, t.Type[Schema], Field]) -> JSON
def unique_schema(self, schema: t.Union[Schema, t.Type[Schema]]) -> t.Type[Schema]:
return schema.__class__ if isinstance(schema, Schema) else schema
- def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Type[Schema]]:
+ def _get_field_type(
+ self, field: Field
+ ) -> t.Union[t.Union[Schema, t.Type], t.List[t.Union[Schema, t.Type]], t.Dict[str, t.Union[Schema, t.Type]]]:
+ if not self.is_field(field):
+ return field
+
+ if t.get_origin(field.annotation) == list:
+ return self._get_field_type(t.get_args(field.annotation)[0])
+
+ if t.get_origin(field.annotation) == dict:
+ return self._get_field_type(t.get_args(field.annotation)[1])
+
+ return field.annotation
+
+ def schema_fields(
+ self, schema: t.Union[Schema, t.Type[Schema]]
+ ) -> t.Dict[
+ str,
+ t.Tuple[
+ t.Union[t.Union[Schema, t.Type], t.List[t.Union[Schema, t.Type]], t.Dict[str, t.Union[Schema, t.Type]]],
+ Field,
+ ],
+ ]:
+ return {name: (self._get_field_type(field), field) for name, field in schema.model_fields.items()}
+
+ def is_schema(
+ self, obj: t.Any
+ ) -> t.TypeGuard[t.Type[Schema]]: # type: ignore # PORT: Remove this comment when stop supporting 3.9
return inspect.isclass(obj) and issubclass(obj, Schema)
- def is_field(self, obj: t.Any) -> t.TypeGuard[Field]:
+ def is_field(
+ self, obj: t.Any
+ ) -> t.TypeGuard[Field]: # type: ignore # PORT: Remove this comment when stop supporting 3.9
return isinstance(obj, Field)
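
This adapter is a pydantic v1 to v2 migration: `ModelField` becomes `FieldInfo`, `pydantic.schema.model_schema` becomes `pydantic.json_schema.model_json_schema`, and `.dict()` becomes `.model_dump()`. A minimal sketch of those v2 APIs (requires pydantic >= 2; the `Puppy` model is illustrative):

```python
import typing as t

import pydantic
from pydantic.fields import FieldInfo
from pydantic.json_schema import model_json_schema

# Build a field from a bare annotation, or from annotation plus default:
required_field = FieldInfo.from_annotation(int)
optional_field = FieldInfo.from_annotated_attribute(t.Optional[str], None)

# Build a model dynamically, then validate and dump through it:
Puppy = pydantic.create_model("Puppy", name=(str, ...), age=(int, 3))
assert Puppy(name="Canna").model_dump() == {"name": "Canna", "age": 3}

# JSON Schema generation, with references placed under #/components/schemas:
json_schema = model_json_schema(Puppy, ref_template="#/components/schemas/{model}")
assert json_schema["properties"]["name"]["type"] == "string"
```
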
diff --git a/flama/schemas/_libs/pydantic/schemas.py b/flama/schemas/_libs/pydantic/schemas.py
index 66fab945..e96c5506 100644
--- a/flama/schemas/_libs/pydantic/schemas.py
+++ b/flama/schemas/_libs/pydantic/schemas.py
@@ -56,12 +56,12 @@ class MLModelOutput(BaseModel):
SCHEMAS = {
- "APIError": APIError,
- "DropCollection": DropCollection,
- "LimitOffsetMeta": LimitOffsetMeta,
- "LimitOffset": LimitOffset,
- "PageNumberMeta": PageNumberMeta,
- "PageNumber": PageNumber,
- "MLModelInput": MLModelInput,
- "MLModelOutput": MLModelOutput,
+ "flama.APIError": APIError,
+ "flama.DropCollection": DropCollection,
+ "flama.LimitOffsetMeta": LimitOffsetMeta,
+ "flama.LimitOffset": LimitOffset,
+ "flama.PageNumberMeta": PageNumberMeta,
+ "flama.PageNumber": PageNumber,
+ "flama.MLModelInput": MLModelInput,
+ "flama.MLModelOutput": MLModelOutput,
}
diff --git a/flama/schemas/_libs/typesystem/adapter.py b/flama/schemas/_libs/typesystem/adapter.py
index 04a1b31a..ff201bb7 100644
--- a/flama/schemas/_libs/typesystem/adapter.py
+++ b/flama/schemas/_libs/typesystem/adapter.py
@@ -5,7 +5,7 @@
import typesystem
from flama.injection import Parameter
-from flama.schemas._libs.typesystem.fields import MAPPING
+from flama.schemas._libs.typesystem.fields import MAPPING, MAPPING_TYPES
from flama.schemas.adapter import Adapter
from flama.schemas.exceptions import SchemaGenerationError, SchemaValidationError
from flama.types import JSONSchema
@@ -13,7 +13,7 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
__all__ = ["TypesystemAdapter"]
@@ -30,7 +30,7 @@ def build_field(
required: bool = True,
default: t.Any = None,
multiple: bool = False,
- **kwargs
+ **kwargs,
) -> Field:
if required is False and default is not Parameter.empty:
kwargs["default"] = default
@@ -44,7 +44,7 @@ def build_field(
if self.is_schema(type_)
else MAPPING[type_]()
),
- **kwargs
+ **kwargs,
)
return MAPPING[type_](**kwargs)
@@ -58,7 +58,7 @@ def build_schema(
) -> Schema:
return Schema(
title=name or self.DEFAULT_SCHEMA_NAME,
- fields={**(self.unique_schema(schema).fields if schema else {}), **(fields or {})},
+ fields={**(self.unique_schema(schema).fields if self.is_schema(schema) else {}), **(fields or {})},
)
@t.no_type_check
@@ -82,6 +82,13 @@ def _dump(self, value: t.Any) -> t.Any:
return value
+ @t.no_type_check
+ def name(self, schema: Schema) -> str:
+ if not schema.title:
+ raise ValueError(f"Schema '{schema}' needs to define title attribute")
+
+ return schema.title if schema.__module__ == "builtins" else f"{schema.__module__}.{schema.title}"
+
@t.no_type_check
def to_json_schema(self, schema: t.Union[Schema, Field]) -> JSONSchema:
try:
@@ -100,10 +107,47 @@ def to_json_schema(self, schema: t.Union[Schema, Field]) -> JSONSchema:
def unique_schema(self, schema: Schema) -> Schema:
return schema
+ def _get_field_type(
+ self, field: Field
+ ) -> t.Union[t.Union[Schema, t.Type], t.List[t.Union[Schema, t.Type]], t.Dict[str, t.Union[Schema, t.Type]]]:
+ if isinstance(field, typesystem.Reference):
+ return field.target
+
+ if isinstance(field, typesystem.Array):
+ return (
+ [self._get_field_type(x) for x in field.items]
+ if isinstance(field.items, (list, tuple, set))
+ else self._get_field_type(field.items)
+ )
+
+ if isinstance(field, typesystem.Object):
+ return {k: self._get_field_type(v) for k, v in field.properties.items()}
+
+ try:
+ return MAPPING_TYPES[field.__class__]
+ except KeyError:
+ return None
+
+ @t.no_type_check
+ def schema_fields(
+ self, schema: Schema
+ ) -> t.Dict[
+ str,
+ t.Tuple[
+ t.Union[t.Union[Schema, t.Type], t.List[t.Union[Schema, t.Type]], t.Dict[str, t.Union[Schema, t.Type]]],
+ Field,
+ ],
+ ]:
+ return {name: (self._get_field_type(field), field) for name, field in schema.fields.items()}
+
@t.no_type_check
- def is_schema(self, obj: t.Any) -> t.TypeGuard[Schema]:
+ def is_schema(
+ self, obj: t.Any
+ ) -> t.TypeGuard[Schema]: # type: ignore # PORT: Remove this comment when stop supporting 3.9
return isinstance(obj, Schema) or (inspect.isclass(obj) and issubclass(obj, Schema))
@t.no_type_check
- def is_field(self, obj: t.Any) -> t.TypeGuard[Field]:
+ def is_field(
+ self, obj: t.Any
+ ) -> t.TypeGuard[Field]: # type: ignore # PORT: Remove this comment when stop supporting 3.9
return isinstance(obj, Field) or (inspect.isclass(obj) and issubclass(obj, Field))
diff --git a/flama/schemas/_libs/typesystem/fields.py b/flama/schemas/_libs/typesystem/fields.py
index 20f9769e..274cc177 100644
--- a/flama/schemas/_libs/typesystem/fields.py
+++ b/flama/schemas/_libs/typesystem/fields.py
@@ -1,12 +1,12 @@
# ruff: noqa
import datetime
-import typing
+import typing as t
import uuid
from typesystem.fields import *
from typesystem.schemas import Reference
-MAPPING: typing.Dict[typing.Any, typing.Type[Field]] = {
+MAPPING: t.Dict[t.Union[t.Type, None], t.Type[Field]] = {
None: Field,
int: Integer,
float: Float,
@@ -19,3 +19,5 @@
datetime.datetime: DateTime,
datetime.time: Time,
}
+
+MAPPING_TYPES = {v: k for k, v in MAPPING.items()}
diff --git a/flama/schemas/_libs/typesystem/schemas.py b/flama/schemas/_libs/typesystem/schemas.py
index 6ea5691e..203f7186 100644
--- a/flama/schemas/_libs/typesystem/schemas.py
+++ b/flama/schemas/_libs/typesystem/schemas.py
@@ -1,4 +1,4 @@
-import typesystem
+from typesystem import Definitions, Reference, Schema, fields
__all__ = [
"APIError",
@@ -12,82 +12,80 @@
"SCHEMAS",
]
-SCHEMAS = typesystem.Definitions()
+SCHEMAS = Definitions()
-APIError = typesystem.Schema(
+APIError = Schema(
title="APIError",
fields={
- "status_code": typesystem.fields.Integer(title="status_code", description="HTTP status code"),
- "detail": typesystem.fields.Union(
- any_of=[typesystem.fields.Object(), typesystem.fields.String()], title="detail", description="Error detail"
- ),
- "error": typesystem.fields.String(title="type", description="Exception or error type", allow_null=True),
+ "status_code": fields.Integer(title="status_code", description="HTTP status code"),
+ "detail": fields.Union(any_of=[fields.Object(), fields.String()], title="detail", description="Error detail"),
+ "error": fields.String(title="type", description="Exception or error type", allow_null=True),
},
)
-SCHEMAS["APIError"] = APIError
+SCHEMAS["flama.APIError"] = APIError
-DropCollection = typesystem.Schema(
+DropCollection = Schema(
title="DropCollection",
fields={
- "deleted": typesystem.fields.Integer(title="deleted", description="Number of deleted elements"),
+ "deleted": fields.Integer(title="deleted", description="Number of deleted elements"),
},
)
-SCHEMAS["DropCollection"] = DropCollection
+SCHEMAS["flama.DropCollection"] = DropCollection
-LimitOffsetMeta = typesystem.Schema(
+LimitOffsetMeta = Schema(
title="LimitOffsetMeta",
fields={
- "limit": typesystem.fields.Integer(title="limit", description="Number of retrieved items"),
- "offset": typesystem.fields.Integer(title="offset", description="Collection offset"),
- "count": typesystem.fields.Integer(title="count", description="Total number of items", allow_null=True),
+ "limit": fields.Integer(title="limit", description="Number of retrieved items"),
+ "offset": fields.Integer(title="offset", description="Collection offset"),
+ "count": fields.Integer(title="count", description="Total number of items", allow_null=True),
},
)
-SCHEMAS["LimitOffsetMeta"] = LimitOffsetMeta
+SCHEMAS["flama.LimitOffsetMeta"] = LimitOffsetMeta
-LimitOffset = typesystem.Schema(
+LimitOffset = Schema(
title="LimitOffset",
fields={
- "meta": typesystem.Reference(
- to="LimitOffsetMeta", definitions=SCHEMAS, title="meta", description="Pagination metadata"
+ "meta": Reference(
+ to="flama.LimitOffsetMeta", definitions=SCHEMAS, title="meta", description="Pagination metadata"
),
- "data": typesystem.fields.Array(title="data", description="Paginated data"),
+ "data": fields.Array(title="data", description="Paginated data"),
},
)
-SCHEMAS["LimitOffset"] = LimitOffset
+SCHEMAS["flama.LimitOffset"] = LimitOffset
-PageNumberMeta = typesystem.Schema(
+PageNumberMeta = Schema(
title="PageNumberMeta",
fields={
- "page": typesystem.fields.Integer(title="page", description="Current page number"),
- "page_size": typesystem.fields.Integer(title="page_size", description="Page size"),
- "count": typesystem.fields.Integer(title="count", description="Total number of items", allow_null=True),
+ "page": fields.Integer(title="page", description="Current page number"),
+ "page_size": fields.Integer(title="page_size", description="Page size"),
+ "count": fields.Integer(title="count", description="Total number of items", allow_null=True),
},
)
-SCHEMAS["PageNumberMeta"] = PageNumberMeta
+SCHEMAS["flama.PageNumberMeta"] = PageNumberMeta
-PageNumber = typesystem.Schema(
+PageNumber = Schema(
title="PageNumber",
fields={
- "meta": typesystem.Reference(
- to="PageNumberMeta", definitions=SCHEMAS, title="meta", description="Pagination metadata"
+ "meta": Reference(
+ to="flama.PageNumberMeta", definitions=SCHEMAS, title="meta", description="Pagination metadata"
),
- "data": typesystem.fields.Array(title="data", description="Paginated data"),
+ "data": fields.Array(title="data", description="Paginated data"),
},
)
-SCHEMAS["PageNumber"] = PageNumber
+SCHEMAS["flama.PageNumber"] = PageNumber
-MLModelInput = typesystem.Schema(
+MLModelInput = Schema(
title="MLModelInput",
fields={
- "input": typesystem.fields.Array(title="input", description="Model input"),
+ "input": fields.Array(title="input", description="Model input"),
},
)
-SCHEMAS["MLModelInput"] = MLModelInput
+SCHEMAS["flama.MLModelInput"] = MLModelInput
-MLModelOutput = typesystem.Schema(
+MLModelOutput = Schema(
title="MLModelOutput",
fields={
- "output": typesystem.fields.Array(title="output", description="Model output"),
+ "output": fields.Array(title="output", description="Model output"),
},
)
-SCHEMAS["MLModelOutput"] = MLModelOutput
+SCHEMAS["flama.MLModelOutput"] = MLModelOutput
diff --git a/flama/schemas/adapter.py b/flama/schemas/adapter.py
index 76cf94cf..9eee87e6 100644
--- a/flama/schemas/adapter.py
+++ b/flama/schemas/adapter.py
@@ -8,7 +8,7 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
class Adapter(t.Generic[_T_Schema, _T_Field], metaclass=abc.ABCMeta):
@@ -28,18 +28,21 @@ def build_field(
...
@t.overload
+ @abc.abstractmethod
def build_schema(
self, *, name: t.Optional[str] = None, fields: t.Dict[str, _T_Field]
) -> t.Union[_T_Schema, t.Type[_T_Schema]]:
...
@t.overload
+ @abc.abstractmethod
def build_schema(
self, *, name: t.Optional[str] = None, schema: t.Union[_T_Schema, t.Type[_T_Schema]]
) -> t.Union[_T_Schema, t.Type[_T_Schema]]:
...
@t.overload
+ @abc.abstractmethod
def build_schema(
self,
*,
@@ -71,6 +74,10 @@ def load(self, schema: t.Union[_T_Schema, t.Type[_T_Schema]], value: t.Dict[str,
def dump(self, schema: t.Union[_T_Schema, t.Type[_T_Schema]], value: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
...
+ @abc.abstractmethod
+ def name(self, schema: t.Union[_T_Schema, t.Type[_T_Schema]]) -> str:
+ ...
+
@abc.abstractmethod
def to_json_schema(self, schema: t.Union[_T_Schema, t.Type[_T_Schema], _T_Field]) -> JSONSchema:
...
@@ -80,9 +87,31 @@ def unique_schema(self, schema: t.Union[_T_Schema, t.Type[_T_Schema]]) -> t.Unio
...
@abc.abstractmethod
- def is_schema(self, obj: t.Any) -> t.TypeGuard[t.Union[_T_Schema, t.Type[_T_Schema]]]:
+ def schema_fields(
+ self, schema: t.Union[_T_Schema, t.Type[_T_Schema]]
+ ) -> t.Dict[
+ str,
+ t.Tuple[
+ t.Union[
+ t.Union[_T_Schema, t.Type], t.List[t.Union[_T_Schema, t.Type]], t.Dict[str, t.Union[_T_Schema, t.Type]]
+ ],
+ _T_Field,
+ ],
+ ]:
+ ...
+
+ @abc.abstractmethod
+ def is_schema(
+ self, obj: t.Any
+ ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9
+ t.Union[_T_Schema, t.Type[_T_Schema]]
+ ]:
...
@abc.abstractmethod
- def is_field(self, obj: t.Any) -> t.TypeGuard[t.Union[_T_Field, t.Type[_T_Field]]]:
+ def is_field(
+ self, obj: t.Any
+ ) -> t.TypeGuard[ # type: ignore # PORT: Remove this comment when stop supporting 3.9
+ t.Union[_T_Field, t.Type[_T_Field]]
+ ]:
...
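
Decorating each `@t.overload` with `@abc.abstractmethod`, as the hunks above do for `build_schema`, keeps the overloaded call shapes visible to type checkers while ABCMeta still forces subclasses to provide an implementation. A compact sketch of the pattern (names are illustrative):

```python
import abc
import typing as t

class Adapter(metaclass=abc.ABCMeta):
    @t.overload
    @abc.abstractmethod
    def build(self, *, name: str) -> str: ...

    @t.overload
    @abc.abstractmethod
    def build(self, *, fields: t.Dict[str, t.Any]) -> str: ...

    @abc.abstractmethod
    def build(
        self, *, name: t.Optional[str] = None, fields: t.Optional[t.Dict[str, t.Any]] = None
    ) -> str:
        ...

class Concrete(Adapter):
    def build(
        self, *, name: t.Optional[str] = None, fields: t.Optional[t.Dict[str, t.Any]] = None
    ) -> str:
        return name or ",".join(fields or {})

assert Concrete().build(name="Puppy") == "Puppy"
```
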
diff --git a/flama/schemas/data_structures.py b/flama/schemas/data_structures.py
index 84301062..1533cf2b 100644
--- a/flama/schemas/data_structures.py
+++ b/flama/schemas/data_structures.py
@@ -3,24 +3,20 @@
import sys
import typing as t
-import flama.types
from flama import schemas, types
from flama.injection.resolver import Parameter as InjectionParameter
-if sys.version_info < (3, 8): # PORT: Remove when stop supporting 3.7 # pragma: no cover
- from typing_extensions import get_args, get_origin
-
- t.get_args = get_args
- t.get_origin = get_origin
-
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
__all__ = ["Field", "Schema", "Parameter", "Parameters"]
+UNKNOWN = t.TypeVar("UNKNOWN")
+
+
class ParameterLocation(enum.Enum):
query = enum.auto()
path = enum.auto()
@@ -87,7 +83,7 @@ def is_http_valid_type(cls, type_: t.Type) -> bool:
)
@property
- def json_schema(self) -> flama.types.JSONSchema:
+ def json_schema(self) -> types.JSONSchema:
return schemas.adapter.to_json_schema(self.field)
@@ -127,13 +123,39 @@ def is_schema(cls, obj: t.Any) -> bool:
return schemas.adapter.is_schema(obj)
@property
- def json_schema(self) -> t.Dict[str, t.Any]:
+ def name(self) -> str:
+ return schemas.adapter.name(self.schema)
+
+ @property
+ def json_schema(self) -> types.JSONSchema:
return schemas.adapter.to_json_schema(self.schema)
@property
def unique_schema(self) -> t.Any:
return schemas.adapter.unique_schema(self.schema)
+ @property
+ def fields(self) -> t.Dict[str, t.Tuple[t.Any, t.Any]]:
+ return schemas.adapter.schema_fields(self.unique_schema)
+
+ def nested_schemas(self, schema: t.Any = UNKNOWN) -> t.List[t.Any]:
+ if schema == UNKNOWN:
+ return self.nested_schemas(self)
+
+ if schemas.adapter.is_schema(schema):
+ return [schema]
+
+ if isinstance(schema, (list, tuple, set)):
+ return [x for field in schema for x in self.nested_schemas(field)]
+
+ if isinstance(schema, dict):
+ return [x for field in schema.values() for x in self.nested_schemas(field)]
+
+ if isinstance(schema, Schema):
+ return [x for field_type, _ in schema.fields.values() for x in self.nested_schemas(field_type)]
+
+ return []
+
@t.overload
def validate(self, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
...
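
`nested_schemas` recursively collects every schema reachable from a field tree, branching on lists/tuples/sets, dicts and nested `Schema` wrappers, with the `UNKNOWN` TypeVar acting as a "no argument passed" sentinel. A simplified standalone sketch of the traversal, in which a class object stands in for "a schema" (none of these names are flama's):

```python
import typing as t

class SchemaBox:
    """Stand-in for a schema wrapper exposing its fields."""
    def __init__(self, fields: t.Dict[str, t.Any]):
        self.fields = fields

def nested_schemas(node: t.Any) -> t.List[type]:
    if isinstance(node, type):  # leaf: a schema class
        return [node]
    if isinstance(node, (list, tuple, set)):
        return [x for item in node for x in nested_schemas(item)]
    if isinstance(node, dict):
        return [x for item in node.values() for x in nested_schemas(item)]
    if isinstance(node, SchemaBox):
        return [x for item in node.fields.values() for x in nested_schemas(item)]
    return []  # plain values contribute nothing

class A: ...
class B: ...

tree = SchemaBox({"a": A, "rest": [B, SchemaBox({"again": A})]})
assert nested_schemas(tree) == [A, B, A]
```
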
diff --git a/flama/schemas/generator.py b/flama/schemas/generator.py
index 0c2d6821..95e4afdd 100644
--- a/flama/schemas/generator.py
+++ b/flama/schemas/generator.py
@@ -2,14 +2,12 @@
import inspect
import itertools
import logging
-import typing
import typing as t
from collections import defaultdict
import yaml
-import flama.types.schema
-from flama import routing, schemas
+from flama import routing, schemas, types
from flama.schemas import Schema, openapi
from flama.schemas.data_structures import Parameter
from flama.url import RegexPath
@@ -40,33 +38,31 @@ def ref(self) -> str:
return f"#/components/schemas/{self.name}"
@property
- def json_schema(self) -> t.Dict[str, t.Any]:
+ def json_schema(self) -> types.JSONSchema:
return Schema(self.schema).json_schema
-class SchemaRegistry(typing.Dict[int, SchemaInfo]):
- def __init__(self, schemas: t.Optional[typing.Dict[str, flama.types.schema._T_Schema]] = None):
+class SchemaRegistry(t.Dict[int, SchemaInfo]):
+ def __init__(self, schemas: t.Optional[t.Dict[str, schemas.Schema]] = None):
super().__init__()
for name, schema in (schemas or {}).items():
self.register(schema, name)
- def __contains__(self, item: flama.types.schema._T_Schema) -> bool:
- return super().__contains__(id(schemas.adapter.unique_schema(item)))
+ def __contains__(self, item: t.Any) -> bool:
+ return super().__contains__(id(schemas.Schema(item).unique_schema))
- def __getitem__(self, item: flama.types.schema._T_Schema) -> SchemaInfo:
+ def __getitem__(self, item: t.Any) -> SchemaInfo:
"""
Lookup method that allows using Schema classes or instances.
:param item: Schema to look for.
:return: Registered schema.
"""
- return super().__getitem__(id(schemas.adapter.unique_schema(item)))
+ return super().__getitem__(id(schemas.Schema(item).unique_schema))
@t.no_type_check
- def _get_schema_references_from_schema(
- self, schema: typing.Union[openapi.Schema, openapi.Reference]
- ) -> typing.List[str]:
+ def _get_schema_references_from_schema(self, schema: t.Union[openapi.Schema, openapi.Reference]) -> t.List[str]:
if isinstance(schema, openapi.Reference):
return [schema.ref]
@@ -75,28 +71,36 @@ def _get_schema_references_from_schema(
if "$ref" in schema:
result.append(schema["$ref"])
- if schema.get("type", "") == "array" and schema.get("items", {}).get("$ref"):
- result.append(schema["items"]["$ref"])
-
- result += [
- ref
- for composer in ("allOf", "anyOf", "oneOf")
- for composer_schema in schema.get(composer, [])
- for ref in self._get_schema_references_from_schema(composer_schema)
- ]
-
- result += [
- ref
- for prop in schema.get("properties", {}).values()
- for ref in self._get_schema_references_from_schema(prop)
- ]
+ if schema.get("type", "") == "array":
+ items = schema.get("items", {})
+ if isinstance(items, dict) and "$ref" in items:
+ result.append(items["$ref"])
+
+ for composer in ("allOf", "anyOf", "oneOf"):
+ composer_schemas = schema.get(composer, [])
+ if isinstance(composer_schemas, list):
+ result += [
+ ref
+ for x in composer_schemas
+ if x and isinstance(x, dict)
+ for ref in self._get_schema_references_from_schema(openapi.Schema(x))
+ ]
+
+ props = schema.get("properties", {})
+ if isinstance(props, dict):
+ result += [
+ ref
+ for x in props.values()
+ if x and isinstance(x, dict)
+ for ref in self._get_schema_references_from_schema(openapi.Schema(x))
+ ]
return result
- def _get_schema_references_from_path(self, path: openapi.Path) -> typing.List[str]:
+ def _get_schema_references_from_path(self, path: openapi.Path) -> t.List[str]:
return [y for x in path.operations.values() for y in self._get_schema_references_from_operation(x)]
- def _get_schema_references_from_operation(self, operation: openapi.Operation) -> typing.List[str]:
+ def _get_schema_references_from_operation(self, operation: openapi.Operation) -> t.List[str]:
return [
*self._get_schema_references_from_operation_parameters(operation.parameters),
*self._get_schema_references_from_operation_request_body(operation.requestBody),
@@ -104,7 +108,7 @@ def _get_schema_references_from_operation(self, operation: openapi.Operation) ->
*self._get_schema_references_from_operation_responses(operation.responses),
]
- def _get_schema_references_from_operation_responses(self, responses: openapi.Responses) -> typing.List[str]:
+ def _get_schema_references_from_operation_responses(self, responses: openapi.Responses) -> t.List[str]:
refs = []
for response in [x for x in responses.values() if x.content]:
@@ -118,8 +122,8 @@ def _get_schema_references_from_operation_responses(self, responses: openapi.Res
return refs
def _get_schema_references_from_operation_callbacks(
- self, callbacks: typing.Optional[typing.Dict[str, typing.Union[openapi.Callback, openapi.Reference]]]
- ) -> typing.List[str]:
+ self, callbacks: t.Optional[t.Dict[str, t.Union[openapi.Callback, openapi.Reference]]]
+ ) -> t.List[str]:
refs = []
if callbacks:
@@ -133,8 +137,8 @@ def _get_schema_references_from_operation_callbacks(
return refs
def _get_schema_references_from_operation_request_body(
- self, request_body: typing.Optional[typing.Union[openapi.RequestBody, openapi.Reference]]
- ) -> typing.List[str]:
+ self, request_body: t.Optional[t.Union[openapi.RequestBody, openapi.Reference]]
+ ) -> t.List[str]:
refs = []
if request_body:
@@ -147,8 +151,8 @@ def _get_schema_references_from_operation_request_body(
return refs
def _get_schema_references_from_operation_parameters(
- self, parameters: typing.Optional[typing.List[typing.Union[openapi.Parameter, openapi.Reference]]]
- ) -> typing.List[str]:
+ self, parameters: t.Optional[t.List[t.Union[openapi.Parameter, openapi.Reference]]]
+ ) -> t.List[str]:
refs = []
if parameters:
@@ -160,7 +164,7 @@ def _get_schema_references_from_operation_parameters(
return refs
- def used(self, spec: openapi.OpenAPISpec) -> typing.Dict[int, SchemaInfo]:
+ def used(self, spec: openapi.OpenAPISpec) -> t.Dict[int, SchemaInfo]:
"""
Generate a dict containing used schemas.
@@ -178,9 +182,17 @@ def used(self, spec: openapi.OpenAPISpec) -> typing.Dict[int, SchemaInfo]:
}
used_schemas.update({k: v for k, v in self.items() if v.name in refs_from_schemas})
+ for child_schema in [y for x in used_schemas.values() for y in schemas.Schema(x.schema).nested_schemas()]:
+ schema = schemas.Schema(child_schema)
+ instance = schema.unique_schema
+ if instance not in used_schemas:
+ used_schemas[id(instance)] = (
+ self[instance] if instance in self else SchemaInfo(name=schema.name, schema=instance)
+ )
+
return used_schemas
- def register(self, schema: flama.types.schema._T_Schema, name: t.Optional[str] = None) -> int:
+ def register(self, schema: schemas.Schema, name: t.Optional[str] = None) -> int:
"""
Register a new Schema in this registry.
@@ -191,24 +203,22 @@ def register(self, schema: flama.types.schema._T_Schema, name: t.Optional[str] =
if schema in self:
raise ValueError("Schema is already registered.")
- schema_instance = schemas.adapter.unique_schema(schema)
- if name is None:
- if not inspect.isclass(schema_instance):
- raise ValueError("Cannot infer schema name.")
+ s = schemas.Schema(schema)
- try:
- name = schema_instance.__name__
- except AttributeError:
- raise ValueError("Cannot infer schema name.")
+ try:
+ schema_name = name or s.name
+ except ValueError as e:
+ raise ValueError("Cannot infer schema name.") from e
+ schema_instance = s.unique_schema
schema_id = id(schema_instance)
- self[schema_id] = SchemaInfo(name=name, schema=schema_instance)
+ self[schema_id] = SchemaInfo(name=schema_name, schema=schema_instance)
return schema_id
def get_openapi_ref(
- self, element: flama.types.schema._T_Schema, multiple: t.Optional[bool] = None
- ) -> typing.Union[openapi.Schema, openapi.Reference]:
+ self, element: schemas.Schema, multiple: t.Optional[bool] = None
+ ) -> t.Union[openapi.Schema, openapi.Reference]:
"""
Builds the reference for a single schema or the array schema containing the reference.
@@ -238,7 +248,7 @@ def __init__(
contact_email: t.Optional[str] = None,
license_name: t.Optional[str] = None,
license_url: t.Optional[str] = None,
- schemas: t.Optional[typing.Dict] = None,
+ schemas: t.Optional[t.Dict] = None,
):
contact = (
openapi.Contact(name=contact_name, url=contact_url, email=contact_email)
@@ -261,8 +271,8 @@ def __init__(
self.schemas = SchemaRegistry(schemas=schemas)
def get_endpoints( # type: ignore[override]
- self, routes: typing.List[routing.BaseRoute], base_path: str = ""
- ) -> typing.Dict[str, typing.List[EndpointInfo]]:
+ self, routes: t.List[routing.BaseRoute], base_path: str = ""
+ ) -> t.Dict[str, t.List[EndpointInfo]]:
"""
Given the routes, yields the following information:
@@ -277,7 +287,7 @@ def get_endpoints( # type: ignore[override]
:param base_path: The base endpoints path.
:return: Data structure that contains metadata from every route.
"""
- endpoints_info: typing.Dict[str, typing.List[EndpointInfo]] = defaultdict(list)
+ endpoints_info: t.Dict[str, t.List[EndpointInfo]] = defaultdict(list)
for route in routes:
path = RegexPath(base_path + route.path.path).template
@@ -322,8 +332,8 @@ def get_endpoints( # type: ignore[override]
return endpoints_info
def _build_endpoint_parameters(
- self, endpoint: EndpointInfo, metadata: typing.Dict[str, typing.Any]
- ) -> typing.Optional[typing.List[openapi.Parameter]]:
+ self, endpoint: EndpointInfo, metadata: t.Dict[str, t.Any]
+ ) -> t.Optional[t.List[openapi.Parameter]]:
if not endpoint.query_parameters and not endpoint.path_parameters:
return None
@@ -352,8 +362,8 @@ def _build_endpoint_parameters(
]
def _build_endpoint_body(
- self, endpoint: EndpointInfo, metadata: typing.Dict[str, typing.Any]
- ) -> typing.Optional[openapi.RequestBody]:
+ self, endpoint: EndpointInfo, metadata: t.Dict[str, t.Any]
+ ) -> t.Optional[openapi.RequestBody]:
if not endpoint.body_parameter:
return None
@@ -373,8 +383,8 @@ def _build_endpoint_body(
)
def _build_endpoint_response(
- self, endpoint: EndpointInfo, metadata: typing.Dict[str, typing.Any]
- ) -> typing.Tuple[typing.Optional[openapi.Response], str]:
+ self, endpoint: EndpointInfo, metadata: t.Dict[str, t.Any]
+ ) -> t.Tuple[t.Optional[openapi.Response], str]:
try:
response_code, main_response = list(metadata.get("responses", {}).items())[0]
except IndexError:
@@ -403,7 +413,7 @@ def _build_endpoint_response(
str(response_code),
)
- def _build_endpoint_default_response(self, metadata: typing.Dict[str, typing.Any]) -> openapi.Response:
+ def _build_endpoint_default_response(self, metadata: t.Dict[str, t.Any]) -> openapi.Response:
return openapi.Response(
description=metadata.get("responses", {}).get("default", {}).get("description", "Unexpected error."),
content={
@@ -413,9 +423,7 @@ def _build_endpoint_default_response(self, metadata: typing.Dict[str, typing.Any
},
)
- def _build_endpoint_responses(
- self, endpoint: EndpointInfo, metadata: typing.Dict[str, typing.Any]
- ) -> openapi.Responses:
+ def _build_endpoint_responses(self, endpoint: EndpointInfo, metadata: t.Dict[str, t.Any]) -> openapi.Responses:
responses = metadata.get("responses", {})
try:
main_response_code = next(iter(responses.keys()))
@@ -469,7 +477,7 @@ def _build_endpoint_responses(
}
)
- def _parse_docstring(self, func: typing.Callable) -> t.Dict[t.Any, t.Any]:
+ def _parse_docstring(self, func: t.Callable) -> t.Dict[t.Any, t.Any]:
"""Given a function, parse the docstring as YAML and return a dictionary of info.
:param func: Function whose docstring will be analyzed.
@@ -512,7 +520,7 @@ def get_operation_schema(self, endpoint: EndpointInfo) -> openapi.Operation:
},
)
- def get_api_schema(self, routes: typing.List[routing.BaseRoute]) -> typing.Dict[str, typing.Any]:
+ def get_api_schema(self, routes: t.List[routing.BaseRoute]) -> t.Dict[str, t.Any]:
endpoints_info = self.get_endpoints(routes)
for path, endpoints in endpoints_info.items():
@@ -522,6 +530,6 @@ def get_api_schema(self, routes: typing.List[routing.BaseRoute]) -> typing.Dict[
for schema in self.schemas.used(self.spec).values():
self.spec.add_schema(schema.name, openapi.Schema(schema.json_schema))
- api_schema: typing.Dict[str, typing.Any] = self.spec.asdict()
+ api_schema: t.Dict[str, t.Any] = self.spec.asdict()
return api_schema
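
`SchemaRegistry` keys entries by `id()` of the unique schema object, so a schema class and its instances resolve to the same record, and `used()` now also folds in nested child schemas so every referenced component lands in the spec. A sketch of the id-keyed idea (simplified; `_unique` here just collapses an instance to its class):

```python
import typing as t

class SchemaRegistry(t.Dict[int, str]):
    @staticmethod
    def _unique(schema: t.Any) -> type:
        return schema if isinstance(schema, type) else type(schema)

    def register(self, schema: t.Any, name: str) -> int:
        key = id(self._unique(schema))
        self[key] = name
        return key

    def lookup(self, schema: t.Any) -> str:
        return self[id(self._unique(schema))]

class Puppy: ...

registry = SchemaRegistry()
registry.register(Puppy, "flama.Puppy")
# Class and instance hit the same entry:
assert registry.lookup(Puppy) == registry.lookup(Puppy()) == "flama.Puppy"
```
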
diff --git a/flama/serialize/serializers/pytorch.py b/flama/serialize/serializers/pytorch.py
index 37c18bf0..5596abfa 100644
--- a/flama/serialize/serializers/pytorch.py
+++ b/flama/serialize/serializers/pytorch.py
@@ -1,20 +1,11 @@
import codecs
+import importlib.metadata
import io
-import sys
import typing as t
from flama.serialize.base import Serializer
from flama.serialize.types import Framework
-if sys.version_info < (3, 8): # PORT: Remove when stop supporting 3.7 # pragma: no cover
- import importlib
-
- import importlib_metadata
-
- importlib.metadata = importlib_metadata
-else:
- import importlib.metadata
-
try:
import torch
except Exception: # pragma: no cover
diff --git a/flama/serialize/serializers/sklearn.py b/flama/serialize/serializers/sklearn.py
index 0745ef26..03a8dab3 100644
--- a/flama/serialize/serializers/sklearn.py
+++ b/flama/serialize/serializers/sklearn.py
@@ -1,21 +1,16 @@
import codecs
+import importlib.metadata
+import logging
import math
import pickle
-import sys
import typing as t
import warnings
+from flama import types
from flama.serialize.base import Serializer
from flama.serialize.types import Framework
-if sys.version_info < (3, 8): # PORT: Remove when stop supporting 3.7 # pragma: no cover
- import importlib
-
- import importlib_metadata
-
- importlib.metadata = importlib_metadata
-else:
- import importlib.metadata
+logger = logging.getLogger(__name__)
class SKLearnSerializer(Serializer):
@@ -31,7 +26,7 @@ def load(self, model: bytes, **kwargs) -> t.Any:
return model
- def _info(self, data):
+ def _info(self, data) -> types.JSONField:
if isinstance(data, (int, bool, str)):
return data
@@ -44,14 +39,14 @@ def _info(self, data):
if isinstance(data, (list, tuple, set)):
return [self._info(i) for i in data]
+ return None
+
+ def info(self, model: t.Any) -> t.Optional[types.JSONSchema]:
try:
- return self._info(data.get_params())
+ return self._info(model.get_params()) # type: ignore
except: # noqa
+ logger.exception("Cannot collect info from model")
return None
- def info(self, model: t.Any) -> t.Dict[str, t.Any]:
- model_info: t.Dict[str, t.Any] = self._info(model)
- return model_info
-
def version(self) -> str:
return importlib.metadata.version("scikit-learn")
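
`_info` recursively converts the output of `get_params()` into JSON-safe values, and the new `return None` fallback plus the `info` wrapper mean unserializable models now log an exception instead of crashing. A rough sketch of that sanitising recursion (simplified relative to the serializer above):

```python
import math
import typing as t

JSONField = t.Union[str, int, float, bool, None, t.List["JSONField"], t.Dict[str, "JSONField"]]

def info(data: t.Any) -> JSONField:
    if isinstance(data, (bool, int, str)):
        return data
    if isinstance(data, float):
        return None if math.isnan(data) else data  # NaN is not valid JSON
    if isinstance(data, dict):
        return {str(k): info(v) for k, v in data.items()}
    if isinstance(data, (list, tuple, set)):
        return [info(i) for i in data]
    return None  # anything unrepresentable is dropped to null

params = {"alpha": 1.0, "bad": float("nan"), "steps": [("scale", object())]}
assert info(params) == {"alpha": 1.0, "bad": None, "steps": [["scale", None]]}
```
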
diff --git a/flama/serialize/serializers/tensorflow.py b/flama/serialize/serializers/tensorflow.py
index 62c6368e..10788552 100644
--- a/flama/serialize/serializers/tensorflow.py
+++ b/flama/serialize/serializers/tensorflow.py
@@ -1,7 +1,7 @@
import codecs
+import importlib.metadata
import io
import json
-import sys
import tarfile
import typing as t
from tempfile import TemporaryDirectory
@@ -9,15 +9,6 @@
from flama.serialize.base import Serializer
from flama.serialize.types import Framework
-if sys.version_info < (3, 8): # PORT: Remove when stop supporting 3.7 # pragma: no cover
- import importlib
-
- import importlib_metadata
-
- importlib.metadata = importlib_metadata
-else:
- import importlib.metadata
-
try:
import tensorflow as tf
except Exception: # pragma: no cover
@@ -31,7 +22,7 @@ def dump(self, obj: t.Any, **kwargs) -> bytes:
assert tf is not None, "`tensorflow` must be installed to use TensorFlowSerializer."
buffer = io.BytesIO()
with TemporaryDirectory() as saved_model_dir, tarfile.open(fileobj=buffer, mode="w") as model_tar:
- tf.keras.models.save_model(obj, saved_model_dir)
+ tf.keras.models.save_model(obj, saved_model_dir) # type: ignore
model_tar.add(saved_model_dir, arcname="")
buffer.seek(0)
return codecs.encode(buffer.read(), "base64")
@@ -42,7 +33,7 @@ def load(self, model: bytes, **kwargs) -> t.Any:
fileobj=io.BytesIO(codecs.decode(model, "base64")), mode="r:"
) as model_tar:
model_tar.extractall(saved_model_dir)
- return tf.keras.models.load_model(saved_model_dir)
+ return tf.keras.models.load_model(saved_model_dir) # type: ignore
def info(self, model: t.Any) -> t.Dict[str, t.Any]:
model_info: t.Dict[str, t.Any] = json.loads(model.to_json())
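
The serializer saves the model into a temporary directory, tars that directory into an in-memory buffer, and base64-encodes the bytes; loading reverses the process. The same round trip works without TensorFlow, so here is a framework-free sketch in which a text file stands in for the saved model:

```python
import codecs
import io
import pathlib
import tarfile
from tempfile import TemporaryDirectory

def dump(directory: str) -> bytes:
    buffer = io.BytesIO()
    with tarfile.open(fileobj=buffer, mode="w") as tar:
        tar.add(directory, arcname="")  # archive the directory contents
    buffer.seek(0)
    return codecs.encode(buffer.read(), "base64")

def load(data: bytes, target: str) -> None:
    with tarfile.open(fileobj=io.BytesIO(codecs.decode(data, "base64")), mode="r:") as tar:
        tar.extractall(target)

with TemporaryDirectory() as src, TemporaryDirectory() as dst:
    pathlib.Path(src, "saved_model.pb").write_text("weights")
    load(dump(src), dst)
    assert pathlib.Path(dst, "saved_model.pb").read_text() == "weights"
```
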
diff --git a/flama/sqlalchemy.py b/flama/sqlalchemy.py
index 84390216..b4868cd4 100644
--- a/flama/sqlalchemy.py
+++ b/flama/sqlalchemy.py
@@ -4,9 +4,10 @@
try:
import sqlalchemy
+ from sqlalchemy import MetaData
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
- metadata = sqlalchemy.MetaData()
+ metadata = MetaData()
except Exception: # pragma: no cover
sqlalchemy = None # type: ignore[assignment]
metadata = None # type: ignore[assignment]
@@ -22,7 +23,7 @@ def __init__(self, database: t.Optional[str] = None):
self.database = database
self._engine: t.Optional["AsyncEngine"] = None
- self._metadata: t.Optional["sqlalchemy.MetaData"] = metadata
+ self._metadata: t.Optional["MetaData"] = metadata
@property
def engine(self) -> t.Optional["AsyncEngine"]:
@@ -38,7 +39,7 @@ def engine(self):
self._engine = None
@property
- def metadata(self) -> t.Optional["sqlalchemy.MetaData"]:
+ def metadata(self) -> t.Optional["MetaData"]:
assert sqlalchemy is not None, "sqlalchemy[asyncio] must be installed to use SQLAlchemyModule."
return self._metadata
diff --git a/flama/types/__init__.py b/flama/types/__init__.py
index b16058d5..bec66f2d 100644
--- a/flama/types/__init__.py
+++ b/flama/types/__init__.py
@@ -1,10 +1,6 @@
-import typing as t
-
+from flama.types.applications import * # noqa
from flama.types.asgi import * # noqa
from flama.types.http import * # noqa
+from flama.types.json import * # noqa
from flama.types.schema import * # noqa
from flama.types.websockets import * # noqa
-
-JSONField = t.Union[str, int, float, bool, None, t.List["JSONField"], t.Dict[str, "JSONField"]]
-JSONSchema = t.Dict[str, JSONField]
-Tag = t.Union[str, t.Sequence["Tag"], t.Dict[str, "Tag"]]
diff --git a/flama/types/applications.py b/flama/types/applications.py
new file mode 100644
index 00000000..5cad0620
--- /dev/null
+++ b/flama/types/applications.py
@@ -0,0 +1,15 @@
+import enum
+import typing as t
+
+__all__ = ["Tag", "AppStatus"]
+
+Tag = t.Union[str, t.Sequence["Tag"], t.Dict[str, "Tag"]]
+
+
+class AppStatus(enum.Enum):
+ NOT_INITIALIZED = enum.auto()
+ STARTING = enum.auto()
+ READY = enum.auto()
+ SHUTTING_DOWN = enum.auto()
+ SHUT_DOWN = enum.auto()
+ FAILED = enum.auto()
diff --git a/flama/types/asgi.py b/flama/types/asgi.py
index 352cc352..97c6a172 100644
--- a/flama/types/asgi.py
+++ b/flama/types/asgi.py
@@ -1,16 +1,11 @@
import sys
import typing as t
-if sys.version_info < (3, 8): # PORT: Remove when stop supporting 3.7 # pragma: no cover
- from typing_extensions import Protocol
-
- t.Protocol = Protocol
-
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import Concatenate, ParamSpec
- t.Concatenate = Concatenate
- t.ParamSpec = ParamSpec
+ t.Concatenate = Concatenate # type: ignore
+ t.ParamSpec = ParamSpec # type: ignore
if t.TYPE_CHECKING:
from flama import endpoints # noqa
@@ -34,7 +29,7 @@
"WebSocketHandler",
]
-P = t.ParamSpec("P")
+P = t.ParamSpec("P") # type: ignore # PORT: Remove this comment when stop supporting 3.9
R = t.TypeVar("R", covariant=True)
Scope = t.NewType("Scope", t.MutableMapping[str, t.Any])
@@ -62,24 +57,28 @@ async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
...
-AppFunction = t.Callable[..., R]
-AppAsyncFunction = t.Callable[..., t.Awaitable[R]]
+AppFunction = t.Callable[P, R]
+AppAsyncFunction = t.Callable[P, t.Awaitable[R]]
App = t.Union[AppClass, AppAsyncClass, AppFunction, AppAsyncFunction]
# Middleware
-class MiddlewareClass(AppClass):
+class MiddlewareClass(AppClass, t.Protocol[P, R]):
def __init__(self, app: App, *args: P.args, **kwargs: P.kwargs):
...
-class MiddlewareAsyncClass(AppAsyncClass):
+class MiddlewareAsyncClass(AppAsyncClass, t.Protocol[P, R]):
def __init__(self, app: App, *args: P.args, **kwargs: P.kwargs):
...
-MiddlewareFunction = t.Callable[t.Concatenate[App, P], App] # type: ignore[valid-type,misc]
-MiddlewareAsyncFunction = t.Callable[t.Concatenate[App, P], t.Awaitable[App]] # type: ignore[valid-type,misc]
+MiddlewareFunction = t.Callable[
+ t.Concatenate[App, P], App # type: ignore # PORT: Remove this comment when stop supporting 3.9
+]
+MiddlewareAsyncFunction = t.Callable[
+ t.Concatenate[App, P], t.Awaitable[App] # type: ignore # PORT: Remove this comment when stop supporting 3.9
+]
Middleware = t.Union[t.Type[MiddlewareClass], t.Type[MiddlewareAsyncClass], MiddlewareFunction, MiddlewareAsyncFunction]
HTTPHandler = t.Union[AppFunction, t.Type["endpoints.HTTPEndpoint"]]
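
With `t.Concatenate[App, P]`, a middleware factory's first positional parameter is pinned to the app while every remaining parameter flows through the `ParamSpec`. A sketch of that typing pattern on Python 3.10+ (`timing_middleware` is illustrative, not flama's API):

```python
import typing as t

P = t.ParamSpec("P")
App = t.Callable[[], None]

# First parameter fixed to App, the rest captured by P:
MiddlewareFunction = t.Callable[t.Concatenate[App, P], App]

def timing_middleware(app: App, *, label: str = "request") -> App:
    def wrapped() -> None:
        print(f"[{label}] before")
        app()
        print(f"[{label}] after")
    return wrapped

middleware: MiddlewareFunction = timing_middleware
middleware(lambda: print("handler"), label="demo")()
```
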
diff --git a/flama/types/json.py b/flama/types/json.py
new file mode 100644
index 00000000..ccc8d300
--- /dev/null
+++ b/flama/types/json.py
@@ -0,0 +1,7 @@
+import typing as t
+
+__all__ = ["JSONField", "JSONSchema"]
+
+
+JSONField = t.Union[str, int, float, bool, None, t.List["JSONField"], t.Dict[str, "JSONField"]]
+JSONSchema = t.Dict[str, JSONField]
diff --git a/flama/types/schema.py b/flama/types/schema.py
index f14fc33d..76f24127 100644
--- a/flama/types/schema.py
+++ b/flama/types/schema.py
@@ -5,15 +5,17 @@
if sys.version_info < (3, 10): # PORT: Remove when stop supporting 3.9 # pragma: no cover
from typing_extensions import TypeGuard
- t.TypeGuard = TypeGuard
+ t.TypeGuard = TypeGuard # type: ignore
-__all__ = ["Schema", "is_schema"]
+__all__ = ["_T_Field", "_T_Schema", "Schema", "is_schema"]
_T_Field = t.TypeVar("_T_Field")
_T_Schema = t.TypeVar("_T_Schema")
-def is_schema(obj: t.Any) -> t.TypeGuard[t.Type["Schema"]]:
+def is_schema(
+ obj: t.Any,
+) -> t.TypeGuard[t.Type["Schema"]]: # type: ignore # PORT: Remove this comment when stop supporting 3.9
return inspect.isclass(obj) and issubclass(obj, Schema)
diff --git a/flama/url.py b/flama/url.py
index 3ae335ee..92079a4a 100644
--- a/flama/url.py
+++ b/flama/url.py
@@ -6,6 +6,8 @@
import uuid
from decimal import Decimal
+T = t.TypeVar("T", bound=t.Union[int, str, float, Decimal, uuid.UUID])
+
@dataclasses.dataclass
class URL:
@@ -46,8 +48,11 @@ def url(self) -> str:
"""
return str(urllib.parse.urlunparse(tuple(self.components.values())))
+ def __str__(self) -> str:
+ return self.url
-T = t.TypeVar("T", bound=t.Union[int, str, float, Decimal, uuid.UUID])
+ def __repr__(self) -> str:
+ return f"URL('{self.url}')"
class ParamSerializer(t.Generic[T], metaclass=abc.ABCMeta):
diff --git a/flama/validation.py b/flama/validation.py
index 1ecab0e3..bb7f7a32 100644
--- a/flama/validation.py
+++ b/flama/validation.py
@@ -27,7 +27,8 @@ async def resolve(self, request: http.Request) -> types.RequestData:
raise exceptions.HTTPException(415)
try:
- return types.RequestData(await codec.decode(request))
+ data = await codec.decode(request)
+ return types.RequestData(data) if data else None # type: ignore
except exceptions.DecodeError as exc:
raise exceptions.HTTPException(400, detail=str(exc))
diff --git a/flama/websockets.py b/flama/websockets.py
index 292a4273..cc421a7c 100644
--- a/flama/websockets.py
+++ b/flama/websockets.py
@@ -5,11 +5,7 @@
if t.TYPE_CHECKING:
from flama import types
-__all__ = [
- "WebSocket",
- "Close",
- "State",
-]
+__all__ = ["WebSocket", "Close", "State"]
State = starlette.websockets.WebSocketState
diff --git a/poetry.lock b/poetry.lock
index 3a51a4b5..56ad1b98 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -14,110 +14,108 @@ files = [
[[package]]
name = "aiohttp"
-version = "3.8.4"
+version = "3.8.5"
description = "Async http client/server framework (asyncio)"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
- {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"},
- {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"},
- {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"},
- {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"},
- {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"},
- {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"},
- {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"},
- {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"},
- {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"},
- {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"},
- {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"},
- {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"},
- {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"},
- {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"},
- {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"},
- {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"},
- {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"},
- {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"},
- {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"},
- {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"},
- {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"},
- {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"},
- {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"},
- {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"},
- {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"},
- {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"},
- {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"},
- {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"},
- {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"},
- {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"},
- {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"},
- {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"},
- {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"},
- {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"},
- {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"},
- {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"},
- {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"},
- {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"},
- {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"},
- {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"},
- {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"},
- {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"},
- {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"},
- {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"},
- {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"},
- {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"},
- {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"},
- {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"},
- {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"},
- {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"},
- {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"},
- {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"},
- {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"},
- {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"},
- {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"},
- {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"},
- {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"},
- {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"},
- {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"},
- {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"},
- {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"},
- {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"},
- {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"},
- {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"},
- {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"},
- {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"},
- {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"},
- {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"},
- {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"},
- {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"},
- {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"},
- {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"},
- {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"},
- {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"},
- {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"},
- {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"},
- {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"},
- {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"},
- {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"},
- {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"},
- {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"},
- {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"},
- {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"},
- {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"},
- {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"},
- {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"},
- {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"},
+ {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"},
+ {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"},
+ {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"},
+ {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"},
+ {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"},
+ {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"},
+ {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"},
+ {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"},
+ {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"},
]

[package.dependencies]
aiosignal = ">=1.1.2"
async-timeout = ">=4.0.0a3,<5.0"
-asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""}
attrs = ">=17.3.0"
charset-normalizer = ">=2.0,<4.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
yarl = ">=1.0,<2.0"

[package.extras]
@@ -150,35 +148,46 @@ files = [
{file = "aiosqlite-0.19.0.tar.gz", hash = "sha256:95ee77b91c8d2808bd08a59fbebf66270e9090c3d92ffbf260dc0db0b979577d"},
]

-[package.dependencies]
-typing_extensions = {version = ">=4.0", markers = "python_version < \"3.8\""}
-
[package.extras]
dev = ["aiounittest (==1.4.1)", "attribution (==1.6.2)", "black (==23.3.0)", "coverage[toml] (==7.2.3)", "flake8 (==5.0.4)", "flake8-bugbear (==23.3.12)", "flit (==3.7.1)", "mypy (==1.2.0)", "ufmt (==2.1.0)", "usort (==1.0.6)"]
docs = ["sphinx (==6.1.3)", "sphinx-mdinclude (==0.5.3)"]

+[[package]]
+name = "annotated-types"
+version = "0.5.0"
+description = "Reusable constraint types to use with typing.Annotated"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"},
+ {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
+
[[package]]
name = "anyio"
-version = "3.7.0"
+version = "4.0.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"},
- {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"},
+ {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"},
+ {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"},
]

[package.dependencies]
-exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
idna = ">=2.8"
sniffio = ">=1.1"
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}

[package.extras]
-doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"]
-test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (<0.22)"]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (>=0.22)"]

[[package]]
name = "apispec"
@@ -218,18 +227,18 @@ files = [
[[package]]
name = "asttokens"
-version = "2.2.1"
+version = "2.4.0"
description = "Annotate AST trees with source code positions"
category = "dev"
optional = false
python-versions = "*"
files = [
- {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"},
- {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"},
+ {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"},
+ {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"},
]

[package.dependencies]
-six = "*"
+six = ">=1.12.0"

[package.extras]
test = ["astroid", "pytest"]
@@ -252,44 +261,14 @@ wheel = ">=0.23.0,<1.0"
[[package]]
name = "async-timeout"
-version = "4.0.2"
+version = "4.0.3"
description = "Timeout context manager for asyncio programs"
category = "dev"
optional = false
-python-versions = ">=3.6"
-files = [
- {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
- {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
-]
-
-[package.dependencies]
-typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "asyncmock"
-version = "0.4.2"
-description = "Extension to the standard mock framework to support support async"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "asyncmock-0.4.2-py3-none-any.whl", hash = "sha256:fd8bc4e7813251a8959d1140924ccba3adbbc7af885dba7047c67f73c0b664b1"},
- {file = "asyncmock-0.4.2.tar.gz", hash = "sha256:c251889d542e98fe5f7ece2b5b8643b7d62b50a5657d34a4cbce8a1d5170d750"},
-]
-
-[package.dependencies]
-mock = "*"
-
-[[package]]
-name = "asynctest"
-version = "0.13.0"
-description = "Enhance the standard unittest package with features for testing asyncio libraries"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7"
files = [
- {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
- {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]

[[package]]
@@ -304,9 +283,6 @@ files = [
{file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
]

-[package.dependencies]
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[docs,tests]", "pre-commit"]
@@ -355,7 +331,6 @@ mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}

[package.extras]
@@ -378,14 +353,14 @@ files = [
[[package]]
name = "certifi"
-version = "2023.5.7"
+version = "2023.7.22"
description = "Python package for providing Mozilla's CA Bundle."
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
- {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
+ {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
+ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
]

[[package]]
@@ -467,116 +442,145 @@ pycparser = "*"
[[package]]
name = "cfgv"
-version = "3.3.1"
+version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
category = "dev"
optional = false
-python-versions = ">=3.6.1"
+python-versions = ">=3.8"
files = [
- {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
- {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
+ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]

[[package]]
name = "charset-normalizer"
-version = "3.1.0"
+version = "3.2.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
- {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
+ {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
+ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
]

[[package]]
name = "click"
-version = "8.1.3"
+version = "8.1.7"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
- {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]

[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "cmake"
+version = "3.27.5"
+description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "cmake-3.27.5-py2.py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:fc15d514587e6bae7c3b6616d1eb2792548d34e6910490e20065378b3cd1019e"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2010_i686.manylinux_2_12_i686.whl", hash = "sha256:bc784f6a0f07f517323e1fa2b005302ed8e90d3044a89a1b4b9b1962df2053ed"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:49307970589b2202bd528317a7a4a52bb6e6debca4f4c28e192a6b4965f47687"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:000efab194567cbd5c7f1cb0e839c57b44db01f218f4d318ee5eac0ec72dbd90"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9229170107c5bfadba32cd25c7c6ee5b936649a36c53247d41a39bcdfdcd55d6"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a941c9dc0fda65e2b7cf2657f87802702fd28e677991d629b58a00cced3bed08"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:f47117376ccea3189628a911666a7e23e09efae26c124aebe8f0aa139000dc73"},
+ {file = "cmake-3.27.5-py2.py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:14acb1954b1ec0b398fb5265bc9e9b8785dffc6c3686d031bd1758ca23293162"},
+ {file = "cmake-3.27.5-py2.py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:49385eda22f5b94ffb00c2251a76f4c1d5b5aedf737767e7c44f75b4a1fdd426"},
+ {file = "cmake-3.27.5-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:9ef2b45834e3777ddbb477d21784ffdbe4f09a3d0e0883b55fea0c4cadd6038e"},
+ {file = "cmake-3.27.5-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:f510aa3bc6b2c0b8fb51f22209359a6da7544f668629b01544a5b991cdec82ee"},
+ {file = "cmake-3.27.5-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:014badd3ad7b0bd55ed81d8b60bf435ef2a382e9ad0f03340fea2bb41620b9af"},
+ {file = "cmake-3.27.5-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:256ba48b86bb63bcb61db6de27a5458d66857b10902d8f942e8d3432a58939b4"},
+ {file = "cmake-3.27.5-py2.py3-none-win32.whl", hash = "sha256:ec338a489a80feaf8adfe9bb92eaba93318407ae6088a6dad7896aba0dd698cf"},
+ {file = "cmake-3.27.5-py2.py3-none-win_amd64.whl", hash = "sha256:fa55547922e5dbe0f1edb6d4d23d78feab39366a0507e9eb5f9300f5d6010ab6"},
+ {file = "cmake-3.27.5-py2.py3-none-win_arm64.whl", hash = "sha256:3c93abd817848f81f992cd2c53c9397a353625de2af27b14b23761e5cce99c5d"},
+ {file = "cmake-3.27.5.tar.gz", hash = "sha256:101d4e56154658c974e9425acd31f0fedde3ce68b47263fd14f789b028f8a13a"},
+]
+
+[package.extras]
+test = ["coverage (>=4.2)", "flake8 (>=3.0.4)", "path.py (>=11.5.0)", "pytest (>=3.0.3)", "pytest-cov (>=2.4.0)", "pytest-runner (>=2.9)", "pytest-virtualenv (>=1.7.0)", "scikit-build (>=0.10.0)", "setuptools (>=28.0.0)", "virtualenv (>=15.0.3)", "wheel"]
[[package]]
name = "colorama"
@@ -592,72 +596,64 @@ files = [

[[package]]
name = "coverage"
-version = "7.2.7"
+version = "7.3.1"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
- {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
- {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"},
- {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"},
- {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"},
- {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"},
- {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"},
- {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"},
- {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"},
- {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"},
- {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
- {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
- {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
- {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
- {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
- {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
- {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
- {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"},
- {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"},
- {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"},
- {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"},
- {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"},
- {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"},
- {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"},
- {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"},
- {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"},
- {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"},
- {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"},
- {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"},
- {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"},
- {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"},
- {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"},
- {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"},
- {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"},
- {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"},
- {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"},
- {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"},
- {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"},
- {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"},
- {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"},
- {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"},
- {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"},
- {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"},
- {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"},
- {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"},
- {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"},
- {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
- {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+ {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"},
+ {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"},
+ {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"},
+ {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"},
+ {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"},
+ {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"},
+ {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"},
+ {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"},
+ {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"},
+ {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"},
+ {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"},
+ {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"},
+ {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"},
+ {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"},
+ {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"},
+ {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"},
+ {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"},
+ {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"},
+ {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"},
+ {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"},
+ {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"},
+ {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"},
+ {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"},
+ {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"},
+ {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"},
+ {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"},
+ {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"},
+ {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"},
+ {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"},
+ {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"},
+ {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"},
+ {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"},
+ {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"},
+ {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"},
+ {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"},
+ {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"},
+ {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"},
+ {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"},
+ {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"},
+ {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"},
+ {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"},
+ {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"},
+ {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"},
+ {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"},
+ {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"},
+ {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"},
+ {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"},
+ {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"},
+ {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"},
+ {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"},
+ {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"},
+ {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"},
]

[package.dependencies]
@@ -680,26 +676,26 @@ files = [

[[package]]
name = "distlib"
-version = "0.3.6"
+version = "0.3.7"
description = "Distribution utilities"
category = "dev"
optional = false
python-versions = "*"
files = [
- {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
- {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
+ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
+ {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
]

[[package]]
name = "exceptiongroup"
-version = "1.1.1"
+version = "1.1.3"
description = "Backport of PEP 654 (exception groups)"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
- {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
+ {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
+ {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
]

[package.extras]
@@ -707,18 +703,18 @@ test = ["pytest (>=6)"]

[[package]]
name = "execnet"
-version = "1.9.0"
+version = "2.0.2"
description = "execnet: rapid multi-Python deployment"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7"
files = [
- {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
- {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
+ {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
+ {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
]

[package.extras]
-testing = ["pre-commit"]
+testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]]
name = "executing"
@@ -753,19 +749,20 @@ text-unidecode = "1.3"

[[package]]
name = "filelock"
-version = "3.12.0"
+version = "3.12.4"
description = "A platform independent file lock."
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"},
- {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"},
+ {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
+ {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
]

[package.extras]
-docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
+docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
+typing = ["typing-extensions (>=4.7.1)"]
[[package]]
name = "flatbuffers"
@@ -781,86 +778,73 @@ files = [

[[package]]
name = "frozenlist"
-version = "1.3.3"
+version = "1.4.0"
description = "A list-like structure which implements collections.abc.MutableSequence"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"},
- {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"},
- {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"},
- {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"},
- {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"},
- {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"},
- {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"},
- {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"},
- {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"},
- {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"},
- {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"},
- {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"},
- {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"},
- {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"},
- {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"},
- {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"},
- {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"},
- {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"},
- {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"},
- {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"},
- {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"},
- {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"},
- {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"},
- {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"},
- {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"},
- {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"},
- {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"},
- {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"},
- {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"},
- {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"},
- {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"},
- {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"},
- {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"},
- {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"},
- {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"},
- {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"},
- {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"},
- {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"},
- {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"},
- {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"},
- {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"},
- {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"},
- {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"},
- {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"},
- {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"},
- {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"},
- {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"},
- {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"},
- {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"},
- {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"},
- {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"},
- {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"},
- {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"},
- {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"},
- {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"},
- {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"},
- {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"},
- {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"},
- {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"},
- {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"},
- {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"},
- {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"},
- {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"},
- {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"},
- {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"},
- {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"},
- {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"},
- {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"},
- {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"},
- {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"},
- {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"},
- {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"},
- {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"},
- {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"},
+ {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"},
]

[[package]]
@@ -953,28 +937,28 @@ test = ["backports.socketpair", "cffi (>=1.12.2)", "contextvars (==2.4)", "cover

[[package]]
name = "google-auth"
-version = "2.17.3"
+version = "2.23.0"
description = "Google Authentication Library"
category = "dev"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
+python-versions = ">=3.7"
files = [
- {file = "google-auth-2.17.3.tar.gz", hash = "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc"},
- {file = "google_auth-2.17.3-py2.py3-none-any.whl", hash = "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f"},
+ {file = "google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"},
+ {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"},
]

[package.dependencies]
cachetools = ">=2.0.0,<6.0"
pyasn1-modules = ">=0.2.1"
-rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""}
-six = ">=1.9.0"
+rsa = ">=3.1.4,<5"
+urllib3 = "<2.0"

[package.extras]
-aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
-requests = ["requests (>=2.20.0,<3.0.0dev)"]
+requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
[[package]]
name = "google-auth-oauthlib"
@@ -1087,61 +1071,61 @@ test = ["objgraph", "psutil"]

[[package]]
name = "grpcio"
-version = "1.54.2"
+version = "1.58.0"
description = "HTTP/2-based RPC framework"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "grpcio-1.54.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:40e1cbf69d6741b40f750f3cccc64326f927ac6145a9914d33879e586002350c"},
- {file = "grpcio-1.54.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2288d76e4d4aa7ef3fe7a73c1c470b66ea68e7969930e746a8cd8eca6ef2a2ea"},
- {file = "grpcio-1.54.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c0e3155fc5335ec7b3b70f15230234e529ca3607b20a562b6c75fb1b1218874c"},
- {file = "grpcio-1.54.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bf88004fe086c786dc56ef8dd6cb49c026833fdd6f42cb853008bce3f907148"},
- {file = "grpcio-1.54.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be88c081e33f20630ac3343d8ad9f1125f32987968e9c8c75c051c9800896e8"},
- {file = "grpcio-1.54.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:33d40954199bddbb6a78f8f6f2b2082660f381cd2583ec860a6c2fa7c8400c08"},
- {file = "grpcio-1.54.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b52d00d1793d290c81ad6a27058f5224a7d5f527867e5b580742e1bd211afeee"},
- {file = "grpcio-1.54.2-cp310-cp310-win32.whl", hash = "sha256:881d058c5ccbea7cc2c92085a11947b572498a27ef37d3eef4887f499054dca8"},
- {file = "grpcio-1.54.2-cp310-cp310-win_amd64.whl", hash = "sha256:0212e2f7fdf7592e4b9d365087da30cb4d71e16a6f213120c89b4f8fb35a3ab3"},
- {file = "grpcio-1.54.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:1e623e0cf99a0ac114f091b3083a1848dbc64b0b99e181473b5a4a68d4f6f821"},
- {file = "grpcio-1.54.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:66233ccd2a9371158d96e05d082043d47dadb18cbb294dc5accfdafc2e6b02a7"},
- {file = "grpcio-1.54.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:4cb283f630624ebb16c834e5ac3d7880831b07cbe76cb08ab7a271eeaeb8943e"},
- {file = "grpcio-1.54.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a1e601ee31ef30a9e2c601d0867e236ac54c922d32ed9f727b70dd5d82600d5"},
- {file = "grpcio-1.54.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8da84bbc61a4e92af54dc96344f328e5822d574f767e9b08e1602bb5ddc254a"},
- {file = "grpcio-1.54.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5008964885e8d23313c8e5ea0d44433be9bfd7e24482574e8cc43c02c02fc796"},
- {file = "grpcio-1.54.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2f5a1f1080ccdc7cbaf1171b2cf384d852496fe81ddedeb882d42b85727f610"},
- {file = "grpcio-1.54.2-cp311-cp311-win32.whl", hash = "sha256:b74ae837368cfffeb3f6b498688a123e6b960951be4dec0e869de77e7fa0439e"},
- {file = "grpcio-1.54.2-cp311-cp311-win_amd64.whl", hash = "sha256:8cdbcbd687e576d48f7886157c95052825ca9948c0ed2afdc0134305067be88b"},
- {file = "grpcio-1.54.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:782f4f8662a2157c4190d0f99eaaebc602899e84fb1e562a944e5025929e351c"},
- {file = "grpcio-1.54.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:714242ad0afa63a2e6dabd522ae22e1d76e07060b5af2ddda5474ba4f14c2c94"},
- {file = "grpcio-1.54.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:f900ed4ad7a0f1f05d35f955e0943944d5a75f607a836958c6b8ab2a81730ef2"},
- {file = "grpcio-1.54.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96a41817d2c763b1d0b32675abeb9179aa2371c72aefdf74b2d2b99a1b92417b"},
- {file = "grpcio-1.54.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70fcac7b94f4c904152809a050164650ac81c08e62c27aa9f156ac518029ebbe"},
- {file = "grpcio-1.54.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fd6c6c29717724acf9fc1847c4515d57e4dc12762452457b9cb37461f30a81bb"},
- {file = "grpcio-1.54.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c2392f5b5d84b71d853918687d806c1aa4308109e5ca158a16e16a6be71041eb"},
- {file = "grpcio-1.54.2-cp37-cp37m-win_amd64.whl", hash = "sha256:51630c92591d6d3fe488a7c706bd30a61594d144bac7dee20c8e1ce78294f474"},
- {file = "grpcio-1.54.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:b04202453941a63b36876a7172b45366dc0cde10d5fd7855c0f4a4e673c0357a"},
- {file = "grpcio-1.54.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:89dde0ac72a858a44a2feb8e43dc68c0c66f7857a23f806e81e1b7cc7044c9cf"},
- {file = "grpcio-1.54.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:09d4bfd84686cd36fd11fd45a0732c7628308d094b14d28ea74a81db0bce2ed3"},
- {file = "grpcio-1.54.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fc2b4edb938c8faa4b3c3ea90ca0dd89b7565a049e8e4e11b77e60e4ed2cc05"},
- {file = "grpcio-1.54.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61f7203e2767800edee7a1e1040aaaf124a35ce0c7fe0883965c6b762defe598"},
- {file = "grpcio-1.54.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e416c8baf925b5a1aff31f7f5aecc0060b25d50cce3a5a7255dc5cf2f1d4e5eb"},
- {file = "grpcio-1.54.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dc80c9c6b608bf98066a038e0172013a49cfa9a08d53335aefefda2c64fc68f4"},
- {file = "grpcio-1.54.2-cp38-cp38-win32.whl", hash = "sha256:8d6192c37a30a115f4663592861f50e130caed33efc4eec24d92ec881c92d771"},
- {file = "grpcio-1.54.2-cp38-cp38-win_amd64.whl", hash = "sha256:46a057329938b08e5f0e12ea3d7aed3ecb20a0c34c4a324ef34e00cecdb88a12"},
- {file = "grpcio-1.54.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:2296356b5c9605b73ed6a52660b538787094dae13786ba53080595d52df13a98"},
- {file = "grpcio-1.54.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:c72956972e4b508dd39fdc7646637a791a9665b478e768ffa5f4fe42123d5de1"},
- {file = "grpcio-1.54.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:9bdbb7624d65dc0ed2ed8e954e79ab1724526f09b1efa88dcd9a1815bf28be5f"},
- {file = "grpcio-1.54.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c44e1a765b31e175c391f22e8fc73b2a2ece0e5e6ff042743d8109b5d2eff9f"},
- {file = "grpcio-1.54.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cc928cfe6c360c1df636cf7991ab96f059666ac7b40b75a769410cc6217df9c"},
- {file = "grpcio-1.54.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a08920fa1a97d4b8ee5db2f31195de4a9def1a91bc003544eb3c9e6b8977960a"},
- {file = "grpcio-1.54.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4864f99aac207e3e45c5e26c6cbb0ad82917869abc2f156283be86c05286485c"},
- {file = "grpcio-1.54.2-cp39-cp39-win32.whl", hash = "sha256:b38b3de8cff5bc70f8f9c615f51b48eff7313fc9aca354f09f81b73036e7ddfa"},
- {file = "grpcio-1.54.2-cp39-cp39-win_amd64.whl", hash = "sha256:be48496b0e00460717225e7680de57c38be1d8629dc09dadcd1b3389d70d942b"},
- {file = "grpcio-1.54.2.tar.gz", hash = "sha256:50a9f075eeda5097aa9a182bb3877fe1272875e45370368ac0ee16ab9e22d019"},
-]
-
-[package.extras]
-protobuf = ["grpcio-tools (>=1.54.2)"]
+ {file = "grpcio-1.58.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3e6bebf1dfdbeb22afd95650e4f019219fef3ab86d3fca8ebade52e4bc39389a"},
+ {file = "grpcio-1.58.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:cde11577d5b6fd73a00e6bfa3cf5f428f3f33c2d2878982369b5372bbc4acc60"},
+ {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a2d67ff99e70e86b2be46c1017ae40b4840d09467d5455b2708de6d4c127e143"},
+ {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ed979b273a81de36fc9c6716d9fb09dd3443efa18dcc8652501df11da9583e9"},
+ {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:458899d2ebd55d5ca2350fd3826dfd8fcb11fe0f79828ae75e2b1e6051d50a29"},
+ {file = "grpcio-1.58.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc7ffef430b80345729ff0a6825e9d96ac87efe39216e87ac58c6c4ef400de93"},
+ {file = "grpcio-1.58.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5b23d75e5173faa3d1296a7bedffb25afd2fddb607ef292dfc651490c7b53c3d"},
+ {file = "grpcio-1.58.0-cp310-cp310-win32.whl", hash = "sha256:fad9295fe02455d4f158ad72c90ef8b4bcaadfdb5efb5795f7ab0786ad67dd58"},
+ {file = "grpcio-1.58.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc325fed4d074367bebd465a20763586e5e1ed5b943e9d8bc7c162b1f44fd602"},
+ {file = "grpcio-1.58.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:652978551af02373a5a313e07bfef368f406b5929cf2d50fa7e4027f913dbdb4"},
+ {file = "grpcio-1.58.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:9f13a171281ebb4d7b1ba9f06574bce2455dcd3f2f6d1fbe0fd0d84615c74045"},
+ {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8774219e21b05f750eef8adc416e9431cf31b98f6ce9def288e4cea1548cbd22"},
+ {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09206106848462763f7f273ca93d2d2d4d26cab475089e0de830bb76be04e9e8"},
+ {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62831d5e251dd7561d9d9e83a0b8655084b2a1f8ea91e4bd6b3cedfefd32c9d2"},
+ {file = "grpcio-1.58.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:212f38c6a156862098f6bdc9a79bf850760a751d259d8f8f249fc6d645105855"},
+ {file = "grpcio-1.58.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4b12754af201bb993e6e2efd7812085ddaaef21d0a6f0ff128b97de1ef55aa4a"},
+ {file = "grpcio-1.58.0-cp311-cp311-win32.whl", hash = "sha256:3886b4d56bd4afeac518dbc05933926198aa967a7d1d237a318e6fbc47141577"},
+ {file = "grpcio-1.58.0-cp311-cp311-win_amd64.whl", hash = "sha256:002f228d197fea12797a14e152447044e14fb4fdb2eb5d6cfa496f29ddbf79ef"},
+ {file = "grpcio-1.58.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b5e8db0aff0a4819946215f156bd722b6f6c8320eb8419567ffc74850c9fd205"},
+ {file = "grpcio-1.58.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:201e550b7e2ede113b63e718e7ece93cef5b0fbf3c45e8fe4541a5a4305acd15"},
+ {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:d79b660681eb9bc66cc7cbf78d1b1b9e335ee56f6ea1755d34a31108b80bd3c8"},
+ {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ef8d4a76d2c7d8065aba829f8d0bc0055495c998dce1964ca5b302d02514fb3"},
+ {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cba491c638c76d3dc6c191d9c75041ca5b8f5c6de4b8327ecdcab527f130bb4"},
+ {file = "grpcio-1.58.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6801ff6652ecd2aae08ef994a3e49ff53de29e69e9cd0fd604a79ae4e545a95c"},
+ {file = "grpcio-1.58.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:24edec346e69e672daf12b2c88e95c6f737f3792d08866101d8c5f34370c54fd"},
+ {file = "grpcio-1.58.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7e473a7abad9af48e3ab5f3b5d237d18208024d28ead65a459bd720401bd2f8f"},
+ {file = "grpcio-1.58.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:4891bbb4bba58acd1d620759b3be11245bfe715eb67a4864c8937b855b7ed7fa"},
+ {file = "grpcio-1.58.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:e9f995a8a421405958ff30599b4d0eec244f28edc760de82f0412c71c61763d2"},
+ {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2f85f87e2f087d9f632c085b37440a3169fda9cdde80cb84057c2fc292f8cbdf"},
+ {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb6b92036ff312d5b4182fa72e8735d17aceca74d0d908a7f08e375456f03e07"},
+ {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d81c2b2b24c32139dd2536972f1060678c6b9fbd106842a9fcdecf07b233eccd"},
+ {file = "grpcio-1.58.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fbcecb6aedd5c1891db1d70efbfbdc126c986645b5dd616a045c07d6bd2dfa86"},
+ {file = "grpcio-1.58.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92ae871a902cf19833328bd6498ec007b265aabf2fda845ab5bd10abcaf4c8c6"},
+ {file = "grpcio-1.58.0-cp38-cp38-win32.whl", hash = "sha256:dc72e04620d49d3007771c0e0348deb23ca341c0245d610605dddb4ac65a37cb"},
+ {file = "grpcio-1.58.0-cp38-cp38-win_amd64.whl", hash = "sha256:1c1c5238c6072470c7f1614bf7c774ffde6b346a100521de9ce791d1e4453afe"},
+ {file = "grpcio-1.58.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fe643af248442221db027da43ed43e53b73e11f40c9043738de9a2b4b6ca7697"},
+ {file = "grpcio-1.58.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:128eb1f8e70676d05b1b0c8e6600320fc222b3f8c985a92224248b1367122188"},
+ {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:039003a5e0ae7d41c86c768ef8b3ee2c558aa0a23cf04bf3c23567f37befa092"},
+ {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f061722cad3f9aabb3fbb27f3484ec9d4667b7328d1a7800c3c691a98f16bb0"},
+ {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0af11938acf8cd4cf815c46156bcde36fa5850518120920d52620cc3ec1830"},
+ {file = "grpcio-1.58.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d4cef77ad2fed42b1ba9143465856d7e737279854e444925d5ba45fc1f3ba727"},
+ {file = "grpcio-1.58.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24765a627eb4d9288ace32d5104161c3654128fe27f2808ecd6e9b0cfa7fc8b9"},
+ {file = "grpcio-1.58.0-cp39-cp39-win32.whl", hash = "sha256:f0241f7eb0d2303a545136c59bc565a35c4fc3b924ccbd69cb482f4828d6f31c"},
+ {file = "grpcio-1.58.0-cp39-cp39-win_amd64.whl", hash = "sha256:dcfba7befe3a55dab6fe1eb7fc9359dc0c7f7272b30a70ae0af5d5b063842f28"},
+ {file = "grpcio-1.58.0.tar.gz", hash = "sha256:532410c51ccd851b706d1fbc00a87be0f5312bd6f8e5dbf89d4e99c7f79d7499"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.58.0)"]

[[package]]
name = "h11"
@@ -1155,57 +1139,50 @@ files = [
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]

-[package.dependencies]
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
-
[[package]]
name = "h5py"
-version = "3.8.0"
+version = "3.9.0"
description = "Read and write HDF5 files from Python"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "h5py-3.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:533d7dad466ddb7e3b30af274b630eb7c1a6e4ddf01d1c373a0334dc2152110a"},
- {file = "h5py-3.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c873ba9fd4fa875ad62ce0e4891725e257a8fe7f5abdbc17e51a5d54819be55c"},
- {file = "h5py-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98a240cd4c1bfd568aaa52ec42d263131a2582dab82d74d3d42a0d954cac12be"},
- {file = "h5py-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3389b63222b1c7a158bb7fe69d11ca00066740ec5574596d47a2fe5317f563a"},
- {file = "h5py-3.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:7f3350fc0a8407d668b13247861c2acd23f7f5fe7d060a3ad9b0820f5fcbcae0"},
- {file = "h5py-3.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db03e3f2c716205fbdabb34d0848459840585225eb97b4f08998c743821ca323"},
- {file = "h5py-3.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36761693efbe53df179627a775476dcbc37727d6e920958277a7efbc18f1fb73"},
- {file = "h5py-3.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a506fc223def428f4329e7e1f9fe1c8c593eab226e7c0942c8d75308ad49950"},
- {file = "h5py-3.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33b15aae79e9147aebe1d0e54099cbcde8d65e3e227cd5b59e49b1272aa0e09d"},
- {file = "h5py-3.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f6f6ffadd6bfa9b2c5b334805eb4b19ca0a5620433659d8f7fb86692c40a359"},
- {file = "h5py-3.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8f55d9c6c84d7d09c79fb85979e97b81ec6071cc776a97eb6b96f8f6ec767323"},
- {file = "h5py-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b685453e538b2b5934c58a644ac3f3b3d0cec1a01b6fb26d57388e9f9b674ad0"},
- {file = "h5py-3.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:377865821fe80ad984d003723d6f8890bd54ceeb5981b43c0313b9df95411b30"},
- {file = "h5py-3.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0fef76e10b9216657fa37e7edff6d8be0709b25bd5066474c229b56cf0098df9"},
- {file = "h5py-3.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ffc344ec9984d2cd3ca0265007299a8bac8d85c1ad48f4639d8d3aed2af171"},
- {file = "h5py-3.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bacaa1c16810dd2b3e4417f8e730971b7c4d53d234de61fe4a918db78e80e1e4"},
- {file = "h5py-3.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bae730580ae928de409d63cbe4fdca4c82c3ad2bed30511d19d34e995d63c77e"},
- {file = "h5py-3.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47f757d1b76f0ecb8aa0508ec8d1b390df67a8b67ee2515dc1b046f3a1596ea"},
- {file = "h5py-3.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f891b17e3a3e974e93f9e34e7cca9f530806543571ce078998676a555837d91d"},
- {file = "h5py-3.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:290e00fa2de74a10688d1bac98d5a9cdd43f14f58e562c580b5b3dfbd358ecae"},
- {file = "h5py-3.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:03890b1c123d024fb0239a3279737d5432498c1901c354f8b10d8221d1d16235"},
- {file = "h5py-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7865de06779b14d98068da387333ad9bf2756b5b579cc887fac169bc08f87c3"},
- {file = "h5py-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49bc857635f935fa30e92e61ac1e87496df8f260a6945a3235e43a9890426866"},
- {file = "h5py-3.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:5fd2252d1fc364ba0e93dd0b7089f4906b66805cb4e6aca7fa8874ac08649647"},
- {file = "h5py-3.8.0.tar.gz", hash = "sha256:6fead82f0c4000cf38d53f9c030780d81bfa0220218aee13b90b7701c937d95f"},
-]
-
-[package.dependencies]
-numpy = ">=1.14.5"
+ {file = "h5py-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb7bdd5e601dd1739698af383be03f3dad0465fe67184ebd5afca770f50df9d6"},
+ {file = "h5py-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:78e44686334cbbf2dd21d9df15823bc38663f27a3061f6a032c68a3e30c47bf7"},
+ {file = "h5py-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f68b41efd110ce9af1cbe6fa8af9f4dcbadace6db972d30828b911949e28fadd"},
+ {file = "h5py-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12aa556d540f11a2cae53ea7cfb94017353bd271fb3962e1296b342f6550d1b8"},
+ {file = "h5py-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d97409e17915798029e297a84124705c8080da901307ea58f29234e09b073ddc"},
+ {file = "h5py-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:551e358db05a874a0f827b22e95b30092f2303edc4b91bb62ad2f10e0236e1a0"},
+ {file = "h5py-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6822a814b9d8b8363ff102f76ea8d026f0ca25850bb579d85376029ee3e73b93"},
+ {file = "h5py-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54f01202cdea754ab4227dd27014bdbd561a4bbe4b631424fd812f7c2ce9c6ac"},
+ {file = "h5py-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64acceaf6aff92af091a4b83f6dee3cf8d3061f924a6bb3a33eb6c4658a8348b"},
+ {file = "h5py-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:804c7fb42a34c8ab3a3001901c977a5c24d2e9c586a0f3e7c0a389130b4276fc"},
+ {file = "h5py-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8d9492391ff5c3c80ec30ae2fe82a3f0efd1e750833739c25b0d090e3be1b095"},
+ {file = "h5py-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9da9e7e63376c32704e37ad4cea2dceae6964cee0d8515185b3ab9cbd6b947bc"},
+ {file = "h5py-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e20897c88759cbcbd38fb45b507adc91af3e0f67722aa302d71f02dd44d286"},
+ {file = "h5py-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf5225543ca35ce9f61c950b73899a82be7ba60d58340e76d0bd42bf659235a"},
+ {file = "h5py-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:36408f8c62f50007d14e000f9f3acf77e103b9e932c114cbe52a3089e50ebf94"},
+ {file = "h5py-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:23e74b878bbe1653ab34ca49b83cac85529cd0b36b9d625516c5830cc5ca2eac"},
+ {file = "h5py-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f457089c5d524b7998e3649bc63240679b8fb0a3859ea53bbb06841f3d755f1"},
+ {file = "h5py-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6284061f3214335e1eec883a6ee497dbe7a79f19e6a57fed2dd1f03acd5a8cb"},
+ {file = "h5py-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7a745efd0d56076999b52e8da5fad5d30823bac98b59c68ae75588d09991a"},
+ {file = "h5py-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:79bbca34696c6f9eeeb36a91776070c49a060b2879828e2c8fa6c58b8ed10dd1"},
+ {file = "h5py-3.9.0.tar.gz", hash = "sha256:e604db6521c1e367c6bd7fad239c847f53cc46646f2d2651372d05ae5e95f817"},
+]
+
+[package.dependencies]
+numpy = ">=1.17.3"

[[package]]
name = "httpcore"
-version = "0.17.2"
+version = "0.17.3"
description = "A minimal low-level HTTP client."
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "httpcore-0.17.2-py3-none-any.whl", hash = "sha256:5581b9c12379c4288fe70f43c710d16060c10080617001e6b22a3b6dbcbefd36"},
- {file = "httpcore-0.17.2.tar.gz", hash = "sha256:125f8375ab60036db632f34f4b627a9ad085048eef7cb7d2616fea0f739f98af"},
+ {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"},
+ {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"},
]

[package.dependencies]
@@ -1220,53 +1197,47 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]

[[package]]
name = "httptools"
-version = "0.5.0"
+version = "0.6.0"
description = "A collection of framework independent HTTP protocol utils."
category = "dev"
optional = false
python-versions = ">=3.5.0"
files = [
- {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"},
- {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"},
- {file = "httptools-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449"},
- {file = "httptools-0.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2"},
- {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde"},
- {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a"},
- {file = "httptools-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d"},
- {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e"},
- {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49"},
- {file = "httptools-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9"},
- {file = "httptools-0.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986"},
- {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0"},
- {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f"},
- {file = "httptools-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1"},
- {file = "httptools-0.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f"},
- {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7"},
- {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60"},
- {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca"},
- {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324"},
- {file = "httptools-0.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86"},
- {file = "httptools-0.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b"},
- {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b"},
- {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281"},
- {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b"},
- {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25"},
- {file = "httptools-0.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5"},
- {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"},
- {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c"},
- {file = "httptools-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc"},
- {file = "httptools-0.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63"},
- {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d"},
- {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901"},
- {file = "httptools-0.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd"},
- {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6"},
- {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42"},
- {file = "httptools-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887"},
- {file = "httptools-0.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2"},
- {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142"},
- {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372"},
- {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"},
- {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"},
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339"},
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c"},
+ {file = "httptools-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38"},
+ {file = "httptools-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2"},
+ {file = "httptools-0.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47043a6e0ea753f006a9d0dd076a8f8c99bc0ecae86a0888448eb3076c43d717"},
+ {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a541579bed0270d1ac10245a3e71e5beeb1903b5fbbc8d8b4d4e728d48ff1d"},
+ {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d802e7b2538a9756df5acc062300c160907b02e15ed15ba035b02bce43e89c"},
+ {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:26326e0a8fe56829f3af483200d914a7cd16d8d398d14e36888b56de30bec81a"},
+ {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e41ccac9e77cd045f3e4ee0fc62cbf3d54d7d4b375431eb855561f26ee7a9ec4"},
+ {file = "httptools-0.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e748fc0d5c4a629988ef50ac1aef99dfb5e8996583a73a717fc2cac4ab89932"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201"},
+ {file = "httptools-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d"},
+ {file = "httptools-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd"},
+ {file = "httptools-0.6.0.tar.gz", hash = "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796"},
]

[package.extras]
@@ -1276,7 +1247,7 @@ test = ["Cython (>=0.29.24,<0.30.0)"]
name = "httpx"
version = "0.24.1"
description = "The next generation HTTP client."
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1298,14 +1269,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]

[[package]]
name = "identify"
-version = "2.5.24"
+version = "2.5.29"
description = "File identification library for Python"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"},
- {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"},
+ {file = "identify-2.5.29-py2.py3-none-any.whl", hash = "sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b"},
+ {file = "identify-2.5.29.tar.gz", hash = "sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5"},
]

[package.extras]
@@ -1325,24 +1296,23 @@ files = [

[[package]]
name = "importlib-metadata"
-version = "4.13.0"
+version = "6.8.0"
description = "Read metadata from Python packages"
-category = "main"
+category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
- {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
+ {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
+ {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
]

[package.dependencies]
-typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"

[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]

[[package]]
name = "iniconfig"
@@ -1356,43 +1326,6 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]

-[[package]]
-name = "ipython"
-version = "7.34.0"
-description = "IPython: Productive Interactive Computing"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "ipython-7.34.0-py3-none-any.whl", hash = "sha256:c175d2440a1caff76116eb719d40538fbb316e214eda85c5515c303aacbfb23e"},
- {file = "ipython-7.34.0.tar.gz", hash = "sha256:af3bdb46aa292bce5615b1b2ebc76c2080c5f77f54bda2ec72461317273e7cd6"},
-]
-
-[package.dependencies]
-appnope = {version = "*", markers = "sys_platform == \"darwin\""}
-backcall = "*"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-decorator = "*"
-jedi = ">=0.16"
-matplotlib-inline = "*"
-pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
-pickleshare = "*"
-prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0"
-pygments = "*"
-setuptools = ">=18.5"
-traitlets = ">=4.2"
-
-[package.extras]
-all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"]
-doc = ["Sphinx (>=1.3)"]
-kernel = ["ipykernel"]
-nbconvert = ["nbconvert"]
-nbformat = ["nbformat"]
-notebook = ["ipywidgets", "notebook"]
-parallel = ["ipyparallel"]
-qtconsole = ["qtconsole"]
-test = ["ipykernel", "nbformat", "nose (>=0.10.1)", "numpy (>=1.17)", "pygments", "requests", "testpath"]
-
[[package]]
name = "ipython"
version = "8.12.2"
@@ -1435,31 +1368,31 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa

[[package]]
name = "isort"
-version = "5.11.5"
+version = "5.12.0"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
-python-versions = ">=3.7.0"
+python-versions = ">=3.8.0"
files = [
- {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"},
- {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"},
+ {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
+ {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
]

[package.extras]
-colors = ["colorama (>=0.4.3,<0.5.0)"]
+colors = ["colorama (>=0.4.3)"]
pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
plugins = ["setuptools"]
requirements-deprecated-finder = ["pip-api", "pipreqs"]

[[package]]
name = "jax"
-version = "0.4.11"
+version = "0.4.13"
description = "Differentiate, compile, and transform Numpy code."
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "jax-0.4.11.tar.gz", hash = "sha256:8b1cd443b698339df8d8807578ee141e5b67e36125b3945b146f600177d60d79"},
+ {file = "jax-0.4.13.tar.gz", hash = "sha256:03bfe6749dfe647f16f15f6616638adae6c4a7ca7167c75c21961ecfd3a3baaa"},
]

[package.dependencies]
@@ -1471,36 +1404,35 @@ scipy = ">=1.7"

[package.extras]
australis = ["protobuf (>=3.13,<4)"]
-ci = ["jaxlib (==0.4.10)"]
-cpu = ["jaxlib (==0.4.11)"]
-cuda = ["jaxlib (==0.4.11+cuda11.cudnn86)"]
-cuda11-cudnn82 = ["jaxlib (==0.4.11+cuda11.cudnn82)"]
-cuda11-cudnn86 = ["jaxlib (==0.4.11+cuda11.cudnn86)"]
-cuda11-local = ["jaxlib (==0.4.11+cuda11.cudnn86)"]
-cuda11-pip = ["jaxlib (==0.4.11+cuda11.cudnn86)", "nvidia-cublas-cu11 (>=11.11)", "nvidia-cuda-cupti-cu11 (>=11.8)", "nvidia-cuda-nvcc-cu11 (>=11.8)", "nvidia-cuda-runtime-cu11 (>=11.8)", "nvidia-cudnn-cu11 (>=8.8)", "nvidia-cufft-cu11 (>=10.9)", "nvidia-cusolver-cu11 (>=11.4)", "nvidia-cusparse-cu11 (>=11.7)"]
-cuda12-local = ["jaxlib (==0.4.11+cuda12.cudnn88)"]
-cuda12-pip = ["jaxlib (==0.4.11+cuda12.cudnn88)", "nvidia-cublas-cu12", "nvidia-cuda-cupti-cu12", "nvidia-cuda-nvcc-cu12", "nvidia-cuda-runtime-cu12", "nvidia-cudnn-cu12 (>=8.9)", "nvidia-cufft-cu12", "nvidia-cusolver-cu12", "nvidia-cusparse-cu12"]
-minimum-jaxlib = ["jaxlib (==0.4.7)"]
-tpu = ["jaxlib (==0.4.11)", "libtpu-nightly (==0.1.dev20230531)"]
+ci = ["jaxlib (==0.4.12)"]
+cpu = ["jaxlib (==0.4.13)"]
+cuda = ["jaxlib (==0.4.13+cuda11.cudnn86)"]
+cuda11-cudnn86 = ["jaxlib (==0.4.13+cuda11.cudnn86)"]
+cuda11-local = ["jaxlib (==0.4.13+cuda11.cudnn86)"]
+cuda11-pip = ["jaxlib (==0.4.13+cuda11.cudnn86)", "nvidia-cublas-cu11 (>=11.11)", "nvidia-cuda-cupti-cu11 (>=11.8)", "nvidia-cuda-nvcc-cu11 (>=11.8)", "nvidia-cuda-runtime-cu11 (>=11.8)", "nvidia-cudnn-cu11 (>=8.8)", "nvidia-cufft-cu11 (>=10.9)", "nvidia-cusolver-cu11 (>=11.4)", "nvidia-cusparse-cu11 (>=11.7)"]
+cuda12-local = ["jaxlib (==0.4.13+cuda12.cudnn89)"]
+cuda12-pip = ["jaxlib (==0.4.13+cuda12.cudnn89)", "nvidia-cublas-cu12", "nvidia-cuda-cupti-cu12", "nvidia-cuda-nvcc-cu12", "nvidia-cuda-runtime-cu12", "nvidia-cudnn-cu12 (>=8.9)", "nvidia-cufft-cu12", "nvidia-cusolver-cu12", "nvidia-cusparse-cu12"]
+minimum-jaxlib = ["jaxlib (==0.4.11)"]
+tpu = ["jaxlib (==0.4.13)", "libtpu-nightly (==0.1.dev20230622)"]

[[package]]
name = "jedi"
-version = "0.18.2"
+version = "0.19.0"
description = "An autocompletion tool for Python that can be used for text editors."
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
- {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"},
- {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"},
+ {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"},
+ {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"},
]

[package.dependencies]
-parso = ">=0.8.0,<0.9.0"
+parso = ">=0.8.3,<0.9.0"

[package.extras]
docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
-qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]

[[package]]
@@ -1523,14 +1455,14 @@ i18n = ["Babel (>=2.7)"]

[[package]]
name = "joblib"
-version = "1.2.0"
+version = "1.3.2"
description = "Lightweight pipelining with Python functions"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"},
- {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"},
+ {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"},
+ {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"},
]

[[package]]
@@ -1546,38 +1478,51 @@ files = [

[[package]]
name = "libclang"
-version = "16.0.0"
+version = "16.0.6"
description = "Clang Python Bindings, mirrored from the official LLVM repo: https://github.com/llvm/llvm-project/tree/main/clang/bindings/python, to make the installation process easier."
category = "dev"
optional = false
python-versions = "*"
files = [
- {file = "libclang-16.0.0-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:65258a6bb3e7dc31dc9b26f8d42f53c9d3b959643ade291fcd1aef4855303ca6"},
- {file = "libclang-16.0.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:af55a4aa86fdfe6b2ec68bc8cfe5fdac6c448d591ca7648be86ca17099b41ca8"},
- {file = "libclang-16.0.0-py2.py3-none-manylinux2010_x86_64.whl", hash = "sha256:a043138caaf2cb076ebb060c6281ec95612926645d425c691991fc9df00e8a24"},
- {file = "libclang-16.0.0-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:eb59652cb0559c0e71784ff4c8ba24c14644becc907b1446563ecfaa622d523b"},
- {file = "libclang-16.0.0-py2.py3-none-manylinux2014_armv7l.whl", hash = "sha256:7b6686b67a0daa84b4c614bcc119578329fc4fbb52b919565b7376b507c4793b"},
- {file = "libclang-16.0.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2adce42ae652f312245b8f4eda6f30b4076fb61f7619f2dfd0a0c31dee4c32b9"},
- {file = "libclang-16.0.0-py2.py3-none-win_amd64.whl", hash = "sha256:ee20bf93e3dd330f71fc50cdbf13b92ced0aec8e540be64251db53502a9b33f7"},
- {file = "libclang-16.0.0-py2.py3-none-win_arm64.whl", hash = "sha256:bf4628fc4da7a1dd06a244f9b8e121c5ec68076a763c59d6b13cbb103acc935b"},
+ {file = "libclang-16.0.6-py2.py3-none-macosx_10_9_x86_64.whl", hash = "sha256:da9e47ebc3f0a6d90fb169ef25f9fbcd29b4a4ef97a8b0e3e3a17800af1423f4"},
+ {file = "libclang-16.0.6-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:e1a5ad1e895e5443e205568c85c04b4608e4e973dae42f4dfd9cb46c81d1486b"},
+ {file = "libclang-16.0.6-py2.py3-none-manylinux2010_x86_64.whl", hash = "sha256:9dcdc730939788b8b69ffd6d5d75fe5366e3ee007f1e36a99799ec0b0c001492"},
+ {file = "libclang-16.0.6-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:8130482120500476a027171f8f3c8dfc2536b591716eea71fc5da22cae13131b"},
+ {file = "libclang-16.0.6-py2.py3-none-manylinux2014_armv7l.whl", hash = "sha256:1e940048f51d0b0999099a9b78629ab8a64b62af5e9ff1b2b062439c21ee244d"},
+ {file = "libclang-16.0.6-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f04e3060ae1f207f234d0608900c99c50edcb743e5e18276d78da2ddd727d39f"},
+ {file = "libclang-16.0.6-py2.py3-none-win_amd64.whl", hash = "sha256:daab4a11dae228f1efa9efa3fe638b493b14d8d52c71fb3c7019e2f1df4514c2"},
+ {file = "libclang-16.0.6-py2.py3-none-win_arm64.whl", hash = "sha256:4a9acbfd9c135a72f80d5dbff7588dfb0c81458244a89b9e83526e8595880e0a"},
+ {file = "libclang-16.0.6.tar.gz", hash = "sha256:4acdde39dfe410c877b4ccc0d4b57eb952100e4ee26bbdf6cfdb88e2033a7d31"},
+]
+
+[[package]]
+name = "lit"
+version = "16.0.6"
+description = "A Software Testing Tool"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "lit-16.0.6.tar.gz", hash = "sha256:84623c9c23b6b14763d637f4e63e6b721b3446ada40bf7001d8fee70b8e77a9a"},
]

[[package]]
name = "markdown"
-version = "3.4.3"
+version = "3.4.4"
description = "Python implementation of John Gruber's Markdown."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "Markdown-3.4.3-py3-none-any.whl", hash = "sha256:065fd4df22da73a625f14890dd77eb8040edcbd68794bcd35943be14490608b2"},
- {file = "Markdown-3.4.3.tar.gz", hash = "sha256:8bf101198e004dc93e84a12a7395e31aac6a9c9942848ae1d99b9d72cf9b3520"},
+ {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"},
+ {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"},
]

[package.dependencies]
importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}

[package.extras]
+docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"]
testing = ["coverage", "pyyaml"]

[[package]]
@@ -1642,23 +1587,23 @@ files = [

[[package]]
name = "marshmallow"
-version = "3.19.0"
+version = "3.20.1"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"},
- {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"},
+ {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"},
+ {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"},
]

[package.dependencies]
packaging = ">=17.0"

[package.extras]
-dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"]
+dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
+docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
+lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"]
tests = ["pytest", "pytz", "simplejson"]

[[package]]
@@ -1714,21 +1659,22 @@ numpy = [
dev = ["absl-py", "pyink", "pylint (>=2.6.0)", "pytest", "pytest-xdist"]

[[package]]
-name = "mock"
-version = "5.0.2"
-description = "Rolling backport of unittest.mock for all Pythons"
+name = "mpmath"
+version = "1.3.0"
+description = "Python library for arbitrary-precision floating-point arithmetic"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = "*"
files = [
- {file = "mock-5.0.2-py3-none-any.whl", hash = "sha256:0e0bc5ba78b8db3667ad636d964eb963dc97a59f04c6f6214c5f0e4a8f726c56"},
- {file = "mock-5.0.2.tar.gz", hash = "sha256:06f18d7d65b44428202b145a9a36e99c2ee00d1eb992df0caf881d4664377891"},
+ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
+ {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
]

[package.extras]
-build = ["blurb", "twine", "wheel"]
+develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
docs = ["sphinx"]
-test = ["pytest", "pytest-cov"]
+gmpy = ["gmpy2 (>=2.1.0a4)"]
+tests = ["pytest (>=4.6)"]

[[package]]
name = "multidict"
@@ -1814,54 +1760,6 @@ files = [
{file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
]

-[[package]]
-name = "mypy"
-version = "1.3.0"
-description = "Optional static typing for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"},
- {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"},
- {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"},
- {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"},
- {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"},
- {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"},
- {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"},
- {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"},
- {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"},
- {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"},
- {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"},
- {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"},
- {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"},
- {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"},
- {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"},
- {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"},
- {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"},
- {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"},
- {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"},
- {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"},
- {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"},
- {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"},
- {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"},
- {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"},
- {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"},
- {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"},
-]
-
-[package.dependencies]
-mypy-extensions = ">=1.0.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
-typing-extensions = ">=3.10"
-
-[package.extras]
-dmypy = ["psutil (>=4.0)"]
-install-types = ["pip"]
-python2 = ["typed-ast (>=1.4.0,<2)"]
-reports = ["lxml"]
-
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -1874,6 +1772,25 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]

+[[package]]
+name = "networkx"
+version = "3.1"
+description = "Python package for creating and manipulating graphs and networks"
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"},
+ {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"},
+]
+
+[package.extras]
+default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"]
+developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"]
+doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"]
+extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"]
+test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"]
+
[[package]]
name = "nodeenv"
version = "1.8.0"
@@ -1943,6 +1860,22 @@ files = [
setuptools = "*"
wheel = "*"

+[[package]]
+name = "nvidia-cuda-cupti-cu11"
+version = "11.7.101"
+description = "CUDA profiling tools runtime libs."
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl", hash = "sha256:e0cfd9854e1f2edaa36ca20d21cd0bdd5dcfca4e3b9e130a082e05b33b6c5895"},
+ {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-win_amd64.whl", hash = "sha256:7cc5b8f91ae5e1389c3c0ad8866b3b016a175e827ea8f162a672990a402ab2b0"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
[[package]]
name = "nvidia-cuda-nvrtc-cu11"
version = "11.7.99"
@@ -1992,6 +1925,94 @@ files = [
setuptools = "*"
wheel = "*"

+[[package]]
+name = "nvidia-cufft-cu11"
+version = "10.9.0.58"
+description = "CUFFT native runtime libraries"
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl", hash = "sha256:222f9da70c80384632fd6035e4c3f16762d64ea7a843829cb278f98b3cb7dd81"},
+ {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-win_amd64.whl", hash = "sha256:c4d316f17c745ec9c728e30409612eaf77a8404c3733cdf6c9c1569634d1ca03"},
+]
+
+[[package]]
+name = "nvidia-curand-cu11"
+version = "10.2.10.91"
+description = "CURAND native runtime libraries"
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:eecb269c970fa599a2660c9232fa46aaccbf90d9170b96c462e13bcb4d129e2c"},
+ {file = "nvidia_curand_cu11-10.2.10.91-py3-none-win_amd64.whl", hash = "sha256:f742052af0e1e75523bde18895a9ed016ecf1e5aa0ecddfcc3658fd11a1ff417"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cusolver-cu11"
+version = "11.4.0.1"
+description = "CUDA solver native runtime libraries"
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:72fa7261d755ed55c0074960df5904b65e2326f7adce364cbe4945063c1be412"},
+ {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:700b781bfefd57d161443aff9ace1878584b93e0b2cfef3d6e9296d96febbf99"},
+ {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-win_amd64.whl", hash = "sha256:00f70b256add65f8c1eb3b6a65308795a93e7740f6df9e273eccbba770d370c4"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cusparse-cu11"
+version = "11.7.4.91"
+description = "CUSPARSE native runtime libraries"
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:a3389de714db63321aa11fbec3919271f415ef19fda58aed7f2ede488c32733d"},
+ {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-win_amd64.whl", hash = "sha256:304a01599534f5186a8ed1c3756879282c72c118bc77dd890dc1ff868cad25b9"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-nccl-cu11"
+version = "2.14.3"
+description = "NVIDIA Collective Communication Library (NCCL) Runtime"
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:5e5534257d1284b8e825bc3a182c6f06acd6eb405e9f89d49340e98cd8f136eb"},
+]
+
+[[package]]
+name = "nvidia-nvtx-cu11"
+version = "11.7.91"
+description = "NVIDIA Tools Extension"
+category = "dev"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:b22c64eee426a62fc00952b507d6d29cf62b4c9df7a480fcc417e540e05fd5ac"},
+ {file = "nvidia_nvtx_cu11-11.7.91-py3-none-win_amd64.whl", hash = "sha256:dfd7fcb2a91742513027d63a26b757f38dd8b07fecac282c4d132a9d373ff064"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
[[package]]
name = "oauthlib"
version = "3.2.2"
@@ -2058,14 +2079,14 @@ testing = ["docopt", "pytest (<6.0.0)"]

[[package]]
name = "pathspec"
-version = "0.11.1"
+version = "0.11.2"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
- {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
+ {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
+ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
]

[[package]]
@@ -2097,38 +2118,32 @@ files = [

[[package]]
name = "platformdirs"
-version = "3.5.1"
+version = "3.10.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"},
- {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"},
+ {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"},
+ {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"},
]

-[package.dependencies]
-typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""}
-
[package.extras]
-docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]

[[package]]
name = "pluggy"
-version = "1.0.0"
+version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
- {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
+ {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]

-[package.dependencies]
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
-
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
@@ -2148,21 +2163,20 @@ files = [
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"

[[package]]
name = "prompt-toolkit"
-version = "3.0.38"
+version = "3.0.39"
description = "Library for building powerful interactive command lines in Python"
category = "dev"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"},
- {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"},
+ {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"},
+ {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"},
]

[package.dependencies]
@@ -2170,25 +2184,25 @@ wcwidth = "*"

[[package]]
name = "protobuf"
-version = "4.23.2"
+version = "4.24.3"
description = ""
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "protobuf-4.23.2-cp310-abi3-win32.whl", hash = "sha256:384dd44cb4c43f2ccddd3645389a23ae61aeb8cfa15ca3a0f60e7c3ea09b28b3"},
- {file = "protobuf-4.23.2-cp310-abi3-win_amd64.whl", hash = "sha256:09310bce43353b46d73ba7e3bca78273b9bc50349509b9698e64d288c6372c2a"},
- {file = "protobuf-4.23.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2cfab63a230b39ae603834718db74ac11e52bccaaf19bf20f5cce1a84cf76df"},
- {file = "protobuf-4.23.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:c52cfcbfba8eb791255edd675c1fe6056f723bf832fa67f0442218f8817c076e"},
- {file = "protobuf-4.23.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:86df87016d290143c7ce3be3ad52d055714ebaebb57cc659c387e76cfacd81aa"},
- {file = "protobuf-4.23.2-cp37-cp37m-win32.whl", hash = "sha256:281342ea5eb631c86697e1e048cb7e73b8a4e85f3299a128c116f05f5c668f8f"},
- {file = "protobuf-4.23.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ce744938406de1e64b91410f473736e815f28c3b71201302612a68bf01517fea"},
- {file = "protobuf-4.23.2-cp38-cp38-win32.whl", hash = "sha256:6c081863c379bb1741be8f8193e893511312b1d7329b4a75445d1ea9955be69e"},
- {file = "protobuf-4.23.2-cp38-cp38-win_amd64.whl", hash = "sha256:25e3370eda26469b58b602e29dff069cfaae8eaa0ef4550039cc5ef8dc004511"},
- {file = "protobuf-4.23.2-cp39-cp39-win32.whl", hash = "sha256:efabbbbac1ab519a514579ba9ec52f006c28ae19d97915951f69fa70da2c9e91"},
- {file = "protobuf-4.23.2-cp39-cp39-win_amd64.whl", hash = "sha256:54a533b971288af3b9926e53850c7eb186886c0c84e61daa8444385a4720297f"},
- {file = "protobuf-4.23.2-py3-none-any.whl", hash = "sha256:8da6070310d634c99c0db7df48f10da495cc283fd9e9234877f0cd182d43ab7f"},
- {file = "protobuf-4.23.2.tar.gz", hash = "sha256:20874e7ca4436f683b64ebdbee2129a5a2c301579a67d1a7dda2cdf62fb7f5f7"},
+ {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
+ {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
+ {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
+ {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
+ {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
+ {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
+ {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
+ {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
+ {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
+ {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
+ {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
+ {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
+ {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
]
[[package]]
@@ -2286,95 +2300,199 @@ files = [
[[package]]
name = "pydantic"
-version = "1.10.9"
-description = "Data validation and settings management using python type hints"
+version = "2.3.0"
+description = "Data validation using Python type hints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"},
- {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"},
- {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"},
- {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"},
- {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"},
- {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"},
- {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"},
- {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"},
- {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"},
- {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"},
- {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"},
- {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"},
- {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"},
- {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"},
- {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"},
- {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"},
- {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"},
- {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"},
- {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"},
- {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"},
- {file = "pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"},
- {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"},
- {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"},
- {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"},
- {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"},
- {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"},
- {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"},
- {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"},
- {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"},
- {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"},
- {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"},
- {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"},
- {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"},
- {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"},
- {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"},
- {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"},
+ {file = "pydantic-2.3.0-py3-none-any.whl", hash = "sha256:45b5e446c6dfaad9444819a293b921a40e1db1aa61ea08aede0522529ce90e81"},
+ {file = "pydantic-2.3.0.tar.gz", hash = "sha256:1607cc106602284cd4a00882986570472f193fde9cb1259bceeaedb26aa79a6d"},
]
[package.dependencies]
-typing-extensions = ">=4.2.0"
+annotated-types = ">=0.4.0"
+pydantic-core = "2.6.3"
+typing-extensions = ">=4.6.1"
[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.6.3"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic_core-2.6.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a0ddaa723c48af27d19f27f1c73bdc615c73686d763388c8683fe34ae777bad"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5cfde4fab34dd1e3a3f7f3db38182ab6c95e4ea91cf322242ee0be5c2f7e3d2f"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a7027bfc6b108e17c3383959485087d5942e87eb62bbac69829eae9bc1f7"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84e87c16f582f5c753b7f39a71bd6647255512191be2d2dbf49458c4ef024588"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:522a9c4a4d1924facce7270c84b5134c5cabcb01513213662a2e89cf28c1d309"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaafc776e5edc72b3cad1ccedb5fd869cc5c9a591f1213aa9eba31a781be9ac1"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a750a83b2728299ca12e003d73d1264ad0440f60f4fc9cee54acc489249b728"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e8b374ef41ad5c461efb7a140ce4730661aadf85958b5c6a3e9cf4e040ff4bb"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b594b64e8568cf09ee5c9501ede37066b9fc41d83d58f55b9952e32141256acd"},
+ {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2a20c533cb80466c1d42a43a4521669ccad7cf2967830ac62c2c2f9cece63e7e"},
+ {file = "pydantic_core-2.6.3-cp310-none-win32.whl", hash = "sha256:04fe5c0a43dec39aedba0ec9579001061d4653a9b53a1366b113aca4a3c05ca7"},
+ {file = "pydantic_core-2.6.3-cp310-none-win_amd64.whl", hash = "sha256:6bf7d610ac8f0065a286002a23bcce241ea8248c71988bda538edcc90e0c39ad"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bcc1ad776fffe25ea5c187a028991c031a00ff92d012ca1cc4714087e575973"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:df14f6332834444b4a37685810216cc8fe1fe91f447332cd56294c984ecbff1c"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b7486d85293f7f0bbc39b34e1d8aa26210b450bbd3d245ec3d732864009819"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a892b5b1871b301ce20d40b037ffbe33d1407a39639c2b05356acfef5536d26a"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:883daa467865e5766931e07eb20f3e8152324f0adf52658f4d302242c12e2c32"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4eb77df2964b64ba190eee00b2312a1fd7a862af8918ec70fc2d6308f76ac64"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce8c84051fa292a5dc54018a40e2a1926fd17980a9422c973e3ebea017aa8da"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22134a4453bd59b7d1e895c455fe277af9d9d9fbbcb9dc3f4a97b8693e7e2c9b"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:02e1c385095efbd997311d85c6021d32369675c09bcbfff3b69d84e59dc103f6"},
+ {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d79f1f2f7ebdb9b741296b69049ff44aedd95976bfee38eb4848820628a99b50"},
+ {file = "pydantic_core-2.6.3-cp311-none-win32.whl", hash = "sha256:430ddd965ffd068dd70ef4e4d74f2c489c3a313adc28e829dd7262cc0d2dd1e8"},
+ {file = "pydantic_core-2.6.3-cp311-none-win_amd64.whl", hash = "sha256:84f8bb34fe76c68c9d96b77c60cef093f5e660ef8e43a6cbfcd991017d375950"},
+ {file = "pydantic_core-2.6.3-cp311-none-win_arm64.whl", hash = "sha256:5a2a3c9ef904dcdadb550eedf3291ec3f229431b0084666e2c2aa8ff99a103a2"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8421cf496e746cf8d6b677502ed9a0d1e4e956586cd8b221e1312e0841c002d5"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bb128c30cf1df0ab78166ded1ecf876620fb9aac84d2413e8ea1594b588c735d"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a822f630712817b6ecc09ccc378192ef5ff12e2c9bae97eb5968a6cdf3b862"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240a015102a0c0cc8114f1cba6444499a8a4d0333e178bc504a5c2196defd456"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f90e5e3afb11268628c89f378f7a1ea3f2fe502a28af4192e30a6cdea1e7d5e"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:340e96c08de1069f3d022a85c2a8c63529fd88709468373b418f4cf2c949fb0e"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1480fa4682e8202b560dcdc9eeec1005f62a15742b813c88cdc01d44e85308e5"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f14546403c2a1d11a130b537dda28f07eb6c1805a43dae4617448074fd49c282"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a87c54e72aa2ef30189dc74427421e074ab4561cf2bf314589f6af5b37f45e6d"},
+ {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f93255b3e4d64785554e544c1c76cd32f4a354fa79e2eeca5d16ac2e7fdd57aa"},
+ {file = "pydantic_core-2.6.3-cp312-none-win32.whl", hash = "sha256:f70dc00a91311a1aea124e5f64569ea44c011b58433981313202c46bccbec0e1"},
+ {file = "pydantic_core-2.6.3-cp312-none-win_amd64.whl", hash = "sha256:23470a23614c701b37252618e7851e595060a96a23016f9a084f3f92f5ed5881"},
+ {file = "pydantic_core-2.6.3-cp312-none-win_arm64.whl", hash = "sha256:1ac1750df1b4339b543531ce793b8fd5c16660a95d13aecaab26b44ce11775e9"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a53e3195f134bde03620d87a7e2b2f2046e0e5a8195e66d0f244d6d5b2f6d31b"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:f2969e8f72c6236c51f91fbb79c33821d12a811e2a94b7aa59c65f8dbdfad34a"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:672174480a85386dd2e681cadd7d951471ad0bb028ed744c895f11f9d51b9ebe"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:002d0ea50e17ed982c2d65b480bd975fc41086a5a2f9c924ef8fc54419d1dea3"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ccc13afee44b9006a73d2046068d4df96dc5b333bf3509d9a06d1b42db6d8bf"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:439a0de139556745ae53f9cc9668c6c2053444af940d3ef3ecad95b079bc9987"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63b7545d489422d417a0cae6f9898618669608750fc5e62156957e609e728a5"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b44c42edc07a50a081672e25dfe6022554b47f91e793066a7b601ca290f71e42"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c721bfc575d57305dd922e6a40a8fe3f762905851d694245807a351ad255c58"},
+ {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e4a2cf8c4543f37f5dc881de6c190de08096c53986381daebb56a355be5dfe6"},
+ {file = "pydantic_core-2.6.3-cp37-none-win32.whl", hash = "sha256:d9b4916b21931b08096efed090327f8fe78e09ae8f5ad44e07f5c72a7eedb51b"},
+ {file = "pydantic_core-2.6.3-cp37-none-win_amd64.whl", hash = "sha256:a8acc9dedd304da161eb071cc7ff1326aa5b66aadec9622b2574ad3ffe225525"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5e9c068f36b9f396399d43bfb6defd4cc99c36215f6ff33ac8b9c14ba15bdf6b"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e61eae9b31799c32c5f9b7be906be3380e699e74b2db26c227c50a5fc7988698"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85463560c67fc65cd86153a4975d0b720b6d7725cf7ee0b2d291288433fc21b"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9616567800bdc83ce136e5847d41008a1d602213d024207b0ff6cab6753fe645"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e9b65a55bbabda7fccd3500192a79f6e474d8d36e78d1685496aad5f9dbd92c"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f468d520f47807d1eb5d27648393519655eadc578d5dd862d06873cce04c4d1b"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9680dd23055dd874173a3a63a44e7f5a13885a4cfd7e84814be71be24fba83db"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a718d56c4d55efcfc63f680f207c9f19c8376e5a8a67773535e6f7e80e93170"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8ecbac050856eb6c3046dea655b39216597e373aa8e50e134c0e202f9c47efec"},
+ {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:788be9844a6e5c4612b74512a76b2153f1877cd845410d756841f6c3420230eb"},
+ {file = "pydantic_core-2.6.3-cp38-none-win32.whl", hash = "sha256:07a1aec07333bf5adebd8264047d3dc518563d92aca6f2f5b36f505132399efc"},
+ {file = "pydantic_core-2.6.3-cp38-none-win_amd64.whl", hash = "sha256:621afe25cc2b3c4ba05fff53525156d5100eb35c6e5a7cf31d66cc9e1963e378"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:813aab5bfb19c98ae370952b6f7190f1e28e565909bfc219a0909db168783465"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:50555ba3cb58f9861b7a48c493636b996a617db1a72c18da4d7f16d7b1b9952b"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e20f8baedd7d987bd3f8005c146e6bcbda7cdeefc36fad50c66adb2dd2da48"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0a5d7edb76c1c57b95df719af703e796fc8e796447a1da939f97bfa8a918d60"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f06e21ad0b504658a3a9edd3d8530e8cea5723f6ea5d280e8db8efc625b47e49"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea053cefa008fda40f92aab937fb9f183cf8752e41dbc7bc68917884454c6362"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:171a4718860790f66d6c2eda1d95dd1edf64f864d2e9f9115840840cf5b5713f"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ed7ceca6aba5331ece96c0e328cd52f0dcf942b8895a1ed2642de50800b79d3"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:acafc4368b289a9f291e204d2c4c75908557d4f36bd3ae937914d4529bf62a76"},
+ {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1aa712ba150d5105814e53cb141412217146fedc22621e9acff9236d77d2a5ef"},
+ {file = "pydantic_core-2.6.3-cp39-none-win32.whl", hash = "sha256:44b4f937b992394a2e81a5c5ce716f3dcc1237281e81b80c748b2da6dd5cf29a"},
+ {file = "pydantic_core-2.6.3-cp39-none-win_amd64.whl", hash = "sha256:9b33bf9658cb29ac1a517c11e865112316d09687d767d7a0e4a63d5c640d1b17"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d7050899026e708fb185e174c63ebc2c4ee7a0c17b0a96ebc50e1f76a231c057"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99faba727727b2e59129c59542284efebbddade4f0ae6a29c8b8d3e1f437beb7"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fa159b902d22b283b680ef52b532b29554ea2a7fc39bf354064751369e9dbd7"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:046af9cfb5384f3684eeb3f58a48698ddab8dd870b4b3f67f825353a14441418"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:930bfe73e665ebce3f0da2c6d64455098aaa67e1a00323c74dc752627879fc67"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85cc4d105747d2aa3c5cf3e37dac50141bff779545ba59a095f4a96b0a460e70"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b25afe9d5c4f60dcbbe2b277a79be114e2e65a16598db8abee2a2dcde24f162b"},
+ {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e49ce7dc9f925e1fb010fc3d555250139df61fa6e5a0a95ce356329602c11ea9"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2dd50d6a1aef0426a1d0199190c6c43ec89812b1f409e7fe44cb0fbf6dfa733c"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6595b0d8c8711e8e1dc389d52648b923b809f68ac1c6f0baa525c6440aa0daa"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ef724a059396751aef71e847178d66ad7fc3fc969a1a40c29f5aac1aa5f8784"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3c8945a105f1589ce8a693753b908815e0748f6279959a4530f6742e1994dcb6"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c8c6660089a25d45333cb9db56bb9e347241a6d7509838dbbd1931d0e19dbc7f"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:692b4ff5c4e828a38716cfa92667661a39886e71136c97b7dac26edef18767f7"},
+ {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f1a5d8f18877474c80b7711d870db0eeef9442691fcdb00adabfc97e183ee0b0"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3796a6152c545339d3b1652183e786df648ecdf7c4f9347e1d30e6750907f5bb"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b962700962f6e7a6bd77e5f37320cabac24b4c0f76afeac05e9f93cf0c620014"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ea80269077003eaa59723bac1d8bacd2cd15ae30456f2890811efc1e3d4413"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c0ebbebae71ed1e385f7dfd9b74c1cff09fed24a6df43d326dd7f12339ec34"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:252851b38bad3bfda47b104ffd077d4f9604a10cb06fe09d020016a25107bf98"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6656a0ae383d8cd7cc94e91de4e526407b3726049ce8d7939049cbfa426518c8"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9140ded382a5b04a1c030b593ed9bf3088243a0a8b7fa9f071a5736498c5483"},
+ {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d38bbcef58220f9c81e42c255ef0bf99735d8f11edef69ab0b499da77105158a"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c9d469204abcca28926cbc28ce98f28e50e488767b084fb3fbdf21af11d3de26"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48c1ed8b02ffea4d5c9c220eda27af02b8149fe58526359b3c07eb391cb353a2"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2b1bfed698fa410ab81982f681f5b1996d3d994ae8073286515ac4d165c2e7"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf9d42a71a4d7a7c1f14f629e5c30eac451a6fc81827d2beefd57d014c006c4a"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4292ca56751aebbe63a84bbfc3b5717abb09b14d4b4442cc43fd7c49a1529efd"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7dc2ce039c7290b4ef64334ec7e6ca6494de6eecc81e21cb4f73b9b39991408c"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:615a31b1629e12445c0e9fc8339b41aaa6cc60bd53bf802d5fe3d2c0cda2ae8d"},
+ {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1fa1f6312fb84e8c281f32b39affe81984ccd484da6e9d65b3d18c202c666149"},
+ {file = "pydantic_core-2.6.3.tar.gz", hash = "sha256:1508f37ba9e3ddc0189e6ff4e2228bd2d3c3a4641cbe8c07177162f76ed696c7"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygments"
-version = "2.15.1"
+version = "2.16.1"
description = "Pygments is a syntax highlighting package written in Python."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
- {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
+ {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
+ {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
]
[package.extras]
plugins = ["importlib-metadata"]
+[[package]]
+name = "pyright"
+version = "1.1.327"
+description = "Command line wrapper for pyright"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyright-1.1.327-py3-none-any.whl", hash = "sha256:3462cda239e9140276238bbdbd0b59d77406f1c2e14d8cb8c20c8e25639c6b3c"},
+ {file = "pyright-1.1.327.tar.gz", hash = "sha256:ba74148ad64f22020dbbed6781c4bdb38ecb8a7ca90dc3c87a4f08d1c0e11592"},
+]
+
+[package.dependencies]
+nodeenv = ">=1.6.0"
+
+[package.extras]
+all = ["twine (>=3.4.1)"]
+dev = ["twine (>=3.4.1)"]
+
[[package]]
name = "pytest"
-version = "7.3.1"
+version = "7.4.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"},
- {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"},
+ {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"},
+ {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
@@ -2390,7 +2508,6 @@ files = [
[package.dependencies]
pytest = ">=6.1.0"
-typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""}
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
@@ -2454,35 +2571,19 @@ six = ">=1.5"
[[package]]
name = "python-dotenv"
-version = "0.21.1"
+version = "1.0.0"
description = "Read key-value pairs from a .env file and set them as environment variables"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"},
- {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"},
+ {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
+ {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
]
[package.extras]
cli = ["click (>=5.0)"]
-[[package]]
-name = "python-forge"
-version = "18.6.0"
-description = "forge (python signatures)"
-category = "main"
-optional = false
-python-versions = "*"
-files = [
- {file = "python_forge-18.6.0-py35-none-any.whl", hash = "sha256:bf91f9a42150d569c2e9a0d90ab60a8cbed378bdf185e5120532a3481067395c"},
-]
-
-[package.extras]
-dev = ["coverage", "mypy", "pylint", "pytest", "sphinx", "sphinx-autodoc-typehints", "sphinx-paramlinks"]
-docs = ["docutils", "requests", "sphinx (>=1.7.4)", "sphinx-paramlinks"]
-testing = ["coverage", "mypy", "pylint", "pytest"]
-
[[package]]
name = "python-multipart"
version = "0.0.5"
@@ -2499,52 +2600,52 @@ six = ">=1.4.0"
[[package]]
name = "pyyaml"
-version = "6.0"
+version = "6.0.1"
description = "YAML parser and emitter for Python"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
- {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
- {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
- {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
- {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
- {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
- {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
- {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
- {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
- {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
- {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
- {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
- {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
- {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
- {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
- {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
- {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
- {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
- {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
- {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
- {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+ {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
@@ -2631,46 +2732,46 @@ files = [
[[package]]
name = "scikit-learn"
-version = "1.2.2"
+version = "1.3.0"
description = "A set of python modules for machine learning and data mining"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "scikit-learn-1.2.2.tar.gz", hash = "sha256:8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7"},
- {file = "scikit_learn-1.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99cc01184e347de485bf253d19fcb3b1a3fb0ee4cea5ee3c43ec0cc429b6d29f"},
- {file = "scikit_learn-1.2.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e6e574db9914afcb4e11ade84fab084536a895ca60aadea3041e85b8ac963edb"},
- {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fe83b676f407f00afa388dd1fdd49e5c6612e551ed84f3b1b182858f09e987d"},
- {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2642baa0ad1e8f8188917423dd73994bf25429f8893ddbe115be3ca3183584"},
- {file = "scikit_learn-1.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ad66c3848c0a1ec13464b2a95d0a484fd5b02ce74268eaa7e0c697b904f31d6c"},
- {file = "scikit_learn-1.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfeaf8be72117eb61a164ea6fc8afb6dfe08c6f90365bde2dc16456e4bc8e45f"},
- {file = "scikit_learn-1.2.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:fe0aa1a7029ed3e1dcbf4a5bc675aa3b1bc468d9012ecf6c6f081251ca47f590"},
- {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:065e9673e24e0dc5113e2dd2b4ca30c9d8aa2fa90f4c0597241c93b63130d233"},
- {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf036ea7ef66115e0d49655f16febfa547886deba20149555a41d28f56fd6d3c"},
- {file = "scikit_learn-1.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:8b0670d4224a3c2d596fd572fb4fa673b2a0ccfb07152688ebd2ea0b8c61025c"},
- {file = "scikit_learn-1.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9c710ff9f9936ba8a3b74a455ccf0dcf59b230caa1e9ba0223773c490cab1e51"},
- {file = "scikit_learn-1.2.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:2dd3ffd3950e3d6c0c0ef9033a9b9b32d910c61bd06cb8206303fb4514b88a49"},
- {file = "scikit_learn-1.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44b47a305190c28dd8dd73fc9445f802b6ea716669cfc22ab1eb97b335d238b1"},
- {file = "scikit_learn-1.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:953236889928d104c2ef14027539f5f2609a47ebf716b8cbe4437e85dce42744"},
- {file = "scikit_learn-1.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:7f69313884e8eb311460cc2f28676d5e400bd929841a2c8eb8742ae78ebf7c20"},
- {file = "scikit_learn-1.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8156db41e1c39c69aa2d8599ab7577af53e9e5e7a57b0504e116cc73c39138dd"},
- {file = "scikit_learn-1.2.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fe175ee1dab589d2e1033657c5b6bec92a8a3b69103e3dd361b58014729975c3"},
- {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d5312d9674bed14f73773d2acf15a3272639b981e60b72c9b190a0cffed5bad"},
- {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea061bf0283bf9a9f36ea3c5d3231ba2176221bbd430abd2603b1c3b2ed85c89"},
- {file = "scikit_learn-1.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6477eed40dbce190f9f9e9d0d37e020815825b300121307942ec2110302b66a3"},
+ {file = "scikit-learn-1.3.0.tar.gz", hash = "sha256:8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981287869e576d42c682cf7ca96af0c6ac544ed9316328fd0d9292795c742cf5"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:436aaaae2c916ad16631142488e4c82f4296af2404f480e031d866863425d2a2"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e28d8fa47a0b30ae1bd7a079519dd852764e31708a7804da6cb6f8b36e3630"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80c08834a473d08a204d966982a62e11c976228d306a2648c575e3ead12111"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:552fd1b6ee22900cf1780d7386a554bb96949e9a359999177cf30211e6b20df6"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79970a6d759eb00a62266a31e2637d07d2d28446fca8079cf9afa7c07b0427f8"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:850a00b559e636b23901aabbe79b73dc604b4e4248ba9e2d6e72f95063765603"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee04835fb016e8062ee9fe9074aef9b82e430504e420bff51e3e5fffe72750ca"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d953531f5d9f00c90c34fa3b7d7cfb43ecff4c605dac9e4255a20b114a27369"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:151ac2bf65ccf363664a689b8beafc9e6aae36263db114b4ca06fbbbf827444a"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a885a9edc9c0a341cab27ec4f8a6c58b35f3d449c9d2503a6fd23e06bbd4f6a"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:9877af9c6d1b15486e18a94101b742e9d0d2f343d35a634e337411ddb57783f3"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c470f53cea065ff3d588050955c492793bb50c19a92923490d18fcb637f6383a"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd6e2d7389542eae01077a1ee0318c4fec20c66c957f45c7aac0c6eb0fe3c612"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:3a11936adbc379a6061ea32fa03338d4ca7248d86dd507c81e13af428a5bc1db"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:998d38fcec96584deee1e79cd127469b3ad6fefd1ea6c2dfc54e8db367eb396b"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ded35e810438a527e17623ac6deae3b360134345b7c598175ab7741720d7ffa7"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e8102d5036e28d08ab47166b48c8d5e5810704daecf3a476a4282d562be9a28"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7617164951c422747e7c32be4afa15d75ad8044f42e7d70d3e2e0429a50e6718"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:1d54fb9e6038284548072df22fd34777e434153f7ffac72c8596f2d6987110dd"},
]
[package.dependencies]
joblib = ">=1.1.1"
numpy = ">=1.17.3"
-scipy = ">=1.3.2"
+scipy = ">=1.5.0"
threadpoolctl = ">=2.0.0"
[package.extras]
benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"]
-docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"]
-examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"]
-tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy (>=0.961)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=5.3.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.16.2)"]
+docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.10.1)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"]
+examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"]
+tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.16.2)"]
[[package]]
name = "scipy"
@@ -2713,20 +2814,20 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo
[[package]]
name = "setuptools"
-version = "67.8.0"
+version = "68.2.2"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"},
- {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"},
+ {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"},
+ {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
@@ -2754,62 +2855,61 @@ files = [
[[package]]
name = "sqlalchemy"
-version = "2.0.15"
+version = "2.0.20"
description = "Database Abstraction Library"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "SQLAlchemy-2.0.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78303719c6f72af97814b0072ad18bee72e70adca8d95cf8fecd59c5e1ddb040"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9d810b4aacd5ef4e293aa4ea01f19fca53999e9edcfc4a8ef1146238b30bdc28"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fb5d09f1d51480f711b69fe28ad42e4f8b08600a85ab2473baee669e1257800"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51b19887c96d405599880da6a7cbdf8545a7e78ec5683e46a43bac8885e32d0f"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b17cb86908e7f88be14007d6afe7d2ab11966e373044137f96a6a4d83eb21c"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df25052b92bd514357a9b370d74f240db890ea79aaa428fb893520e10ee5bc18"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-win32.whl", hash = "sha256:55ec62ddc0200b4fee94d11abbec7aa25948d5d21cb8df8807f4bdd3c51ba44b"},
- {file = "SQLAlchemy-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:ae1d8deb391ab39cc8f0d5844e588a115ae3717e607d91482023917f920f777f"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4670ce853cb25f72115a1bbe366ae13cf3f28fc5c87222df14f8d3d55d51816e"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cea7c4a3dfc2ca61f88a2b1ddd6b0bfbd116c9b1a361b3b66fd826034b833142"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f5784dfb2d45c19cde03c45c04a54bf47428610106197ed6e6fa79f33bc63d3"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b31ebde27575b3b0708673ec14f0c305c4564d995b545148ab7ac0f4d9b847a"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b42913a0259267e9ee335da0c36498077799e59c5e332d506e72b4f32de781d"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a3f8020e013e9b3b7941dcf20b0fc8f7429daaf7158760846731cbd8caa5e45"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-win32.whl", hash = "sha256:88ab245ed2c96265441ed2818977be28c840cfa5204ba167425d6c26eb67b7e7"},
- {file = "SQLAlchemy-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:5cc48a7fda2b5c5b8860494d6c575db3a101a68416492105fed6591dc8a2728a"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f6fd3c88ea4b170d13527e93be1945e69facd917661d3725a63470eb683fbffe"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e885dacb167077df15af2f9ccdacbd7f5dd0d538a6d74b94074f2cefc7bb589"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:201a99f922ac8c780b3929128fbd9df901418877c70e160e19adb05665e51c31"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e17fdcb8971e77c439113642ca8861f9465e21fc693bd3916654ceef3ac26883"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db269f67ed17b07e80aaa8fba1f650c0d84aa0bdd9d5352e4ac38d5bf47ac568"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-win32.whl", hash = "sha256:994a75b197662e0608b6a76935d7c345f7fd874eac0b7093d561033db61b0e8c"},
- {file = "SQLAlchemy-2.0.15-cp37-cp37m-win_amd64.whl", hash = "sha256:4d61731a35eddb0f667774fe15e5a4831e444d066081d1e809e1b8a0e3f97cae"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7f994a53c0e6b44a2966fd6bfc53e37d34b7dca34e75b6be295de6db598255e"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:79bfe728219239bdc493950ea4a4d15b02138ecb304771f9024d0d6f5f4e3706"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6320a1d175447dce63618ec997a53836de48ed3b44bbe952f0b4b399b19941"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f80a9c9a9af0e4bd5080cc0955ce70274c28e9b931ad7e0fb07021afcd32af6"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a75fdb9a84072521bb2ebd31eefe1165d4dccea3039dda701a864f4b5daa17f"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:21c89044fc48a25c2184eba332edeffbbf9367913bb065cd31538235d828f06f"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-win32.whl", hash = "sha256:1a0754c2d9f0c7982bec0a31138e495ed1f6b8435d7e677c45be60ec18370acf"},
- {file = "SQLAlchemy-2.0.15-cp38-cp38-win_amd64.whl", hash = "sha256:bc5c2b0da46c26c5f73f700834f871d0723e1e882641932468d56833bab09775"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:670ecf74ee2e70b917028a06446ad26ff9b1195e84b09c3139c215123d57dc30"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d14282bf5b4de87f922db3c70858953fd081ef4f05dba6cca3dd705daffe1cc9"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:256b2b9660e51ad7055a9835b12717416cf7288afcf465107413917b6bb2316f"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810199d1c5b43603a9e815ae9487aef3ab1ade7ed9c0c485e12519358929fbfe"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:536c86ec81ca89291d533ff41a3a05f9e4e88e01906dcee0751fc7082f3e8d6c"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:435f6807fa6a0597d84741470f19db204a7d34625ea121abd63e8d95f673f0c4"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-win32.whl", hash = "sha256:da7381a883aee20b7d2ffda17d909b38134b6a625920e65239a1c681881df800"},
- {file = "SQLAlchemy-2.0.15-cp39-cp39-win_amd64.whl", hash = "sha256:788d1772fb8dcd12091ca82809eef504ce0f2c423e45284bc351b872966ff554"},
- {file = "SQLAlchemy-2.0.15-py3-none-any.whl", hash = "sha256:933d30273861fe61f014ce2a7e3c364915f5efe9ed250ec1066ca6ea5942c0bd"},
- {file = "SQLAlchemy-2.0.15.tar.gz", hash = "sha256:2e940a8659ef870ae10e0d9e2a6d5aaddf0ff6e91f7d0d7732afc9e8c4be9bbc"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759b51346aa388c2e606ee206c0bc6f15a5299f6174d1e10cadbe4530d3c7a98"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1506e988ebeaaf316f183da601f24eedd7452e163010ea63dbe52dc91c7fc70e"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5768c268df78bacbde166b48be788b83dddaa2a5974b8810af422ddfe68a9bc8"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f0dd6d15b6dc8b28a838a5c48ced7455c3e1fb47b89da9c79cc2090b072a50"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:243d0fb261f80a26774829bc2cee71df3222587ac789b7eaf6555c5b15651eed"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb6d77c31e1bf4268b4d61b549c341cbff9842f8e115ba6904249c20cb78a61"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-win32.whl", hash = "sha256:bcb04441f370cbe6e37c2b8d79e4af9e4789f626c595899d94abebe8b38f9a4d"},
+ {file = "SQLAlchemy-2.0.20-cp310-cp310-win_amd64.whl", hash = "sha256:d32b5ffef6c5bcb452723a496bad2d4c52b346240c59b3e6dba279f6dcc06c14"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd81466bdbc82b060c3c110b2937ab65ace41dfa7b18681fdfad2f37f27acdd7"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe7d61dc71119e21ddb0094ee994418c12f68c61b3d263ebaae50ea8399c4d4"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4e571af672e1bb710b3cc1a9794b55bce1eae5aed41a608c0401885e3491179"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3364b7066b3c7f4437dd345d47271f1251e0cfb0aba67e785343cdbdb0fff08c"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1be86ccea0c965a1e8cd6ccf6884b924c319fcc85765f16c69f1ae7148eba64b"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1d35d49a972649b5080557c603110620a86aa11db350d7a7cb0f0a3f611948a0"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-win32.whl", hash = "sha256:27d554ef5d12501898d88d255c54eef8414576f34672e02fe96d75908993cf53"},
+ {file = "SQLAlchemy-2.0.20-cp311-cp311-win_amd64.whl", hash = "sha256:411e7f140200c02c4b953b3dbd08351c9f9818d2bd591b56d0fa0716bd014f1e"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3c6aceebbc47db04f2d779db03afeaa2c73ea3f8dcd3987eb9efdb987ffa09a3"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d3f175410a6db0ad96b10bfbb0a5530ecd4fcf1e2b5d83d968dd64791f810ed"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8186be85da6587456c9ddc7bf480ebad1a0e6dcbad3967c4821233a4d4df57"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3d99ba99007dab8233f635c32b5cd24fb1df8d64e17bc7df136cedbea427897"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76fdfc0f6f5341987474ff48e7a66c3cd2b8a71ddda01fa82fedb180b961630a"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-win32.whl", hash = "sha256:d3793dcf5bc4d74ae1e9db15121250c2da476e1af8e45a1d9a52b1513a393459"},
+ {file = "SQLAlchemy-2.0.20-cp37-cp37m-win_amd64.whl", hash = "sha256:79fde625a0a55220d3624e64101ed68a059c1c1f126c74f08a42097a72ff66a9"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:599ccd23a7146e126be1c7632d1d47847fa9f333104d03325c4e15440fc7d927"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a58052b5a93425f656675673ef1f7e005a3b72e3f2c91b8acca1b27ccadf5f4"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79543f945be7a5ada9943d555cf9b1531cfea49241809dd1183701f94a748624"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e73da7fb030ae0a46a9ffbeef7e892f5def4baf8064786d040d45c1d6d1dc5"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ce5e81b800a8afc870bb8e0a275d81957e16f8c4b62415a7b386f29a0cb9763"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb0d3e94c2a84215532d9bcf10229476ffd3b08f481c53754113b794afb62d14"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-win32.whl", hash = "sha256:8dd77fd6648b677d7742d2c3cc105a66e2681cc5e5fb247b88c7a7b78351cf74"},
+ {file = "SQLAlchemy-2.0.20-cp38-cp38-win_amd64.whl", hash = "sha256:6f8a934f9dfdf762c844e5164046a9cea25fabbc9ec865c023fe7f300f11ca4a"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:26a3399eaf65e9ab2690c07bd5cf898b639e76903e0abad096cd609233ce5208"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4cde2e1096cbb3e62002efdb7050113aa5f01718035ba9f29f9d89c3758e7e4e"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b09ba72e4e6d341bb5bdd3564f1cea6095d4c3632e45dc69375a1dbe4e26ec"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b74eeafaa11372627ce94e4dc88a6751b2b4d263015b3523e2b1e57291102f0"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77d37c1b4e64c926fa3de23e8244b964aab92963d0f74d98cbc0783a9e04f501"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eefebcc5c555803065128401a1e224a64607259b5eb907021bf9b175f315d2a6"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-win32.whl", hash = "sha256:3423dc2a3b94125094897118b52bdf4d37daf142cbcf26d48af284b763ab90e9"},
+ {file = "SQLAlchemy-2.0.20-cp39-cp39-win_amd64.whl", hash = "sha256:5ed61e3463021763b853628aef8bc5d469fe12d95f82c74ef605049d810f3267"},
+ {file = "SQLAlchemy-2.0.20-py3-none-any.whl", hash = "sha256:63a368231c53c93e2b67d0c5556a9836fdcd383f7e3026a39602aad775b14acf"},
+ {file = "SQLAlchemy-2.0.20.tar.gz", hash = "sha256:ca8a5ff2aa7f3ade6c498aaafce25b1eaeabe4e42b73e25519183e4566a16fc6"},
]
[package.dependencies]
greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""}
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
typing-extensions = ">=4.2.0"
[package.extras]
-aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
@@ -2828,6 +2928,7 @@ postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3-binary"]
@@ -2853,14 +2954,14 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
[[package]]
name = "starlette"
-version = "0.28.0"
+version = "0.31.1"
description = "The little ASGI library that shines."
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "starlette-0.28.0-py3-none-any.whl", hash = "sha256:e58b9fc402c579950260fbb6d57173395c4e62804c40d3ede7e9ef1074f0c579"},
- {file = "starlette-0.28.0.tar.gz", hash = "sha256:7bf3da5e997e796cc202cef2bd3f96a7d9b1e1943203c2fe2b42e020bc658482"},
+ {file = "starlette-0.31.1-py3-none-any.whl", hash = "sha256:009fb98ecd551a55017d204f033c58b13abcd4719cb5c41503abbf6d260fde11"},
+ {file = "starlette-0.31.1.tar.gz", hash = "sha256:a4dc2a3448fb059000868d7eb774dd71229261b6d49b6851e7849bec69c0a011"},
]
[package.dependencies]
@@ -2870,6 +2971,21 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
+[[package]]
+name = "sympy"
+version = "1.12"
+description = "Computer algebra system (CAS) in Python"
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"},
+ {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"},
+]
+
+[package.dependencies]
+mpmath = ">=0.19"
+
[[package]]
name = "tensorboard"
version = "2.12.3"
@@ -2897,15 +3013,15 @@ wheel = ">=0.26"
[[package]]
name = "tensorboard-data-server"
-version = "0.7.0"
+version = "0.7.1"
description = "Fast data loading for TensorBoard"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "tensorboard_data_server-0.7.0-py3-none-any.whl", hash = "sha256:753d4214799b31da7b6d93837959abebbc6afa86e69eacf1e9a317a48daa31eb"},
- {file = "tensorboard_data_server-0.7.0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:eb7fa518737944dbf4f0cf83c2e40a7ac346bf91be2e6a0215de98be74e85454"},
- {file = "tensorboard_data_server-0.7.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64aa1be7c23e80b1a42c13b686eb0875bb70f5e755f4d2b8de5c1d880cf2267f"},
+ {file = "tensorboard_data_server-0.7.1-py3-none-any.whl", hash = "sha256:9938bd39f5041797b33921066fba0eab03a0dd10d1887a05e62ae58841ad4c3f"},
+ {file = "tensorboard_data_server-0.7.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:be8d016a1aa394e6198280d4a3dc37898f56467310c5f5e617cac10a783e055a"},
+ {file = "tensorboard_data_server-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:255c02b7f5b03dd5c0a88c928e563441ff39e1d4b4a234cdbe09f016e53d9594"},
]
[[package]]
@@ -2967,34 +3083,39 @@ files = [
[[package]]
name = "tensorflow-io-gcs-filesystem"
-version = "0.32.0"
+version = "0.34.0"
description = "TensorFlow IO"
category = "dev"
optional = false
python-versions = ">=3.7, <3.12"
files = [
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:74a7e25e83d4117a7ebb09a3f247553a5497393ab48c3ee0cf0d17b405026817"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:045d51bba586390d0545fcd8a18727d62b175eb142f6f4c6d719d39de40774cd"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db682e9a510c27dd35710ba5a2c62c371e25b727741b2fe3a920355fa501e947"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:7f15fd22e592661b10de317be2f42a0f84be7bfc5e6a565fcfcb04b60d625b78"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:336d9b3fe6b55aea149c4f6aa1fd6ffaf27d4e5c37e55a182340b47caba38846"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842f5f09cd756bdb3b4d0b5571b3a6f72fd534d42da938b9acf0ef462995eada"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1ce80e1555d6ee88dda67feddf366cc8b30252b5837a7a17303df7b06a71fc2e"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05e65d3cb6c93a7929b384d86c6369c63cbbab8a770440a3d95e094878403f9f"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:21de7dcc06eb1e7de3c022b0072d90ba35ef886578149663437aa7a6fb5bf6b3"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:79fdd02103b8ae9f8b89af41f744c013fa1caaea709de19833917795e3063857"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5635df0bbe40f971dc1b946e3372744b0bdfda45c38ffcd28ef53a32bb8da4da"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:122be149e5f6a030f5c2901be0cc3cb07619232f7b03889e2cdf3da1c0d4f92f"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8214cdf85bea694160f9035ff395221c1e25e119784ccb4c104919b1f5dec84e"},
- {file = "tensorflow_io_gcs_filesystem-0.32.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28202492d904a6e280cf27560791e87ac1c7566000db82065d63a70c27008af2"},
-]
-
-[package.extras]
-tensorflow = ["tensorflow (>=2.12.0,<2.13.0)"]
-tensorflow-aarch64 = ["tensorflow-aarch64 (>=2.12.0,<2.13.0)"]
-tensorflow-cpu = ["tensorflow-cpu (>=2.12.0,<2.13.0)"]
-tensorflow-gpu = ["tensorflow-gpu (>=2.12.0,<2.13.0)"]
-tensorflow-rocm = ["tensorflow-rocm (>=2.12.0,<2.13.0)"]
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:d831702fbb270996b27cda7fde06e0825b2ea81fd8dd3ead35242f4f8b3889b8"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b9a93fcb01db269bc845a1ced431f3c61201755ce5f9ec4885760f30122276ef"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5813c336b4f7cb0a01ff4cc6cbd3edf11ef67305baf0e3cf634911b702f493f8"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b035f4c92639657b6d376929d550ac3dee9e6c0523eb434eefe0a27bae3d05b"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:a17a616d2c7fae83de4424404815843507d40d4eb0d507c636a5493a20c3d958"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ec4604c99cbb5b708f4516dee27aa655abae222b876c98b740f4c2f89dd5c001"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cbe26c4a3332589c7b724f147df453b5c226993aa8d346a15536358d77b364c4"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e6353123a5b51397950138a118876af833a7db66b531123bb86f82e80ab0e72"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f211d2b3db8f9931765992b607b71cbfb98c8cd6169079d004a67a94ab10ecb4"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:d3feba2dd76f7c188137c34642d68d378f0eed81636cb95090ecb1496722707c"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44ad387a812a78e7424bb8bee3820521ae1c044bddf72b1e163e8df95c124a74"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7f60183473f0ca966451bb1d1bb5dc29b3cf9c74d1d0e7f2ed46760ed56bd4af"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:3f346b287ed2400e09b13cfd8524222fd70a66aadb9164c645286c2087007e9f"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:027a07553367187f918a99661f63ae0506b91b77a70bee9c7ccaf3920bf7cfe7"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8664bddbe4e7b56ce94db8b93ea9077a158fb5e15364e11e29f93015ceea24"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:182b0fbde7e9a537fda0b354c28b0b6c035736728de8fe2db7ef49cf90352014"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0dafed144673e1173528768fe208a7c5a6e8edae40208381cac420ee7c918ec9"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:396bfff61b49f80b86ddebe0c76ae0f2731689cee49ad7d782625180b50b13af"},
+ {file = "tensorflow_io_gcs_filesystem-0.34.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b20622f8572fcb6c93e8f7d626327472f263e47ebd63d2153ef09162ef5ef7b5"},
+]
+
+[package.extras]
+tensorflow = ["tensorflow (>=2.13.0,<2.14.0)"]
+tensorflow-aarch64 = ["tensorflow-aarch64 (>=2.13.0,<2.14.0)"]
+tensorflow-cpu = ["tensorflow-cpu (>=2.13.0,<2.14.0)"]
+tensorflow-gpu = ["tensorflow-gpu (>=2.13.0,<2.14.0)"]
+tensorflow-rocm = ["tensorflow-rocm (>=2.13.0,<2.14.0)"]
[[package]]
name = "termcolor"
@@ -3025,14 +3146,14 @@ files = [
[[package]]
name = "threadpoolctl"
-version = "3.1.0"
+version = "3.2.0"
description = "threadpoolctl"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"},
- {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"},
+ {file = "threadpoolctl-3.2.0-py3-none-any.whl", hash = "sha256:2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032"},
+ {file = "threadpoolctl-3.2.0.tar.gz", hash = "sha256:c96a0ba3bdddeaca37dc4cc7344aafad41cdb8c313f74fdfe387a867bba93355"},
]
[[package]]
@@ -3049,40 +3170,55 @@ files = [
[[package]]
name = "torch"
-version = "1.13.1"
+version = "2.0.0"
description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
category = "dev"
optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "torch-1.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:fd12043868a34a8da7d490bf6db66991108b00ffbeecb034228bfcbbd4197143"},
- {file = "torch-1.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d9fe785d375f2e26a5d5eba5de91f89e6a3be5d11efb497e76705fdf93fa3c2e"},
- {file = "torch-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:98124598cdff4c287dbf50f53fb455f0c1e3a88022b39648102957f3445e9b76"},
- {file = "torch-1.13.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:393a6273c832e047581063fb74335ff50b4c566217019cc6ace318cd79eb0566"},
- {file = "torch-1.13.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:0122806b111b949d21fa1a5f9764d1fd2fcc4a47cb7f8ff914204fd4fc752ed5"},
- {file = "torch-1.13.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:22128502fd8f5b25ac1cd849ecb64a418382ae81dd4ce2b5cebaa09ab15b0d9b"},
- {file = "torch-1.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:76024be052b659ac1304ab8475ab03ea0a12124c3e7626282c9c86798ac7bc11"},
- {file = "torch-1.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ea8dda84d796094eb8709df0fcd6b56dc20b58fdd6bc4e8d7109930dafc8e419"},
- {file = "torch-1.13.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2ee7b81e9c457252bddd7d3da66fb1f619a5d12c24d7074de91c4ddafb832c93"},
- {file = "torch-1.13.1-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:0d9b8061048cfb78e675b9d2ea8503bfe30db43d583599ae8626b1263a0c1380"},
- {file = "torch-1.13.1-cp37-none-macosx_11_0_arm64.whl", hash = "sha256:f402ca80b66e9fbd661ed4287d7553f7f3899d9ab54bf5c67faada1555abde28"},
- {file = "torch-1.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:727dbf00e2cf858052364c0e2a496684b9cb5aa01dc8a8bc8bbb7c54502bdcdd"},
- {file = "torch-1.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:df8434b0695e9ceb8cc70650afc1310d8ba949e6db2a0525ddd9c3b2b181e5fe"},
- {file = "torch-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5e1e722a41f52a3f26f0c4fcec227e02c6c42f7c094f32e49d4beef7d1e213ea"},
- {file = "torch-1.13.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:33e67eea526e0bbb9151263e65417a9ef2d8fa53cbe628e87310060c9dcfa312"},
- {file = "torch-1.13.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:eeeb204d30fd40af6a2d80879b46a7efbe3cf43cdbeb8838dd4f3d126cc90b2b"},
- {file = "torch-1.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:50ff5e76d70074f6653d191fe4f6a42fdbe0cf942fbe2a3af0b75eaa414ac038"},
- {file = "torch-1.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2c3581a3fd81eb1f0f22997cddffea569fea53bafa372b2c0471db373b26aafc"},
- {file = "torch-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:0aa46f0ac95050c604bcf9ef71da9f1172e5037fdf2ebe051962d47b123848e7"},
- {file = "torch-1.13.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6930791efa8757cb6974af73d4996b6b50c592882a324b8fb0589c6a9ba2ddaf"},
- {file = "torch-1.13.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e0df902a7c7dd6c795698532ee5970ce898672625635d885eade9976e5a04949"},
-]
-
-[package.dependencies]
-nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\""}
-nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""}
-nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""}
-nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\""}
+python-versions = ">=3.8.0"
+files = [
+ {file = "torch-2.0.0-1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c9090bda7d2eeeecd74f51b721420dbeb44f838d4536cc1b284e879417e3064a"},
+ {file = "torch-2.0.0-1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:bd42db2a48a20574d2c33489e120e9f32789c4dc13c514b0c44272972d14a2d7"},
+ {file = "torch-2.0.0-1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8969aa8375bcbc0c2993e7ede0a7f889df9515f18b9b548433f412affed478d9"},
+ {file = "torch-2.0.0-1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ab2da16567cb55b67ae39e32d520d68ec736191d88ac79526ca5874754c32203"},
+ {file = "torch-2.0.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7a9319a67294ef02459a19738bbfa8727bb5307b822dadd708bc2ccf6c901aca"},
+ {file = "torch-2.0.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9f01fe1f6263f31bd04e1757946fd63ad531ae37f28bb2dbf66f5c826ee089f4"},
+ {file = "torch-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:527f4ae68df7b8301ee6b1158ca56350282ea633686537b30dbb5d7b4a52622a"},
+ {file = "torch-2.0.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:ce9b5a49bd513dff7950a5a07d6e26594dd51989cee05ba388b03e8e366fd5d5"},
+ {file = "torch-2.0.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:53e1c33c6896583cdb9a583693e22e99266444c4a43392dddc562640d39e542b"},
+ {file = "torch-2.0.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:09651bff72e439d004c991f15add0c397c66f98ab36fe60d5514b44e4da722e8"},
+ {file = "torch-2.0.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d439aec349c98f12819e8564b8c54008e4613dd4428582af0e6e14c24ca85870"},
+ {file = "torch-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2802f84f021907deee7e9470ed10c0e78af7457ac9a08a6cd7d55adef835fede"},
+ {file = "torch-2.0.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:01858620f25f25e7a9ec4b547ff38e5e27c92d38ec4ccba9cfbfb31d7071ed9c"},
+ {file = "torch-2.0.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:9a2e53b5783ef5896a6af338b36d782f28e83c8ddfc2ac44b67b066d9d76f498"},
+ {file = "torch-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ec5fff2447663e369682838ff0f82187b4d846057ef4d119a8dea7772a0b17dd"},
+ {file = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11b0384fe3c18c01b8fc5992e70fc519cde65e44c51cc87be1838c1803daf42f"},
+ {file = "torch-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:e54846aa63855298cfb1195487f032e413e7ac9cbfa978fda32354cc39551475"},
+ {file = "torch-2.0.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:cc788cbbbbc6eb4c90e52c550efd067586c2693092cf367c135b34893a64ae78"},
+ {file = "torch-2.0.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:d292640f0fd72b7a31b2a6e3b635eb5065fcbedd4478f9cad1a1e7a9ec861d35"},
+ {file = "torch-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6befaad784004b7af357e3d87fa0863c1f642866291f12a4c2af2de435e8ac5c"},
+ {file = "torch-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a83b26bd6ae36fbf5fee3d56973d9816e2002e8a3b7d9205531167c28aaa38a7"},
+ {file = "torch-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c7e67195e1c3e33da53954b026e89a8e1ff3bc1aeb9eb32b677172d4a9b5dcbf"},
+ {file = "torch-2.0.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6e0b97beb037a165669c312591f242382e9109a240e20054d5a5782d9236cad0"},
+ {file = "torch-2.0.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:297a4919aff1c0f98a58ebe969200f71350a1d4d4f986dbfd60c02ffce780e99"},
+]
+
+[package.dependencies]
+filelock = "*"
+jinja2 = "*"
+networkx = "*"
+nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cuda-cupti-cu11 = {version = "11.7.101", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cufft-cu11 = {version = "10.9.0.58", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-curand-cu11 = {version = "10.2.10.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cusolver-cu11 = {version = "11.4.0.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cusparse-cu11 = {version = "11.7.4.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-nccl-cu11 = {version = "2.14.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-nvtx-cu11 = {version = "11.7.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+sympy = "*"
+triton = {version = "2.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
typing-extensions = "*"
[package.extras]
@@ -3090,54 +3226,58 @@ opt-einsum = ["opt-einsum (>=3.3)"]
[[package]]
name = "traitlets"
-version = "5.9.0"
+version = "5.10.0"
description = "Traitlets Python configuration system"
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"},
- {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"},
+ {file = "traitlets-5.10.0-py3-none-any.whl", hash = "sha256:417745a96681fbb358e723d5346a547521f36e9bd0d50ba7ab368fff5d67aa54"},
+ {file = "traitlets-5.10.0.tar.gz", hash = "sha256:f584ea209240466e66e91f3c81aa7d004ba4cf794990b0c775938a1544217cd1"},
]
[package.extras]
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
-test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"]
+test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
[[package]]
-name = "typed-ast"
-version = "1.5.4"
-description = "a fork of Python 2 and 3 ast modules with type comment support"
+name = "triton"
+version = "2.0.0"
+description = "A language and compiler for custom Deep Learning operations"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = "*"
files = [
- {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"},
- {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"},
- {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"},
- {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"},
- {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"},
- {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"},
- {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"},
- {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"},
- {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"},
- {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"},
- {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"},
- {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"},
- {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"},
- {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"},
- {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"},
- {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"},
- {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"},
- {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"},
- {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"},
- {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"},
- {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"},
- {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"},
- {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
- {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
+ {file = "triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38806ee9663f4b0f7cd64790e96c579374089e58f49aac4a6608121aa55e2505"},
+ {file = "triton-2.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:226941c7b8595219ddef59a1fdb821e8c744289a132415ddd584facedeb475b1"},
+ {file = "triton-2.0.0-1-cp36-cp36m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4c9fc8c89874bc48eb7e7b2107a9b8d2c0bf139778637be5bfccb09191685cfd"},
+ {file = "triton-2.0.0-1-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d2684b6a60b9f174f447f36f933e9a45f31db96cb723723ecd2dcfd1c57b778b"},
+ {file = "triton-2.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9d4978298b74fcf59a75fe71e535c092b023088933b2f1df933ec32615e4beef"},
+ {file = "triton-2.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:74f118c12b437fb2ca25e1a04759173b517582fcf4c7be11913316c764213656"},
+ {file = "triton-2.0.0-1-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9618815a8da1d9157514f08f855d9e9ff92e329cd81c0305003eb9ec25cc5add"},
+ {file = "triton-2.0.0-1-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1aca3303629cd3136375b82cb9921727f804e47ebee27b2677fef23005c3851a"},
+ {file = "triton-2.0.0-1-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e3e13aa8b527c9b642e3a9defcc0fbd8ffbe1c80d8ac8c15a01692478dc64d8a"},
+ {file = "triton-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f05a7e64e4ca0565535e3d5d3405d7e49f9d308505bb7773d21fb26a4c008c2"},
+ {file = "triton-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4b99ca3c6844066e516658541d876c28a5f6e3a852286bbc97ad57134827fd"},
+ {file = "triton-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47b4d70dc92fb40af553b4460492c31dc7d3a114a979ffb7a5cdedb7eb546c08"},
+ {file = "triton-2.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fedce6a381901b1547e0e7e1f2546e4f65dca6d91e2d8a7305a2d1f5551895be"},
+ {file = "triton-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75834f27926eab6c7f00ce73aaf1ab5bfb9bec6eb57ab7c0bfc0a23fac803b4c"},
+ {file = "triton-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0117722f8c2b579cd429e0bee80f7731ae05f63fe8e9414acd9a679885fcbf42"},
+ {file = "triton-2.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcd9be5d0c2e45d2b7e6ddc6da20112b6862d69741576f9c3dbaf941d745ecae"},
+ {file = "triton-2.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a0d2c3fc2eab4ba71384f2e785fbfd47aa41ae05fa58bf12cb31dcbd0aeceb"},
+ {file = "triton-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c47b72c72693198163ece9d90a721299e4fb3b8e24fd13141e384ad952724f"},
]
+[package.dependencies]
+cmake = "*"
+filelock = "*"
+lit = "*"
+torch = "*"
+
+[package.extras]
+tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)"]
+tutorials = ["matplotlib", "pandas", "tabulate"]
+
[[package]]
name = "typesystem"
version = "0.4.1"
@@ -3156,33 +3296,32 @@ pyyaml = ["pyyaml"]
[[package]]
name = "typing-extensions"
-version = "4.6.3"
-description = "Backported and Experimental Type Hints for Python 3.7+"
+version = "4.8.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"},
- {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"},
+ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
+ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
]
[[package]]
name = "urllib3"
-version = "2.0.3"
+version = "1.26.16"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"},
- {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"},
+ {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"},
+ {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "uvicorn"
@@ -3203,7 +3342,6 @@ h11 = ">=0.8"
httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
@@ -3258,56 +3396,55 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my
[[package]]
name = "virtualenv"
-version = "20.21.1"
+version = "20.24.5"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "virtualenv-20.21.1-py3-none-any.whl", hash = "sha256:09ddbe1af0c8ed2bb4d6ed226b9e6415718ad18aef9fa0ba023d96b7a8356049"},
- {file = "virtualenv-20.21.1.tar.gz", hash = "sha256:4c104ccde994f8b108163cf9ba58f3d11511d9403de87fb9b4f52bf33dbc8668"},
+ {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"},
+ {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"},
]
[package.dependencies]
-distlib = ">=0.3.6,<1"
-filelock = ">=3.4.1,<4"
-importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""}
-platformdirs = ">=2.4,<4"
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<4"
[package.extras]
-docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
-test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
name = "watchfiles"
-version = "0.19.0"
+version = "0.20.0"
description = "Simple, modern and high performance file watching and code reload in python."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"},
- {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"},
- {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"},
- {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"},
- {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"},
- {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"},
- {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"},
- {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"},
- {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"},
- {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"},
- {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"},
- {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"},
- {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"},
- {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"},
- {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"},
- {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"},
- {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"},
- {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"},
- {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"},
- {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"},
- {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"},
- {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"},
+ {file = "watchfiles-0.20.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:3796312bd3587e14926013612b23066912cf45a14af71cf2b20db1c12dadf4e9"},
+ {file = "watchfiles-0.20.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:d0002d81c89a662b595645fb684a371b98ff90a9c7d8f8630c82f0fde8310458"},
+ {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:570848706440373b4cd8017f3e850ae17f76dbdf1e9045fc79023b11e1afe490"},
+ {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a0351d20d03c6f7ad6b2e8a226a5efafb924c7755ee1e34f04c77c3682417fa"},
+ {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:007dcc4a401093010b389c044e81172c8a2520dba257c88f8828b3d460c6bb38"},
+ {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d82dbc1832da83e441d112069833eedd4cf583d983fb8dd666fbefbea9d99c0"},
+ {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99f4c65fd2fce61a571b2a6fcf747d6868db0bef8a934e8ca235cc8533944d95"},
+ {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5392dd327a05f538c56edb1c6ebba6af91afc81b40822452342f6da54907bbdf"},
+ {file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:08dc702529bb06a2b23859110c214db245455532da5eaea602921687cfcd23db"},
+ {file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d4e66a857621584869cfbad87039e65dadd7119f0d9bb9dbc957e089e32c164"},
+ {file = "watchfiles-0.20.0-cp37-abi3-win32.whl", hash = "sha256:a03d1e6feb7966b417f43c3e3783188167fd69c2063e86bad31e62c4ea794cc5"},
+ {file = "watchfiles-0.20.0-cp37-abi3-win_amd64.whl", hash = "sha256:eccc8942bcdc7d638a01435d915b913255bbd66f018f1af051cd8afddb339ea3"},
+ {file = "watchfiles-0.20.0-cp37-abi3-win_arm64.whl", hash = "sha256:b17d4176c49d207865630da5b59a91779468dd3e08692fe943064da260de2c7c"},
+ {file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d97db179f7566dcf145c5179ddb2ae2a4450e3a634eb864b09ea04e68c252e8e"},
+ {file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:835df2da7a5df5464c4a23b2d963e1a9d35afa422c83bf4ff4380b3114603644"},
+ {file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:608cd94a8767f49521901aff9ae0c92cc8f5a24d528db7d6b0295290f9d41193"},
+ {file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d1de8218874925bce7bb2ae9657efc504411528930d7a83f98b1749864f2ef"},
+ {file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:13f995d5152a8ba4ed7c2bbbaeee4e11a5944defc7cacd0ccb4dcbdcfd78029a"},
+ {file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b5c8d3be7b502f8c43a33c63166ada8828dbb0c6d49c8f9ce990a96de2f5a49"},
+ {file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e43af4464daa08723c04b43cf978ab86cc55c684c16172622bdac64b34e36af0"},
+ {file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d9e1f75c4f86c93d73b5bd1ebe667558357548f11b4f8af4e0e272f79413ce"},
+ {file = "watchfiles-0.20.0.tar.gz", hash = "sha256:728575b6b94c90dd531514677201e8851708e6e4b5fe7028ac506a200b622019"},
]
[package.dependencies]
@@ -3407,14 +3544,14 @@ files = [
[[package]]
name = "werkzeug"
-version = "2.3.5"
+version = "2.3.7"
description = "The comprehensive WSGI web application library."
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "Werkzeug-2.3.5-py3-none-any.whl", hash = "sha256:26c6a6a63a8a8d10a8d4e13e7f10821772ff5f138909262e89e946717c72b857"},
- {file = "Werkzeug-2.3.5.tar.gz", hash = "sha256:f6426f626540009131f06482c73f10c536efd47125afc63c850cc950db9ae585"},
+ {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"},
+ {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"},
]
[package.dependencies]
@@ -3425,18 +3562,18 @@ watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "wheel"
-version = "0.40.0"
+version = "0.41.2"
description = "A built-package format for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "wheel-0.40.0-py3-none-any.whl", hash = "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247"},
- {file = "wheel-0.40.0.tar.gz", hash = "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873"},
+ {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"},
+ {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"},
]
[package.extras]
-test = ["pytest (>=6.0.0)"]
+test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
[[package]]
name = "wrapt"
@@ -3599,34 +3736,33 @@ files = [
[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
[[package]]
name = "zipp"
-version = "3.15.0"
+version = "3.16.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "main"
+category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
- {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
+ {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"},
+ {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[[package]]
name = "zope-event"
-version = "4.6"
+version = "5.0"
description = "Very basic event publishing system"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
files = [
- {file = "zope.event-4.6-py2.py3-none-any.whl", hash = "sha256:73d9e3ef750cca14816a9c322c7250b0d7c9dbc337df5d1b807ff8d3d0b9e97c"},
- {file = "zope.event-4.6.tar.gz", hash = "sha256:81d98813046fc86cc4136e3698fee628a3282f9c320db18658c21749235fce80"},
+ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"},
+ {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"},
]
[package.dependencies]
@@ -3685,14 +3821,14 @@ test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[extras]
+client = ["httpx"]
database = ["SQLAlchemy"]
-full = ["SQLAlchemy", "apispec", "marshmallow", "pydantic", "python-forge", "typesystem"]
+full = ["SQLAlchemy", "apispec", "httpx", "marshmallow", "pydantic", "typesystem"]
marshmallow = ["apispec", "marshmallow"]
-pagination = ["python-forge"]
pydantic = ["pydantic"]
typesystem = ["typesystem"]
[metadata]
lock-version = "2.0"
-python-versions = ">=3.7,<3.12"
-content-hash = "b01645bfd6097da075fd3a8437b3122f5f4ccab765362a8a99d582f384b2b620"
+python-versions = ">=3.8,<3.12"
+content-hash = "e40ef7edaf3ec048388665a1093d0f6719048cab9647fff4c70a723bc8b94135"
diff --git a/pyproject.toml b/pyproject.toml
index 1f293acd..4ea89afe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,28 +32,27 @@ include = ["flama/py.typed", "flama/templates/**/*"]
exclude = []
[tool.poetry.dependencies]
-python = ">=3.7,<3.12"
+python = ">=3.8,<3.12"
starlette = ">=0.21.0,<1.0.0"
click = "^8.1"
uvicorn = "^0.22"
Jinja2 = "^3.1.2"
+pyyaml = "^6.0"
typing-extensions = { version = "^4.4", python = "<3.10" }
-importlib-metadata = { version = "^4.2", python = "<3.8" }
-pydantic = { version = "^1.10", optional = true }
+pydantic = { version = "^2.0", optional = true }
marshmallow = { version = "^3.0", optional = true }
-python-forge = { version = "^18.6", optional = true }
apispec = { version = "^6.0", optional = true }
typesystem = { version = "^0.4.1", optional = true }
SQLAlchemy = { version = "^2.0", extras = ["asyncio"], optional = true }
-pyyaml = "^6.0"
+httpx = { version = "^0.24", optional = true}
[tool.poetry.extras]
pydantic = ["pydantic"]
typesystem = ["typesystem"]
marshmallow = ["marshmallow", "apispec"]
-pagination = ["python-forge"]
database = ["sqlalchemy"]
-full = ["pydantic", "typesystem", "marshmallow", "apispec", "python-forge", "sqlalchemy"]
+client = ["httpx"]
+full = ["pydantic", "typesystem", "marshmallow", "apispec", "sqlalchemy", "httpx"]
[tool.poetry.scripts]
flama = 'flama.cli.__main__:cli'
@@ -66,31 +65,26 @@ pytest = "^7.0"
pytest-xdist = { version = "^3.0", extras = ["psutil"] }
pytest-cov = "^4.0"
pytest-asyncio = "^0.20"
-ipython = [
- { version = "^7.0", python = ">=3.7,<3.8" },
- { version = "^8.0", python = ">=3.8" }
-]
+ipython = "^8.0"
isort = "^5.10"
ruff = "^0.0.231"
-mypy = "^1.2"
black = { version = "^22.3", extras = ["d"] }
pre-commit = "^2.20"
python-multipart = "^0.0.5"
uvicorn = { version = ">=0.19.0,<1.0.0", extras = ["standard"] }
-httpx = ">=0.23,<1.0.0"
+httpx = "^0.24"
aiosqlite = ">=0.11.0,<1.0.0"
requests = "^2.22"
Faker = "^8.2"
-python-forge = "^18.6"
SQLAlchemy = { version = "^2.0", extras = ["asyncio"] }
gevent = "^22.10"
-asyncmock = { version = "^0.4", python = ">=3.7,<3.8" }
+pyright = "^1.1.320"
[tool.poetry.group.schemas]
optional = true
[tool.poetry.group.schemas.dependencies]
-pydantic = "^1.10"
+pydantic = "^2.0"
marshmallow = "^3.0"
apispec = "^6.0"
typesystem = "^0.4"
@@ -99,10 +93,9 @@ typesystem = "^0.4"
optional = true
[tool.poetry.group.ml.dependencies]
-numpy = { version = "^1.23.0", python = ">=3.8" }
-scikit-learn = { version = "^1.1.0", python = ">=3.8" }
-tensorflow-cpu = { version = "^2.12.0", python = ">=3.8" }
-torch = { version = "^1.13.0", python = ">=3.8" }
+scikit-learn = "^1.3.0"
+tensorflow-cpu = { version = "^2.12.0", platform = "linux"}
+torch = "^2.0.0, !=2.0.1"
[tool.black]
line-length = 120
@@ -171,9 +164,18 @@ exclude = [
"tests",
]
+[tool.pyright]
+include = ["flama"]
+exclude = [
+ "**/node_modules",
+ "**/__pycache__",
+]
+reportWildcardImportFromLibrary = false
+reportPrivateImportUsage = false
+
[tool.pytest.ini_options]
minversion = 3
-addopts = "--dist=loadfile --junitxml=./test-results/pytest/results.xml --no-cov-on-fail --cov-report=html --cov-report=xml --cov-report=term-missing --cov-config=pyproject.toml --cov=. --pdbcls=IPython.terminal.debugger:TerminalPdb"
+addopts = "--dist=loadfile --junitxml=./test-results/pytest/results.xml --no-cov-on-fail --cov-report=html --cov-report=xml --cov-report=term-missing --cov-config=pyproject.toml --cov=. --pdbcls=IPython.terminal.debugger:TerminalPdb -nauto"
norecursedirs = [
"*settings*",
"*urls*",
diff --git a/scripts/docker_push b/scripts/docker_push
index bc2940e2..c4800b4b 100755
--- a/scripts/docker_push
+++ b/scripts/docker_push
@@ -2,7 +2,7 @@
set -e
LINUX_VERSIONS=("slim")
-PYTHON_VERSIONS=("3.7" "3.8" "3.9" "3.10" "3.11")
+PYTHON_VERSIONS=("3.8" "3.9" "3.10" "3.11")
SCHEMA_LIBS=("pydantic" "marshmallow" "typesystem")
DEFAULT_LINUX="slim"
diff --git a/scripts/lint b/scripts/lint
index ec6d13d9..d5a98d85 100755
--- a/scripts/lint
+++ b/scripts/lint
@@ -4,7 +4,7 @@ set -e
source scripts/black
source scripts/isort
source scripts/ruff
-source scripts/mypy
+source scripts/pyright
run_lint() {
echo "🧹 Code lint using multiple tools:"
@@ -19,7 +19,7 @@ run_lint() {
run_isort --check .
run_ruff .
fi
- run_mypy .
+ run_pyright
}
if [[ "${#BASH_SOURCE[@]}" -eq 1 ]]; then
diff --git a/scripts/mypy b/scripts/mypy
deleted file mode 100755
index 7dcaa5d8..00000000
--- a/scripts/mypy
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-run_mypy() {
- echo "• Run MyPy code formatting:"
- poetry run mypy --install-types --non-interactive "$@"
- echo "↳ MyPy done"
-}
-
-if [[ "${#BASH_SOURCE[@]}" -eq 1 ]]; then
- run_mypy "$@"
-fi
diff --git a/scripts/pyright b/scripts/pyright
new file mode 100755
index 00000000..497e8d32
--- /dev/null
+++ b/scripts/pyright
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+set -e
+
+run_pyright() {
+ echo "• Run Pyright static types check:"
+ poetry run pyright "$@"
+ echo "↳ Pyright done"
+}
+
+if [[ "${#BASH_SOURCE[@]}" -eq 1 ]]; then
+ run_pyright "$@"
+fi
| Wrong typing in Middleware classes
| 2023-07-21T11:41:06 | 0.0 | [] | [] |
|||
developmentseed/cql2-rs | developmentseed__cql2-rs-47 | 81af07ac24ea0449f3431abaa6cec4178d9601e5 | diff --git a/Cargo.lock b/Cargo.lock
index 4d05406..508dbb5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -224,6 +224,7 @@ dependencies = [
"lazy_static",
"pest",
"pest_derive",
+ "pg_escape",
"rstest",
"serde",
"serde_derive",
@@ -612,6 +613,57 @@ dependencies = [
"sha2",
]
+[[package]]
+name = "pg_escape"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c7bc82ccbe2c7ef7ceed38dcac90d7ff46681e061e9d7310cbcd409113e303"
+dependencies = [
+ "phf",
+]
+
+[[package]]
+name = "phf"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc"
+dependencies = [
+ "phf_macros",
+ "phf_shared",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0"
+dependencies = [
+ "phf_shared",
+ "rand",
+]
+
+[[package]]
+name = "phf_macros"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b"
+dependencies = [
+ "phf_generator",
+ "phf_shared",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b"
+dependencies = [
+ "siphasher",
+]
+
[[package]]
name = "pin-project-lite"
version = "0.2.14"
@@ -730,6 +782,21 @@ dependencies = [
"proc-macro2",
]
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+
[[package]]
name = "regex"
version = "1.11.0"
@@ -860,6 +927,12 @@ dependencies = [
"digest",
]
+[[package]]
+name = "siphasher"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
+
[[package]]
name = "slab"
version = "0.4.9"
diff --git a/Cargo.toml b/Cargo.toml
index d490993..3980d47 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -30,6 +30,7 @@ geozero = "0.14.0"
lazy_static = "1.5"
pest = "2.7"
pest_derive = { version = "2.7", features = ["grammar-extras"] }
+pg_escape = "0.1.1"
serde = "1.0"
serde_derive = "1.0"
serde_json = { version = "1.0", features = ["preserve_order"] }
diff --git a/fixtures/json/clause6_01.json b/examples/json/clause6_01.json
similarity index 100%
rename from fixtures/json/clause6_01.json
rename to examples/json/clause6_01.json
diff --git a/fixtures/json/clause6_02a.json b/examples/json/clause6_02a.json
similarity index 100%
rename from fixtures/json/clause6_02a.json
rename to examples/json/clause6_02a.json
diff --git a/fixtures/json/clause6_02b.json b/examples/json/clause6_02b.json
similarity index 100%
rename from fixtures/json/clause6_02b.json
rename to examples/json/clause6_02b.json
diff --git a/fixtures/json/clause6_02c.json b/examples/json/clause6_02c.json
similarity index 100%
rename from fixtures/json/clause6_02c.json
rename to examples/json/clause6_02c.json
diff --git a/fixtures/json/clause6_02d.json b/examples/json/clause6_02d.json
similarity index 100%
rename from fixtures/json/clause6_02d.json
rename to examples/json/clause6_02d.json
diff --git a/fixtures/json/clause6_03.json b/examples/json/clause6_03.json
similarity index 100%
rename from fixtures/json/clause6_03.json
rename to examples/json/clause6_03.json
diff --git a/fixtures/json/clause7_01.json b/examples/json/clause7_01.json
similarity index 100%
rename from fixtures/json/clause7_01.json
rename to examples/json/clause7_01.json
diff --git a/fixtures/json/clause7_02.json b/examples/json/clause7_02.json
similarity index 100%
rename from fixtures/json/clause7_02.json
rename to examples/json/clause7_02.json
diff --git a/fixtures/json/clause7_03a.json b/examples/json/clause7_03a.json
similarity index 100%
rename from fixtures/json/clause7_03a.json
rename to examples/json/clause7_03a.json
diff --git a/fixtures/json/clause7_03b.json b/examples/json/clause7_03b.json
similarity index 100%
rename from fixtures/json/clause7_03b.json
rename to examples/json/clause7_03b.json
diff --git a/fixtures/json/clause7_04.json b/examples/json/clause7_04.json
similarity index 100%
rename from fixtures/json/clause7_04.json
rename to examples/json/clause7_04.json
diff --git a/fixtures/json/clause7_05.json b/examples/json/clause7_05.json
similarity index 100%
rename from fixtures/json/clause7_05.json
rename to examples/json/clause7_05.json
diff --git a/fixtures/json/clause7_07.json b/examples/json/clause7_07.json
similarity index 100%
rename from fixtures/json/clause7_07.json
rename to examples/json/clause7_07.json
diff --git a/fixtures/json/clause7_10.json b/examples/json/clause7_10.json
similarity index 100%
rename from fixtures/json/clause7_10.json
rename to examples/json/clause7_10.json
diff --git a/fixtures/json/clause7_12.json b/examples/json/clause7_12.json
similarity index 100%
rename from fixtures/json/clause7_12.json
rename to examples/json/clause7_12.json
diff --git a/fixtures/json/clause7_13.json b/examples/json/clause7_13.json
similarity index 100%
rename from fixtures/json/clause7_13.json
rename to examples/json/clause7_13.json
diff --git a/fixtures/json/clause7_15.json b/examples/json/clause7_15.json
similarity index 100%
rename from fixtures/json/clause7_15.json
rename to examples/json/clause7_15.json
diff --git a/fixtures/json/clause7_16.json b/examples/json/clause7_16.json
similarity index 100%
rename from fixtures/json/clause7_16.json
rename to examples/json/clause7_16.json
diff --git a/fixtures/json/clause7_17.json b/examples/json/clause7_17.json
similarity index 100%
rename from fixtures/json/clause7_17.json
rename to examples/json/clause7_17.json
diff --git a/fixtures/json/clause7_18.json b/examples/json/clause7_18.json
similarity index 100%
rename from fixtures/json/clause7_18.json
rename to examples/json/clause7_18.json
diff --git a/fixtures/json/clause7_19.json b/examples/json/clause7_19.json
similarity index 100%
rename from fixtures/json/clause7_19.json
rename to examples/json/clause7_19.json
diff --git a/fixtures/json/example01.json b/examples/json/example01.json
similarity index 100%
rename from fixtures/json/example01.json
rename to examples/json/example01.json
diff --git a/fixtures/json/example02.json b/examples/json/example02.json
similarity index 100%
rename from fixtures/json/example02.json
rename to examples/json/example02.json
diff --git a/fixtures/json/example03.json b/examples/json/example03.json
similarity index 100%
rename from fixtures/json/example03.json
rename to examples/json/example03.json
diff --git a/fixtures/json/example04.json b/examples/json/example04.json
similarity index 100%
rename from fixtures/json/example04.json
rename to examples/json/example04.json
diff --git a/fixtures/json/example05a.json b/examples/json/example05a.json
similarity index 100%
rename from fixtures/json/example05a.json
rename to examples/json/example05a.json
diff --git a/fixtures/json/example05b.json b/examples/json/example05b.json
similarity index 100%
rename from fixtures/json/example05b.json
rename to examples/json/example05b.json
diff --git a/fixtures/json/example06a.json b/examples/json/example06a.json
similarity index 100%
rename from fixtures/json/example06a.json
rename to examples/json/example06a.json
diff --git a/fixtures/json/example06b.json b/examples/json/example06b.json
similarity index 100%
rename from fixtures/json/example06b.json
rename to examples/json/example06b.json
diff --git a/fixtures/json/example07.json b/examples/json/example07.json
similarity index 100%
rename from fixtures/json/example07.json
rename to examples/json/example07.json
diff --git a/fixtures/json/example08.json b/examples/json/example08.json
similarity index 100%
rename from fixtures/json/example08.json
rename to examples/json/example08.json
diff --git a/fixtures/json/example09.json b/examples/json/example09.json
similarity index 100%
rename from fixtures/json/example09.json
rename to examples/json/example09.json
diff --git a/fixtures/json/example10.json b/examples/json/example10.json
similarity index 100%
rename from fixtures/json/example10.json
rename to examples/json/example10.json
diff --git a/fixtures/json/example11.json b/examples/json/example11.json
similarity index 100%
rename from fixtures/json/example11.json
rename to examples/json/example11.json
diff --git a/fixtures/json/example12.json b/examples/json/example12.json
similarity index 100%
rename from fixtures/json/example12.json
rename to examples/json/example12.json
diff --git a/fixtures/json/example13.json b/examples/json/example13.json
similarity index 100%
rename from fixtures/json/example13.json
rename to examples/json/example13.json
diff --git a/fixtures/json/example14.json b/examples/json/example14.json
similarity index 100%
rename from fixtures/json/example14.json
rename to examples/json/example14.json
diff --git a/fixtures/json/example15.json b/examples/json/example15.json
similarity index 100%
rename from fixtures/json/example15.json
rename to examples/json/example15.json
diff --git a/fixtures/json/example16.json b/examples/json/example16.json
similarity index 100%
rename from fixtures/json/example16.json
rename to examples/json/example16.json
diff --git a/fixtures/json/example17.json b/examples/json/example17.json
similarity index 100%
rename from fixtures/json/example17.json
rename to examples/json/example17.json
diff --git a/fixtures/json/example18.json b/examples/json/example18.json
similarity index 100%
rename from fixtures/json/example18.json
rename to examples/json/example18.json
diff --git a/fixtures/json/example19.json b/examples/json/example19.json
similarity index 100%
rename from fixtures/json/example19.json
rename to examples/json/example19.json
diff --git a/fixtures/json/example20.json b/examples/json/example20.json
similarity index 100%
rename from fixtures/json/example20.json
rename to examples/json/example20.json
diff --git a/fixtures/json/example21.json b/examples/json/example21.json
similarity index 100%
rename from fixtures/json/example21.json
rename to examples/json/example21.json
diff --git a/fixtures/json/example22.json b/examples/json/example22.json
similarity index 100%
rename from fixtures/json/example22.json
rename to examples/json/example22.json
diff --git a/fixtures/json/example23.json b/examples/json/example23.json
similarity index 100%
rename from fixtures/json/example23.json
rename to examples/json/example23.json
diff --git a/fixtures/json/example24.json b/examples/json/example24.json
similarity index 100%
rename from fixtures/json/example24.json
rename to examples/json/example24.json
diff --git a/fixtures/json/example25.json b/examples/json/example25.json
similarity index 100%
rename from fixtures/json/example25.json
rename to examples/json/example25.json
diff --git a/fixtures/json/example26.json b/examples/json/example26.json
similarity index 100%
rename from fixtures/json/example26.json
rename to examples/json/example26.json
diff --git a/fixtures/json/example27.json b/examples/json/example27.json
similarity index 100%
rename from fixtures/json/example27.json
rename to examples/json/example27.json
diff --git a/fixtures/json/example28.json b/examples/json/example28.json
similarity index 100%
rename from fixtures/json/example28.json
rename to examples/json/example28.json
diff --git a/fixtures/json/example29.json b/examples/json/example29.json
similarity index 100%
rename from fixtures/json/example29.json
rename to examples/json/example29.json
diff --git a/fixtures/json/example30.json b/examples/json/example30.json
similarity index 100%
rename from fixtures/json/example30.json
rename to examples/json/example30.json
diff --git a/fixtures/json/example31.json b/examples/json/example31.json
similarity index 100%
rename from fixtures/json/example31.json
rename to examples/json/example31.json
diff --git a/fixtures/json/example32.json b/examples/json/example32.json
similarity index 100%
rename from fixtures/json/example32.json
rename to examples/json/example32.json
diff --git a/fixtures/json/example33.json b/examples/json/example33.json
similarity index 100%
rename from fixtures/json/example33.json
rename to examples/json/example33.json
diff --git a/fixtures/json/example34.json b/examples/json/example34.json
similarity index 100%
rename from fixtures/json/example34.json
rename to examples/json/example34.json
diff --git a/fixtures/json/example35.json b/examples/json/example35.json
similarity index 100%
rename from fixtures/json/example35.json
rename to examples/json/example35.json
diff --git a/fixtures/json/example36.json b/examples/json/example36.json
similarity index 100%
rename from fixtures/json/example36.json
rename to examples/json/example36.json
diff --git a/fixtures/json/example37.json b/examples/json/example37.json
similarity index 100%
rename from fixtures/json/example37.json
rename to examples/json/example37.json
diff --git a/fixtures/json/example38.json b/examples/json/example38.json
similarity index 100%
rename from fixtures/json/example38.json
rename to examples/json/example38.json
diff --git a/fixtures/json/example39.json b/examples/json/example39.json
similarity index 100%
rename from fixtures/json/example39.json
rename to examples/json/example39.json
diff --git a/fixtures/json/example40.json b/examples/json/example40.json
similarity index 100%
rename from fixtures/json/example40.json
rename to examples/json/example40.json
diff --git a/fixtures/json/example41.json b/examples/json/example41.json
similarity index 100%
rename from fixtures/json/example41.json
rename to examples/json/example41.json
diff --git a/fixtures/json/example42.json b/examples/json/example42.json
similarity index 100%
rename from fixtures/json/example42.json
rename to examples/json/example42.json
diff --git a/fixtures/json/example43.json b/examples/json/example43.json
similarity index 100%
rename from fixtures/json/example43.json
rename to examples/json/example43.json
diff --git a/fixtures/json/example44.json b/examples/json/example44.json
similarity index 100%
rename from fixtures/json/example44.json
rename to examples/json/example44.json
diff --git a/fixtures/json/example45.json b/examples/json/example45.json
similarity index 100%
rename from fixtures/json/example45.json
rename to examples/json/example45.json
diff --git a/fixtures/json/example46.json b/examples/json/example46.json
similarity index 100%
rename from fixtures/json/example46.json
rename to examples/json/example46.json
diff --git a/fixtures/json/example47.json b/examples/json/example47.json
similarity index 100%
rename from fixtures/json/example47.json
rename to examples/json/example47.json
diff --git a/fixtures/json/example48.json b/examples/json/example48.json
similarity index 100%
rename from fixtures/json/example48.json
rename to examples/json/example48.json
diff --git a/fixtures/json/example49.json b/examples/json/example49.json
similarity index 100%
rename from fixtures/json/example49.json
rename to examples/json/example49.json
diff --git a/fixtures/json/example50.json b/examples/json/example50.json
similarity index 100%
rename from fixtures/json/example50.json
rename to examples/json/example50.json
diff --git a/fixtures/json/example51.json b/examples/json/example51.json
similarity index 100%
rename from fixtures/json/example51.json
rename to examples/json/example51.json
diff --git a/fixtures/json/example52.json b/examples/json/example52.json
similarity index 100%
rename from fixtures/json/example52.json
rename to examples/json/example52.json
diff --git a/fixtures/json/example53.json b/examples/json/example53.json
similarity index 100%
rename from fixtures/json/example53.json
rename to examples/json/example53.json
diff --git a/fixtures/json/example54.json b/examples/json/example54.json
similarity index 100%
rename from fixtures/json/example54.json
rename to examples/json/example54.json
diff --git a/fixtures/json/example55.json b/examples/json/example55.json
similarity index 100%
rename from fixtures/json/example55.json
rename to examples/json/example55.json
diff --git a/fixtures/json/example56.json b/examples/json/example56.json
similarity index 100%
rename from fixtures/json/example56.json
rename to examples/json/example56.json
diff --git a/fixtures/json/example57.json b/examples/json/example57.json
similarity index 100%
rename from fixtures/json/example57.json
rename to examples/json/example57.json
diff --git a/fixtures/json/example58.json b/examples/json/example58.json
similarity index 100%
rename from fixtures/json/example58.json
rename to examples/json/example58.json
diff --git a/fixtures/json/example59.json b/examples/json/example59.json
similarity index 100%
rename from fixtures/json/example59.json
rename to examples/json/example59.json
diff --git a/fixtures/json/example60.json b/examples/json/example60.json
similarity index 100%
rename from fixtures/json/example60.json
rename to examples/json/example60.json
diff --git a/fixtures/json/example61.json b/examples/json/example61.json
similarity index 100%
rename from fixtures/json/example61.json
rename to examples/json/example61.json
diff --git a/fixtures/json/example62.json b/examples/json/example62.json
similarity index 100%
rename from fixtures/json/example62.json
rename to examples/json/example62.json
diff --git a/fixtures/json/example63.json b/examples/json/example63.json
similarity index 100%
rename from fixtures/json/example63.json
rename to examples/json/example63.json
diff --git a/fixtures/json/example64.json b/examples/json/example64.json
similarity index 100%
rename from fixtures/json/example64.json
rename to examples/json/example64.json
diff --git a/fixtures/json/example65.json b/examples/json/example65.json
similarity index 100%
rename from fixtures/json/example65.json
rename to examples/json/example65.json
diff --git a/fixtures/json/example66.json b/examples/json/example66.json
similarity index 100%
rename from fixtures/json/example66.json
rename to examples/json/example66.json
diff --git a/fixtures/json/example67.json b/examples/json/example67.json
similarity index 100%
rename from fixtures/json/example67.json
rename to examples/json/example67.json
diff --git a/fixtures/json/example68.json b/examples/json/example68.json
similarity index 100%
rename from fixtures/json/example68.json
rename to examples/json/example68.json
diff --git a/fixtures/json/example69.json b/examples/json/example69.json
similarity index 100%
rename from fixtures/json/example69.json
rename to examples/json/example69.json
diff --git a/fixtures/json/example70.json b/examples/json/example70.json
similarity index 100%
rename from fixtures/json/example70.json
rename to examples/json/example70.json
diff --git a/fixtures/json/example71.json b/examples/json/example71.json
similarity index 100%
rename from fixtures/json/example71.json
rename to examples/json/example71.json
diff --git a/fixtures/json/example72.json b/examples/json/example72.json
similarity index 100%
rename from fixtures/json/example72.json
rename to examples/json/example72.json
diff --git a/fixtures/json/example73.json b/examples/json/example73.json
similarity index 100%
rename from fixtures/json/example73.json
rename to examples/json/example73.json
diff --git a/fixtures/json/example74.json b/examples/json/example74.json
similarity index 100%
rename from fixtures/json/example74.json
rename to examples/json/example74.json
diff --git a/fixtures/json/example75.json b/examples/json/example75.json
similarity index 100%
rename from fixtures/json/example75.json
rename to examples/json/example75.json
diff --git a/fixtures/json/example76.json b/examples/json/example76.json
similarity index 100%
rename from fixtures/json/example76.json
rename to examples/json/example76.json
diff --git a/fixtures/json/example77.json b/examples/json/example77.json
similarity index 100%
rename from fixtures/json/example77.json
rename to examples/json/example77.json
diff --git a/fixtures/json/example78.json b/examples/json/example78.json
similarity index 100%
rename from fixtures/json/example78.json
rename to examples/json/example78.json
diff --git a/fixtures/json/example79.json b/examples/json/example79.json
similarity index 100%
rename from fixtures/json/example79.json
rename to examples/json/example79.json
diff --git a/fixtures/json/example80.json b/examples/json/example80.json
similarity index 100%
rename from fixtures/json/example80.json
rename to examples/json/example80.json
diff --git a/fixtures/json/example81.json b/examples/json/example81.json
similarity index 100%
rename from fixtures/json/example81.json
rename to examples/json/example81.json
diff --git a/fixtures/json/example82.json b/examples/json/example82.json
similarity index 100%
rename from fixtures/json/example82.json
rename to examples/json/example82.json
diff --git a/fixtures/json/example83.json b/examples/json/example83.json
similarity index 100%
rename from fixtures/json/example83.json
rename to examples/json/example83.json
diff --git a/fixtures/json/example84.json b/examples/json/example84.json
similarity index 100%
rename from fixtures/json/example84.json
rename to examples/json/example84.json
diff --git a/fixtures/json/example85.json b/examples/json/example85.json
similarity index 100%
rename from fixtures/json/example85.json
rename to examples/json/example85.json
diff --git a/fixtures/json/example86.json b/examples/json/example86.json
similarity index 100%
rename from fixtures/json/example86.json
rename to examples/json/example86.json
diff --git a/fixtures/json/validate.sh b/examples/json/validate.sh
similarity index 100%
rename from fixtures/json/validate.sh
rename to examples/json/validate.sh
diff --git a/fixtures/text/clause6_01.txt b/examples/text/clause6_01.txt
similarity index 100%
rename from fixtures/text/clause6_01.txt
rename to examples/text/clause6_01.txt
diff --git a/fixtures/text/clause6_02a.txt b/examples/text/clause6_02a.txt
similarity index 100%
rename from fixtures/text/clause6_02a.txt
rename to examples/text/clause6_02a.txt
diff --git a/fixtures/text/clause6_02b.txt b/examples/text/clause6_02b.txt
similarity index 100%
rename from fixtures/text/clause6_02b.txt
rename to examples/text/clause6_02b.txt
diff --git a/fixtures/text/clause6_02c.txt b/examples/text/clause6_02c.txt
similarity index 100%
rename from fixtures/text/clause6_02c.txt
rename to examples/text/clause6_02c.txt
diff --git a/fixtures/text/clause6_02d.txt b/examples/text/clause6_02d.txt
similarity index 100%
rename from fixtures/text/clause6_02d.txt
rename to examples/text/clause6_02d.txt
diff --git a/fixtures/text/clause6_03.txt b/examples/text/clause6_03.txt
similarity index 100%
rename from fixtures/text/clause6_03.txt
rename to examples/text/clause6_03.txt
diff --git a/fixtures/text/clause7_01.txt b/examples/text/clause7_01.txt
similarity index 100%
rename from fixtures/text/clause7_01.txt
rename to examples/text/clause7_01.txt
diff --git a/fixtures/text/clause7_02.txt b/examples/text/clause7_02.txt
similarity index 100%
rename from fixtures/text/clause7_02.txt
rename to examples/text/clause7_02.txt
diff --git a/fixtures/text/clause7_03a.txt b/examples/text/clause7_03a.txt
similarity index 100%
rename from fixtures/text/clause7_03a.txt
rename to examples/text/clause7_03a.txt
diff --git a/fixtures/text/clause7_03b.txt b/examples/text/clause7_03b.txt
similarity index 100%
rename from fixtures/text/clause7_03b.txt
rename to examples/text/clause7_03b.txt
diff --git a/fixtures/text/clause7_04.txt b/examples/text/clause7_04.txt
similarity index 100%
rename from fixtures/text/clause7_04.txt
rename to examples/text/clause7_04.txt
diff --git a/fixtures/text/clause7_05.txt b/examples/text/clause7_05.txt
similarity index 100%
rename from fixtures/text/clause7_05.txt
rename to examples/text/clause7_05.txt
diff --git a/fixtures/text/clause7_07.txt b/examples/text/clause7_07.txt
similarity index 100%
rename from fixtures/text/clause7_07.txt
rename to examples/text/clause7_07.txt
diff --git a/fixtures/text/clause7_10.txt b/examples/text/clause7_10.txt
similarity index 100%
rename from fixtures/text/clause7_10.txt
rename to examples/text/clause7_10.txt
diff --git a/fixtures/text/clause7_12.txt b/examples/text/clause7_12.txt
similarity index 100%
rename from fixtures/text/clause7_12.txt
rename to examples/text/clause7_12.txt
diff --git a/fixtures/text/clause7_13.txt b/examples/text/clause7_13.txt
similarity index 100%
rename from fixtures/text/clause7_13.txt
rename to examples/text/clause7_13.txt
diff --git a/fixtures/text/clause7_15.txt b/examples/text/clause7_15.txt
similarity index 100%
rename from fixtures/text/clause7_15.txt
rename to examples/text/clause7_15.txt
diff --git a/fixtures/text/clause7_16.txt b/examples/text/clause7_16.txt
similarity index 100%
rename from fixtures/text/clause7_16.txt
rename to examples/text/clause7_16.txt
diff --git a/fixtures/text/clause7_17.txt b/examples/text/clause7_17.txt
similarity index 100%
rename from fixtures/text/clause7_17.txt
rename to examples/text/clause7_17.txt
diff --git a/fixtures/text/clause7_18.txt b/examples/text/clause7_18.txt
similarity index 100%
rename from fixtures/text/clause7_18.txt
rename to examples/text/clause7_18.txt
diff --git a/fixtures/text/clause7_19.txt b/examples/text/clause7_19.txt
similarity index 100%
rename from fixtures/text/clause7_19.txt
rename to examples/text/clause7_19.txt
diff --git a/fixtures/text/example01.txt b/examples/text/example01.txt
similarity index 100%
rename from fixtures/text/example01.txt
rename to examples/text/example01.txt
diff --git a/fixtures/text/example02.txt b/examples/text/example02.txt
similarity index 100%
rename from fixtures/text/example02.txt
rename to examples/text/example02.txt
diff --git a/fixtures/text/example03.txt b/examples/text/example03.txt
similarity index 100%
rename from fixtures/text/example03.txt
rename to examples/text/example03.txt
diff --git a/fixtures/text/example04.txt b/examples/text/example04.txt
similarity index 100%
rename from fixtures/text/example04.txt
rename to examples/text/example04.txt
diff --git a/fixtures/text/example05a.txt b/examples/text/example05a.txt
similarity index 100%
rename from fixtures/text/example05a.txt
rename to examples/text/example05a.txt
diff --git a/fixtures/text/example05b.txt b/examples/text/example05b.txt
similarity index 100%
rename from fixtures/text/example05b.txt
rename to examples/text/example05b.txt
diff --git a/fixtures/text/example06a.txt b/examples/text/example06a.txt
similarity index 100%
rename from fixtures/text/example06a.txt
rename to examples/text/example06a.txt
diff --git a/fixtures/text/example06b.txt b/examples/text/example06b.txt
similarity index 100%
rename from fixtures/text/example06b.txt
rename to examples/text/example06b.txt
diff --git a/fixtures/text/example07.txt b/examples/text/example07.txt
similarity index 100%
rename from fixtures/text/example07.txt
rename to examples/text/example07.txt
diff --git a/fixtures/text/example08.txt b/examples/text/example08.txt
similarity index 100%
rename from fixtures/text/example08.txt
rename to examples/text/example08.txt
diff --git a/fixtures/text/example09.txt b/examples/text/example09.txt
similarity index 100%
rename from fixtures/text/example09.txt
rename to examples/text/example09.txt
diff --git a/fixtures/text/example10.txt b/examples/text/example10.txt
similarity index 100%
rename from fixtures/text/example10.txt
rename to examples/text/example10.txt
diff --git a/fixtures/text/example11.txt b/examples/text/example11.txt
similarity index 100%
rename from fixtures/text/example11.txt
rename to examples/text/example11.txt
diff --git a/fixtures/text/example12.txt b/examples/text/example12.txt
similarity index 100%
rename from fixtures/text/example12.txt
rename to examples/text/example12.txt
diff --git a/fixtures/text/example13.txt b/examples/text/example13.txt
similarity index 100%
rename from fixtures/text/example13.txt
rename to examples/text/example13.txt
diff --git a/fixtures/text/example14.txt b/examples/text/example14.txt
similarity index 100%
rename from fixtures/text/example14.txt
rename to examples/text/example14.txt
diff --git a/fixtures/text/example15.txt b/examples/text/example15.txt
similarity index 100%
rename from fixtures/text/example15.txt
rename to examples/text/example15.txt
diff --git a/fixtures/text/example16.txt b/examples/text/example16.txt
similarity index 100%
rename from fixtures/text/example16.txt
rename to examples/text/example16.txt
diff --git a/fixtures/text/example17.txt b/examples/text/example17.txt
similarity index 100%
rename from fixtures/text/example17.txt
rename to examples/text/example17.txt
diff --git a/fixtures/text/example18.txt b/examples/text/example18.txt
similarity index 100%
rename from fixtures/text/example18.txt
rename to examples/text/example18.txt
diff --git a/fixtures/text/example19.txt b/examples/text/example19.txt
similarity index 100%
rename from fixtures/text/example19.txt
rename to examples/text/example19.txt
diff --git a/fixtures/text/example20.txt b/examples/text/example20.txt
similarity index 100%
rename from fixtures/text/example20.txt
rename to examples/text/example20.txt
diff --git a/fixtures/text/example21.txt b/examples/text/example21.txt
similarity index 100%
rename from fixtures/text/example21.txt
rename to examples/text/example21.txt
diff --git a/fixtures/text/example22.txt b/examples/text/example22.txt
similarity index 100%
rename from fixtures/text/example22.txt
rename to examples/text/example22.txt
diff --git a/fixtures/text/example23.txt b/examples/text/example23.txt
similarity index 100%
rename from fixtures/text/example23.txt
rename to examples/text/example23.txt
diff --git a/fixtures/text/example24.txt b/examples/text/example24.txt
similarity index 100%
rename from fixtures/text/example24.txt
rename to examples/text/example24.txt
diff --git a/fixtures/text/example25.txt b/examples/text/example25.txt
similarity index 100%
rename from fixtures/text/example25.txt
rename to examples/text/example25.txt
diff --git a/fixtures/text/example26.txt b/examples/text/example26.txt
similarity index 100%
rename from fixtures/text/example26.txt
rename to examples/text/example26.txt
diff --git a/fixtures/text/example27.txt b/examples/text/example27.txt
similarity index 100%
rename from fixtures/text/example27.txt
rename to examples/text/example27.txt
diff --git a/fixtures/text/example28.txt b/examples/text/example28.txt
similarity index 100%
rename from fixtures/text/example28.txt
rename to examples/text/example28.txt
diff --git a/fixtures/text/example29.txt b/examples/text/example29.txt
similarity index 100%
rename from fixtures/text/example29.txt
rename to examples/text/example29.txt
diff --git a/fixtures/text/example30.txt b/examples/text/example30.txt
similarity index 100%
rename from fixtures/text/example30.txt
rename to examples/text/example30.txt
diff --git a/fixtures/text/example31.txt b/examples/text/example31.txt
similarity index 100%
rename from fixtures/text/example31.txt
rename to examples/text/example31.txt
diff --git a/fixtures/text/example32.txt b/examples/text/example32.txt
similarity index 100%
rename from fixtures/text/example32.txt
rename to examples/text/example32.txt
diff --git a/fixtures/text/example33.txt b/examples/text/example33.txt
similarity index 100%
rename from fixtures/text/example33.txt
rename to examples/text/example33.txt
diff --git a/fixtures/text/example34.txt b/examples/text/example34.txt
similarity index 100%
rename from fixtures/text/example34.txt
rename to examples/text/example34.txt
diff --git a/fixtures/text/example35.txt b/examples/text/example35.txt
similarity index 100%
rename from fixtures/text/example35.txt
rename to examples/text/example35.txt
diff --git a/fixtures/text/example36-alt01.txt b/examples/text/example36-alt01.txt
similarity index 100%
rename from fixtures/text/example36-alt01.txt
rename to examples/text/example36-alt01.txt
diff --git a/fixtures/text/example36.txt b/examples/text/example36.txt
similarity index 100%
rename from fixtures/text/example36.txt
rename to examples/text/example36.txt
diff --git a/fixtures/text/example37.txt b/examples/text/example37.txt
similarity index 100%
rename from fixtures/text/example37.txt
rename to examples/text/example37.txt
diff --git a/fixtures/text/example38-alt01.txt b/examples/text/example38-alt01.txt
similarity index 100%
rename from fixtures/text/example38-alt01.txt
rename to examples/text/example38-alt01.txt
diff --git a/fixtures/text/example38.txt b/examples/text/example38.txt
similarity index 100%
rename from fixtures/text/example38.txt
rename to examples/text/example38.txt
diff --git a/fixtures/text/example39.txt b/examples/text/example39.txt
similarity index 100%
rename from fixtures/text/example39.txt
rename to examples/text/example39.txt
diff --git a/fixtures/text/example40-alt01.txt b/examples/text/example40-alt01.txt
similarity index 100%
rename from fixtures/text/example40-alt01.txt
rename to examples/text/example40-alt01.txt
diff --git a/fixtures/text/example40.txt b/examples/text/example40.txt
similarity index 100%
rename from fixtures/text/example40.txt
rename to examples/text/example40.txt
diff --git a/fixtures/text/example41.txt b/examples/text/example41.txt
similarity index 100%
rename from fixtures/text/example41.txt
rename to examples/text/example41.txt
diff --git a/fixtures/text/example42-alt01.txt b/examples/text/example42-alt01.txt
similarity index 100%
rename from fixtures/text/example42-alt01.txt
rename to examples/text/example42-alt01.txt
diff --git a/fixtures/text/example42.txt b/examples/text/example42.txt
similarity index 100%
rename from fixtures/text/example42.txt
rename to examples/text/example42.txt
diff --git a/fixtures/text/example43-alt01.txt b/examples/text/example43-alt01.txt
similarity index 100%
rename from fixtures/text/example43-alt01.txt
rename to examples/text/example43-alt01.txt
diff --git a/fixtures/text/example43.txt b/examples/text/example43.txt
similarity index 100%
rename from fixtures/text/example43.txt
rename to examples/text/example43.txt
diff --git a/fixtures/text/example44-alt01.txt b/examples/text/example44-alt01.txt
similarity index 100%
rename from fixtures/text/example44-alt01.txt
rename to examples/text/example44-alt01.txt
diff --git a/fixtures/text/example44.txt b/examples/text/example44.txt
similarity index 100%
rename from fixtures/text/example44.txt
rename to examples/text/example44.txt
diff --git a/fixtures/text/example45.txt b/examples/text/example45.txt
similarity index 100%
rename from fixtures/text/example45.txt
rename to examples/text/example45.txt
diff --git a/fixtures/text/example46-alt01.txt b/examples/text/example46-alt01.txt
similarity index 100%
rename from fixtures/text/example46-alt01.txt
rename to examples/text/example46-alt01.txt
diff --git a/fixtures/text/example46.txt b/examples/text/example46.txt
similarity index 100%
rename from fixtures/text/example46.txt
rename to examples/text/example46.txt
diff --git a/fixtures/text/example47.txt b/examples/text/example47.txt
similarity index 100%
rename from fixtures/text/example47.txt
rename to examples/text/example47.txt
diff --git a/fixtures/text/example48.txt b/examples/text/example48.txt
similarity index 100%
rename from fixtures/text/example48.txt
rename to examples/text/example48.txt
diff --git a/fixtures/text/example49-alt01.txt b/examples/text/example49-alt01.txt
similarity index 100%
rename from fixtures/text/example49-alt01.txt
rename to examples/text/example49-alt01.txt
diff --git a/fixtures/text/example49.txt b/examples/text/example49.txt
similarity index 100%
rename from fixtures/text/example49.txt
rename to examples/text/example49.txt
diff --git a/fixtures/text/example50.txt b/examples/text/example50.txt
similarity index 100%
rename from fixtures/text/example50.txt
rename to examples/text/example50.txt
diff --git a/fixtures/text/example51.txt b/examples/text/example51.txt
similarity index 100%
rename from fixtures/text/example51.txt
rename to examples/text/example51.txt
diff --git a/fixtures/text/example52.txt b/examples/text/example52.txt
similarity index 100%
rename from fixtures/text/example52.txt
rename to examples/text/example52.txt
diff --git a/fixtures/text/example53.txt b/examples/text/example53.txt
similarity index 100%
rename from fixtures/text/example53.txt
rename to examples/text/example53.txt
diff --git a/fixtures/text/example54-alt01.txt b/examples/text/example54-alt01.txt
similarity index 100%
rename from fixtures/text/example54-alt01.txt
rename to examples/text/example54-alt01.txt
diff --git a/fixtures/text/example54.txt b/examples/text/example54.txt
similarity index 100%
rename from fixtures/text/example54.txt
rename to examples/text/example54.txt
diff --git a/fixtures/text/example55-alt01.txt b/examples/text/example55-alt01.txt
similarity index 100%
rename from fixtures/text/example55-alt01.txt
rename to examples/text/example55-alt01.txt
diff --git a/fixtures/text/example55.txt b/examples/text/example55.txt
similarity index 100%
rename from fixtures/text/example55.txt
rename to examples/text/example55.txt
diff --git a/fixtures/text/example56.txt b/examples/text/example56.txt
similarity index 100%
rename from fixtures/text/example56.txt
rename to examples/text/example56.txt
diff --git a/fixtures/text/example57.txt b/examples/text/example57.txt
similarity index 100%
rename from fixtures/text/example57.txt
rename to examples/text/example57.txt
diff --git a/fixtures/text/example58.txt b/examples/text/example58.txt
similarity index 100%
rename from fixtures/text/example58.txt
rename to examples/text/example58.txt
diff --git a/fixtures/text/example59.txt b/examples/text/example59.txt
similarity index 100%
rename from fixtures/text/example59.txt
rename to examples/text/example59.txt
diff --git a/fixtures/text/example60.txt b/examples/text/example60.txt
similarity index 100%
rename from fixtures/text/example60.txt
rename to examples/text/example60.txt
diff --git a/fixtures/text/example61.txt b/examples/text/example61.txt
similarity index 100%
rename from fixtures/text/example61.txt
rename to examples/text/example61.txt
diff --git a/fixtures/text/example62.txt b/examples/text/example62.txt
similarity index 100%
rename from fixtures/text/example62.txt
rename to examples/text/example62.txt
diff --git a/fixtures/text/example63.txt b/examples/text/example63.txt
similarity index 100%
rename from fixtures/text/example63.txt
rename to examples/text/example63.txt
diff --git a/fixtures/text/example64.txt b/examples/text/example64.txt
similarity index 100%
rename from fixtures/text/example64.txt
rename to examples/text/example64.txt
diff --git a/fixtures/text/example65.txt b/examples/text/example65.txt
similarity index 100%
rename from fixtures/text/example65.txt
rename to examples/text/example65.txt
diff --git a/fixtures/text/example66.txt b/examples/text/example66.txt
similarity index 100%
rename from fixtures/text/example66.txt
rename to examples/text/example66.txt
diff --git a/fixtures/text/example67.txt b/examples/text/example67.txt
similarity index 100%
rename from fixtures/text/example67.txt
rename to examples/text/example67.txt
diff --git a/fixtures/text/example68.txt b/examples/text/example68.txt
similarity index 100%
rename from fixtures/text/example68.txt
rename to examples/text/example68.txt
diff --git a/fixtures/text/example69.txt b/examples/text/example69.txt
similarity index 100%
rename from fixtures/text/example69.txt
rename to examples/text/example69.txt
diff --git a/fixtures/text/example70.txt b/examples/text/example70.txt
similarity index 100%
rename from fixtures/text/example70.txt
rename to examples/text/example70.txt
diff --git a/fixtures/text/example71.txt b/examples/text/example71.txt
similarity index 100%
rename from fixtures/text/example71.txt
rename to examples/text/example71.txt
diff --git a/fixtures/text/example72.txt b/examples/text/example72.txt
similarity index 100%
rename from fixtures/text/example72.txt
rename to examples/text/example72.txt
diff --git a/fixtures/text/example73.txt b/examples/text/example73.txt
similarity index 100%
rename from fixtures/text/example73.txt
rename to examples/text/example73.txt
diff --git a/fixtures/text/example74.txt b/examples/text/example74.txt
similarity index 100%
rename from fixtures/text/example74.txt
rename to examples/text/example74.txt
diff --git a/fixtures/text/example75.txt b/examples/text/example75.txt
similarity index 100%
rename from fixtures/text/example75.txt
rename to examples/text/example75.txt
diff --git a/fixtures/text/example76.txt b/examples/text/example76.txt
similarity index 100%
rename from fixtures/text/example76.txt
rename to examples/text/example76.txt
diff --git a/fixtures/text/example77.txt b/examples/text/example77.txt
similarity index 100%
rename from fixtures/text/example77.txt
rename to examples/text/example77.txt
diff --git a/fixtures/text/example78.txt b/examples/text/example78.txt
similarity index 100%
rename from fixtures/text/example78.txt
rename to examples/text/example78.txt
diff --git a/fixtures/text/example79.txt b/examples/text/example79.txt
similarity index 100%
rename from fixtures/text/example79.txt
rename to examples/text/example79.txt
diff --git a/fixtures/text/example80.txt b/examples/text/example80.txt
similarity index 100%
rename from fixtures/text/example80.txt
rename to examples/text/example80.txt
diff --git a/fixtures/text/example81.txt b/examples/text/example81.txt
similarity index 100%
rename from fixtures/text/example81.txt
rename to examples/text/example81.txt
diff --git a/fixtures/text/example82.txt b/examples/text/example82.txt
similarity index 100%
rename from fixtures/text/example82.txt
rename to examples/text/example82.txt
diff --git a/fixtures/text/example83.txt b/examples/text/example83.txt
similarity index 100%
rename from fixtures/text/example83.txt
rename to examples/text/example83.txt
diff --git a/fixtures/text/example84.txt b/examples/text/example84.txt
similarity index 100%
rename from fixtures/text/example84.txt
rename to examples/text/example84.txt
diff --git a/fixtures/text/example85-alt01.txt b/examples/text/example85-alt01.txt
similarity index 100%
rename from fixtures/text/example85-alt01.txt
rename to examples/text/example85-alt01.txt
diff --git a/fixtures/text/example85.txt b/examples/text/example85.txt
similarity index 100%
rename from fixtures/text/example85.txt
rename to examples/text/example85.txt
diff --git a/fixtures/text/example86.txt b/examples/text/example86.txt
similarity index 100%
rename from fixtures/text/example86.txt
rename to examples/text/example86.txt
diff --git a/src/expr.rs b/src/expr.rs
index b490715..390ba9a 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -1,4 +1,5 @@
use crate::{Error, Geometry, SqlQuery, Validator};
+use pg_escape::{quote_identifier, quote_literal};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::str::FromStr;
@@ -63,8 +64,8 @@ impl Expr {
match self {
Expr::Bool(v) => Ok(v.to_string()),
Expr::Float(v) => Ok(v.to_string()),
- Expr::Literal(v) => Ok(format!("'{}'", v)),
- Expr::Property { property } => Ok(format!("\"{property}\"")),
+ Expr::Literal(v) => Ok(quote_literal(v).to_string()),
+ Expr::Property { property } => Ok(quote_identifier(property).to_string()),
Expr::Interval { interval } => {
check_len!(
"interval",
@@ -90,6 +91,8 @@ impl Expr {
match op.as_str() {
"and" => Ok(format!("({})", a.join(" AND "))),
"or" => Ok(format!("({})", a.join(" OR "))),
+ "like" => Ok(format!("({} LIKE {})", a[0], a[1])),
+ "in" => Ok(format!("({} IN {})", a[0], a[1])),
"between" => {
check_len!(
"between",
@@ -101,13 +104,17 @@ impl Expr {
"not" => {
check_len!("not", a, 1, format!("(NOT {})", a[0]))
}
- "is null" => {
+ "isNull" => {
check_len!("is null", a, 1, format!("({} IS NULL)", a[0]))
}
- "+" | "-" | "*" | "/" | "%" | "^" | "=" | "<=" | "<" | "<>" | ">" | ">=" => {
+ "+" | "-" | "*" | "/" | "%" => {
+ let paddedop = format!(" {} ", op);
+ Ok(a.join(&paddedop).to_string())
+ }
+ "^" | "=" | "<=" | "<" | "<>" | ">" | ">=" => {
check_len!(op, a, 2, format!("({} {} {})", a[0], op, a[1]))
}
- _ => Ok(format!("{}({})", op, a.join(", "))),
+ _ => Ok(format!("{}({})", quote_identifier(op), a.join(", "))),
}
}
Expr::BBox { bbox } => {
diff --git a/src/lib.rs b/src/lib.rs
index af56901..d630014 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -59,7 +59,7 @@ pub struct SqlQuery {
/// # Examples
///
/// ```
-/// let s = include_str!("../fixtures/json/example01.json");
+/// let s = include_str!("../examples/json/example01.json");
/// let expr = cql2::parse_json(s);
/// ```
pub fn parse_json(s: &str) -> Result<Expr, serde_json::Error> {
@@ -71,7 +71,7 @@ pub fn parse_json(s: &str) -> Result<Expr, serde_json::Error> {
/// # Examples
///
/// ```no_run
-/// let expr = cql2::parse_file("tests/fixtures/json/example01.json");
+/// let expr = cql2::parse_file("tests/examples/json/example01.json");
/// ```
pub fn parse_file(path: impl AsRef<Path>) -> Result<Expr, Error> {
let s = fs::read_to_string(path)?;
diff --git a/src/parser.rs b/src/parser.rs
index d162cd2..c896ada 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -256,13 +256,13 @@ fn parse_expr(expression_pairs: Pairs<'_, Rule>) -> Result<Expr, Error> {
let mut outargs: Vec<Box<Expr>> = Vec::new();
match lhsclone {
- Expr::Operation { ref op, ref args } if op == "and" => {
+ Expr::Operation { ref op, ref args } if op == "and" && op == &opstring => {
for arg in args.iter() {
outargs.push(arg.clone());
}
outargs.push(Box::new(rhsclone));
return Ok(Expr::Operation {
- op: "and".to_string(),
+ op: opstring,
args: outargs,
});
}
@@ -298,13 +298,21 @@ fn parse_expr(expression_pairs: Pairs<'_, Rule>) -> Result<Expr, Error> {
})
.map_postfix(|child, op| {
let child = child?;
- match op.as_rule() {
- Rule::IsNullPostfix => Ok(Expr::Operation {
+ let notflag = &op.clone().into_inner().next().is_some();
+ let retexpr = match op.as_rule() {
+ Rule::IsNullPostfix => Expr::Operation {
op: "isNull".to_string(),
args: vec![Box::new(child)],
- }),
+ },
rule => unreachable!("Expr::parse expected postfix operator, found {:?}", rule),
- }
+ };
+ if *notflag {
+ return Ok(Expr::Operation {
+ op: "not".to_string(),
+ args: vec![Box::new(retexpr)],
+ });
+ };
+ Ok(retexpr)
})
.parse(expression_pairs)
}
| Invalid parse while combining AND and OR clauses
Hi, thanks a lot for this project.
I tried using it, but I'm stuck on what I think is an invalid parse.
Here is a minimal example:
```rust
use cql2::Expr;
pub fn main() {
let s = "(floors>5 AND material='brick') OR swimming_pool=true";
let query = s.parse::<Expr>().expect("should be valid query");
assert_eq!(
query.to_text().unwrap(),
r#"(("floors" > 5) AND ("material" = 'brick')) OR ("swimming_pool" = true)"#
);
}
```
```
thread 'main' panicked at examples/test.rs:8:5:
assertion `left == right` failed
left: "((\"floors\" > 5) AND (\"material\" = 'brick') AND (\"swimming_pool\" = true))"
right: "((\"floors\" > 5) AND (\"material\" = 'brick')) OR (\"swimming_pool\" = true)"
```
The `AND`/`OR` combination is parsed as two `AND`s: the trailing `OR` clause gets merged into the preceding `AND`.
The example is from `fixtures/text/example17.txt`: `(floors>5 AND material='brick') OR swimming_pool=true`, but strangely it doesn't match `tests/expected/text/example17.txt` (I think because that file is generated).
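For readers tracing the bug: the root cause is visible in the `src/parser.rs` hunk above. When the left-hand side of an infix expression was already an `and` operation, the parser flattened the new right-hand side into it no matter which operator was currently being parsed, so a trailing `OR` clause was absorbed into the `AND`. The committed fix guards the flattening on the operator matching (`op == "and" && op == &opstring`). Below is a minimal, self-contained sketch of that guard; `Expr` and `merge` here are simplified stand-ins for illustration, not the crate's actual definitions:
```rust
// Simplified stand-in types for illustration; the crate's real Expr
// boxes its arguments and has many more variants.
#[derive(Debug, PartialEq)]
enum Expr {
    Op { op: String, args: Vec<Expr> },
    Ident(String),
}

/// Fold a newly parsed right-hand side into the left-hand side.
fn merge(lhs: Expr, opstring: &str, rhs: Expr) -> Expr {
    match lhs {
        // The guard that was missing: only flatten into an existing n-ary
        // operation when it uses the *same* operator being parsed now.
        Expr::Op { op, mut args } if op == opstring => {
            args.push(rhs);
            Expr::Op { op, args }
        }
        // Anything else (including an `and` node while parsing `or`):
        // nest the old expression instead of absorbing the new argument.
        other => Expr::Op {
            op: opstring.to_string(),
            args: vec![other, rhs],
        },
    }
}

fn ident(s: &str) -> Expr {
    Expr::Ident(s.to_string())
}

fn main() {
    // a AND b, then OR c: without the guard, c would be appended to the
    // `and` node and the OR would silently disappear.
    let and = merge(ident("a"), "and", ident("b"));
    let or = merge(and, "or", ident("c"));
    assert_eq!(
        or,
        Expr::Op {
            op: "or".to_string(),
            args: vec![
                Expr::Op {
                    op: "and".to_string(),
                    args: vec![ident("a"), ident("b")],
                },
                ident("c"),
            ],
        }
    );
}
```
Flattening same-operator chains into one n-ary node (so `a AND b AND c` stays a single `and` with three arguments) is still desirable; the fix only stops that flattening from crossing operator boundaries.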
Failing test for #44
| @bitner I took a stab but I don't grok the parser well enough ATM, so I'm unassigning myself. I put a failing test up in https://github.com/developmentseed/cql2-rs/pull/45.
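For the record, a hypothetical shape for that failing test, reconstructed only from the repro in the issue above (the actual test in https://github.com/developmentseed/cql2-rs/pull/45 may differ). Before the guard fix, `to_text()` on this input produced only `AND`s, so asserting that the `OR` survives a round trip is enough to fail:
```rust
// Hypothetical regression test sketched from the issue's repro; the real
// test added in PR #45 may look different.
#[test]
fn or_clause_survives_round_trip() {
    let expr: cql2::Expr = "(floors>5 AND material='brick') OR swimming_pool=true"
        .parse()
        .expect("should be a valid cql2-text query");
    let text = expr.to_text().unwrap();
    // Before the fix this came back as one big AND, losing the OR entirely.
    assert!(text.contains(" OR "), "OR clause was collapsed into AND: {text}");
}
```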
| 2024-11-12T20:14:43 | 0.0 | [] | [] |
||
developmentseed/cql2-rs | developmentseed__cql2-rs-36 | b787e523d665e7a5bcc2870163a32433bdb98d3f | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index cc91f4a..6515705 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,8 +24,8 @@ jobs:
- name: cargo fmt
run: cargo fmt --all --check
- name: cargo clippy
- run: cargo clippy --tests --all -- -D warnings
+ run: cargo clippy --tests --workspace -- -D warnings
- name: cargo check
- run: cargo check --all
+ run: cargo check --workspace
- name: cargo test
run: cargo test
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..066c301
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,32 @@
+name: Docs
+
+on:
+ push:
+ tags:
+ - "v*"
+ workflow_dispatch:
+
+permissions:
+ contents: write
+ pages: write
+
+jobs:
+ docs:
+ runs-on: ubuntu-latest
+ env:
+ GIT_COMMITTER_NAME: ci-bot
+ GIT_COMMITTER_EMAIL: [email protected]
+ steps:
+ - uses: actions/checkout@v4
+ - uses: Swatinem/rust-cache@v2
+ - uses: astral-sh/setup-uv@v3
+ with:
+ enable-cache: true
+ - name: Install Python
+ run: uv python install # we use uv instead of setup-python so we get python-version resolution between our two packages
+ - name: Sync
+ run: uv sync
+ - name: Deploy
+ run: |
+ VERSION=$(git describe --tags --match="v*" --abbrev=0)
+ uv run mike deploy $VERSION latest --update-aliases --push
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index ef01592..42e37d1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
target/
debug/
**/*.rs.bk
+.cache
+site/
diff --git a/docs/cli.md b/docs/cli.md
new file mode 100644
index 0000000..a682379
--- /dev/null
+++ b/docs/cli.md
@@ -0,0 +1,66 @@
+# CLI
+
+**cql2-rs** includes a command-line interface (CLI).
+
+## Installation
+
+Install [Rust](https://rustup.rs/).
+Then:
+
+```shell
+cargo install cql2-cli
+```
+
+## Usage
+
+At its simplest, the CLI is a pass-through validator:
+
+```shell
+$ cql2 < tests/fixtures/text/example01.txt # will succeed if the CQL2 is valid
+("landsat:scene_id" = 'LC82030282019133LGN00')
+```
+
+You can convert formats:
+
+```shell
+$ cql2 -o json < tests/fixtures/text/example01.txt
+{"op":"=","args":[{"property":"landsat:scene_id"},"LC82030282019133LGN00"]}
+```
+
+Use `-v` to get detailed validation information:
+
+```shell
+$ cql2 'wrong' -v
+[ERROR] Invalid CQL2: wrong
+For more detailed validation information, use -vv
+jsonschema validation failed with file:///tmp/cql2.json#
+- at '': oneOf failed, none matched
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': oneOf failed, none matched
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': want boolean, but got object
+```
+
+cql2-text parsing errors are pretty-printed:
+
+```shell
+$ cql2 '(foo ~= "bar")'
+[ERROR] Parsing error: (foo ~= "bar")
+ --> 1:6
+ |
+1 | (foo ~= "bar")
+ | ^---
+ |
+ = expected NotFlag, And, Or, ConcatInfixOp, Add, Subtract, Multiply, Divide, Modulo, Power, Eq, Gt, GtEq, Lt, LtEq, NotEq, Is, or IsNullPostfix
+```
+
+Use `cql2 --help` to get a complete listing of the CLI arguments and formats.
diff --git a/docs/ds-logo-hor--pos.svg b/docs/ds-logo-hor--pos.svg
new file mode 100644
index 0000000..d0041c6
--- /dev/null
+++ b/docs/ds-logo-hor--pos.svg
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg width="100%" height="100%" viewBox="0 0 188 32" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
+ <rect id="hor--pos" x="0" y="0" width="188" height="32" style="fill:none;"/>
+ <g id="hor--pos1" serif:id="hor--pos">
+ <g id="text">
+      <path d="M49,6L49,10.9C49,10.8 48.2,10.8 47.9,10.7C47.6,10.7 47.3,10.6 46.8,10.6C46,10.6 45.2,10.7 44.6,11C44,11.3 43.5,11.7 43.1,12.2C42.7,12.7 42.4,13.4 42.2,14.2C42.1,15 42,15.9 42,16.8C42,18.7 42.3,20.2 43,21.2C43.7,22.2 44.7,22.7 45.9,22.7C46.7,22.7 47.3,22.6 47.8,22.3C48.3,22 48.7,21.8 49.1,21.1L49.2,22.9L51,22.9L51,6L49,6ZM49,17.5C49,18 48.9,18.5 48.8,19C48.7,19.5 48.5,19.9 48.2,20.2C48,20.5 47.7,20.8 47.3,21C47,21.2 46.6,21.3 46.2,21.3C45.4,21.3 44.8,20.9 44.4,20.1C44,19.3 43.8,18.2 43.8,16.6C43.8,15.1 44,14 44.5,13.2C44.9,12.4 45.7,12.1 46.7,12.1C47.2,12.1 47.6,12.1 48,12.2C48.4,12.3 49,12.4 49,12.5L49,17.5ZM58.8,22.3C58.1,22.6 57.3,22.8 56.5,22.8C55,22.8 53.9,22.4 53.2,21.5C52.5,20.6 52.1,19.2 52.1,17.3C52.1,15.1 52.5,13.4 53.3,12.3C54.1,11.2 55.2,10.6 56.6,10.6C58,10.6 59,11.1 59.8,12.1C60.5,13.1 60.8,14.7 60.7,16.8L54,16.8L54,17C54,18.6 54.2,19.7 54.7,20.4C55.2,21.1 55.8,21.4 56.7,21.4C57.2,21.4 57.8,21.3 58.3,21.1C58.8,20.9 59.4,20.6 60.1,20.1L60.8,21.3C60.1,21.7 59.5,22 58.8,22.3ZM58.2,12.9C57.8,12.3 57.2,12 56.5,12C55.8,12 55.2,12.3 54.8,12.9C54.4,13.5 54.2,14.4 54.1,15.5L58.8,15.5C58.8,14.4 58.6,13.5 58.2,12.9ZM69.2,13.6C69,14.5 68.7,15.4 68.4,16.4C68.1,17.3 67.8,18.3 67.5,19.4C67.2,20.4 66.9,21.5 66.5,22.6L64.7,22.6C64.3,21.4 64,20.3 63.6,19.3L62.7,16.3C62.4,15.4 62.2,14.4 61.9,13.6C61.7,12.7 61.5,11.8 61.3,11L63,11L64.2,15.8C64.6,17.4 65.1,19 65.6,20.7L66.9,15.9C67.3,14.3 67.7,12.7 68.1,11L69.7,11C69.7,11.8 69.5,12.7 69.2,13.6ZM77.1,22.3C76.4,22.6 75.6,22.8 74.8,22.8C73.3,22.8 72.2,22.4 71.5,21.5C70.8,20.6 70.4,19.2 70.4,17.3C70.4,15.1 70.8,13.4 71.6,12.3C72.4,11.2 73.5,10.6 74.9,10.6C76.3,10.6 77.3,11.1 78.1,12.1C78.8,13.1 79.1,14.7 79,16.8L72.3,16.8L72.3,17C72.3,18.6 72.5,19.7 73,20.4C73.5,21.1 74.1,21.4 75,21.4C75.5,21.4 76.1,21.3 76.6,21.1C77.1,20.9 77.7,20.6 78.4,20.1L79.1,21.3C78.4,21.7 77.8,22 77.1,22.3ZM76.5,12.9C76.1,12.3 75.5,12 74.8,12C74.1,12 73.5,12.3 73.1,12.9C72.7,13.5 72.5,14.4 72.4,15.5L77,15.5C77.1,14.4 76.9,13.5 76.5,12.9ZM80.5,22.6L80.5,6.5L82.2,6.5L82.2,22.6L80.5,22.6ZM88.1,22.8C86.6,22.8 85.5,22.3 84.8,21.4C84.1,20.4 83.7,18.9 83.7,16.9C83.7,14.8 84.1,13.3 84.8,12.2C85.5,11.2 86.7,10.7 88.3,10.7C89.8,10.7 90.9,11.2 91.6,12.2C92.3,13.2 92.6,14.7 92.6,16.7C92.6,20.8 91.1,22.8 88.1,22.8ZM90.2,13.2C89.8,12.4 89.1,12 88.1,12C87.2,12 86.6,12.4 86.2,13.1C85.8,13.9 85.6,15 85.6,16.6C85.6,18.3 85.8,19.5 86.2,20.2C86.6,20.9 87.3,21.3 88.2,21.3C89.8,21.3 90.7,19.8 90.7,16.7C90.7,15.1 90.5,13.9 90.2,13.2ZM102.4,21.1C101.6,22.2 100.4,22.8 98.7,22.8C98.3,22.8 97.9,22.8 97.6,22.7C97.3,22.7 97,22.6 96.5,22.5L96.5,26.4L94.8,26.4L94.8,10.9L96.3,10.9L96.4,12.4C96.8,11.7 97.2,11.3 97.7,11C98.2,10.7 98.8,10.6 99.6,10.6C100.9,10.6 101.8,11.1 102.5,12.1C103.2,13.1 103.5,14.6 103.5,16.5C103.5,18.5 103.2,20 102.4,21.1ZM101.1,13.3C100.7,12.5 100.1,12.1 99.3,12.1C98.9,12.1 98.5,12.2 98.2,12.4C97.9,12.6 97.6,12.9 97.4,13.2C97.2,13.5 97,13.9 96.8,14.4C96.6,14.9 96.6,15.3 96.6,15.9L96.6,21C96.9,21.1 97.2,21.2 97.6,21.3C98,21.4 98.4,21.4 98.9,21.4C99.9,21.4 100.6,21 101.1,20.3C101.6,19.6 101.8,18.4 101.8,16.9C101.7,15.2 101.5,14 101.1,13.3ZM117.6,22.6L117.6,14.7C117.6,13.9 117.5,13.2 117.4,12.7C117.3,12.2 116.9,12 116.3,12C115.9,12 115.5,12.1 115.1,12.4C114.7,12.7 114.2,13.1 113.5,13.6L113.5,22.5L111.8,22.5L111.8,14.7C111.8,13.9 111.7,13.2 111.6,12.7C111.5,12.2 111.1,12 110.5,12C110.1,12 109.7,12.1 109.3,12.4C108.9,12.7 108.4,13.1 107.7,13.6L107.7,22.5L106,22.5L106,10.9L107.5,10.9L107.6,12.3L109.3,11.1C109.8,10.8 110.4,10.6 111,10.6C111.7,10.6 112.2,10.8 112.5,11.1C112.8,11.4 113.1,11.8 113.3,12.4C113.9,11.9 114.5,11.5 115.1,11.2C115.7,10.9 116.2,10.7 116.8,10.7C117.8,10.7 118.4,11 118.8,11.6C119.2,12.2 119.3,13.1 119.3,14.4L119.3,22.6L117.6,22.6ZM127.5,22.3C126.8,22.6 126,22.8 125.2,22.8C123.7,22.8 122.6,22.4 121.9,21.5C121.2,20.6 120.8,19.2 120.8,17.3C120.8,15.1 121.2,13.4 122,12.3C122.8,11.2 123.9,10.6 125.3,10.6C126.7,10.6 127.7,11.1 128.5,12.1C129.2,13.1 129.5,14.7 129.4,16.8L122.7,16.8L122.7,17C122.7,18.6 122.9,19.7 123.4,20.4C123.9,21.1 124.5,21.4 125.4,21.4C125.9,21.4 126.5,21.3 127,21.1C127.5,20.9 128.1,20.6 128.8,20.1L129.5,21.3C128.8,21.7 128.1,22 127.5,22.3ZM126.8,12.9C126.4,12.3 125.8,12 125.1,12C124.4,12 123.8,12.3 123.4,12.9C123,13.5 122.8,14.4 122.7,15.5L127.4,15.5C127.4,14.4 127.2,13.5 126.8,12.9ZM136.9,22.5L136.9,14.7C136.9,13.9 136.8,13.2 136.6,12.7C136.4,12.2 136,12 135.4,12C135,12 134.6,12.1 134.2,12.4C133.8,12.7 133.2,13.1 132.6,13.6L132.6,22.5L130.9,22.5L130.9,10.9L132.4,10.9L132.5,12.3C133.1,11.9 133.7,11.5 134.2,11.1C134.8,10.8 135.3,10.6 135.9,10.6C136.9,10.6 137.6,10.9 137.9,11.5C138.3,12.1 138.5,13 138.5,14.3L138.5,22.5L136.9,22.5ZM144.7,22.8C144.1,22.8 143.5,22.7 143.1,22.6C142.7,22.5 142.4,22.2 142.2,21.9C142,21.6 141.8,21.2 141.8,20.6C141.7,20.1 141.7,19.4 141.7,18.7L141.7,12.3L140,12.3L140,10.9L141.7,10.9L141.7,7.7L143.4,7.5L143.4,10.9L145.8,10.9L145.7,12.3L143.5,12.3L143.5,19.8C143.5,20.2 143.6,20.5 143.7,20.7C143.8,20.9 144,21.1 144.2,21.2C144.4,21.3 144.7,21.4 145.1,21.4L145.6,21.4L146.1,21.3L146.3,22.7C145.7,22.7 145.2,22.8 144.7,22.8Z" style="fill:rgb(68,63,63);fill-rule:nonzero;"/>
+ <path d="M157.4,20.4C157.2,20.9 156.9,21.3 156.5,21.6C156.1,21.9 155.6,22.2 155.1,22.4C154.5,22.6 153.9,22.7 153.3,22.7C151.8,22.7 150.8,22.4 150.1,21.8C149.4,21.2 149.2,20.3 149.3,19.3L150.8,19.1C150.8,19.4 150.8,19.6 150.9,19.9C151,20.2 151.1,20.4 151.3,20.6C151.5,20.8 151.8,21 152.1,21.1C152.5,21.2 152.9,21.3 153.5,21.3C154.3,21.3 154.9,21.1 155.4,20.7C155.9,20.3 156.1,19.7 156.1,19C156.1,18.4 155.9,17.9 155.6,17.6C155.3,17.2 154.9,17 154.4,16.7C153.9,16.5 153.4,16.2 152.8,16C152.2,15.8 151.7,15.5 151.2,15.2C150.7,14.9 150.3,14.5 150,14C149.7,13.5 149.5,12.9 149.5,12.1C149.5,11.6 149.6,11.1 149.8,10.6C150,10.2 150.3,9.8 150.6,9.5C151,9.2 151.4,9 151.9,8.8C152.4,8.6 153,8.6 153.7,8.6C154.4,8.6 155,8.7 155.4,8.9C155.9,9.1 156.3,9.3 156.6,9.6C156.9,9.9 157.1,10.2 157.2,10.6C157.3,11 157.4,11.4 157.4,11.8L155.9,12C155.9,11.7 155.9,11.5 155.8,11.2C155.7,11 155.6,10.7 155.4,10.6C155.2,10.4 155,10.3 154.7,10.2C154.4,10.1 154,10 153.6,10C153.2,10 152.8,10.1 152.5,10.2C152.2,10.3 151.9,10.5 151.7,10.6C151.5,10.8 151.3,11 151.2,11.2C151.1,11.4 151,11.7 151,12C151,12.6 151.2,13.1 151.5,13.4C151.8,13.8 152.3,14 152.8,14.3L154.4,15C155,15.2 155.5,15.5 156,15.8C156.5,16.1 156.9,16.5 157.3,17C157.6,17.5 157.8,18.1 157.8,18.9C157.7,19.4 157.6,19.9 157.4,20.4ZM159.7,22.6L159.7,8.9L166.6,8.9L166.5,10.3L161.4,10.3L161.4,14.8L165.4,14.8L165.4,16.2L161.4,16.2L161.4,21.2L166.7,21.2L166.7,22.6L159.7,22.6ZM168.8,22.6L168.8,8.9L175.7,8.9L175.6,10.3L170.5,10.3L170.5,14.8L174.5,14.8L174.5,16.2L170.5,16.2L170.5,21.2L175.8,21.2L175.8,22.6L168.8,22.6ZM186.8,18.4C186.5,19.3 186.1,20 185.6,20.6C185.1,21.2 184.4,21.7 183.7,22C182.9,22.3 182.1,22.5 181.1,22.5L178.2,22.5L178.2,8.9L181.1,8.9C183.1,8.9 184.7,9.5 185.7,10.7C186.7,11.9 187.2,13.5 187.2,15.6C187.2,16.6 187.1,17.5 186.8,18.4ZM184.4,11.7C183.7,10.7 182.6,10.2 181,10.2L179.9,10.2L179.9,21.1L181.3,21.1C182.7,21.1 183.7,20.6 184.4,19.7C185.1,18.8 185.5,17.4 185.5,15.7C185.4,14 185.1,12.7 184.4,11.7Z" style="fill:rgb(208,66,0);fill-rule:nonzero;"/>
+ </g>
+ <g id="symbol">
+ <path id="surface" d="M30,0L2,0C0.9,0 0,0.9 0,2L0,30C0,31.1 0.9,32 2,32L30,32C31.1,32 32,31.1 32,30L32,2C32,0.9 31.1,0 30,0Z" style="fill:rgb(208,66,0);fill-rule:nonzero;"/>
+ <g id="shape" transform="matrix(0.925342,0,0,0.925342,1.19453,2.38906)">
+ <path d="M18.1,27.2C18.1,26.6 18.3,26 18.5,25.4C17.9,25.9 17.7,26.5 17.5,27.3C17.6,26.4 18,20.4 18,20.1C19.5,15.7 18.8,2.7 20.8,1C18.6,1.4 17.8,3.7 17.1,5.5C16.4,3.4 17,2 14.6,1.2C16.5,2.8 16.1,4.2 16.5,6.4C15.4,4.9 15.7,3.1 14.4,1.6C14,1.2 12.8,0.3 12.8,0.3C12.8,0.4 13.8,2.3 14,3.4C14.2,4.4 14.6,6.5 14.6,7.3C14.8,11.5 15.8,15.6 16.6,19.8C16.7,20.5 17.1,22.6 17.1,22.6L17.2,23.5C17.1,22.9 16.9,22.4 16.9,22.2C16.7,21.6 16.2,20 16,19.3C14.2,15.6 11,8.5 6.6,7.4C9.1,9.6 11.7,12.5 12.3,15.3C11,13.4 5.8,9.5 3.6,8C7.6,12.1 10.8,16.4 14.3,21C15.3,22.3 16.6,25 16.7,25.6C16.8,26 16.8,27.7 16.9,28.7C15,24 13.9,21.6 10.3,19.5C9.7,19.2 7.8,18.5 7.8,18.5C8.7,19.5 9.7,19.5 10.5,20.8C8.9,19.7 7.2,19.5 5.3,19.5C11.3,22.6 14.5,23.5 16.5,29.4C16.7,30.4 16.4,31 16.4,32L17.7,32L17.7,30.6C17.8,30.2 17.8,28.9 18,28.6C18.6,27 19.6,26.3 21.1,25.3C19.7,25.8 19.1,25.6 18.1,27.2ZM24,13.2C23.5,13.7 22,15.5 22,15.5C22.1,12.9 22.7,10.1 24.4,8.1C22.1,9.6 20.8,13.4 20.1,15.7C19.9,16.6 17.1,22 18.4,24.9C18.6,23.8 19.2,22.8 19.9,21.9C20.2,21.5 21.5,19.4 22.8,16.5C23.9,13.6 25.8,11.8 28.4,10.4C26.5,10.7 25.4,11.8 24,13.2Z" style="fill:white;fill-rule:nonzero;"/>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/docs/favicon.png b/docs/favicon.png
new file mode 100644
index 0000000..db95a32
Binary files /dev/null and b/docs/favicon.png differ
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..67e3ede
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,27 @@
+# cql2-rs
+
+**cql2-rs** is a [Python package](./python.md), a [command-line interface (CLI)](./cli.md), and a [Rust crate](https://docs.rs/cql2) for parsing, validating, and converting [Common Query Language (CQL2)](https://www.ogc.org/standard/cql2/).
+
+## Python
+
+```python
+>>> from cql2 import Expr
+>>> expr = Expr("landsat:scene_id = 'LC82030282019133LGN00'")
+>>> expr.to_json()
+{'op': '=', 'args': [{'property': 'landsat:scene_id'}, 'LC82030282019133LGN00']}
+```
+
+## CLI
+
+```shell
+$ cql2 < tests/fixtures/text/example01.txt # will succeed if the CQL2 is valid
+("landsat:scene_id" = 'LC82030282019133LGN00')
+```
+
+## Rust
+
+```rust
+use cql2::Expr;
+let expr: Expr = "landsat:scene_id = 'LC82030282019133LGN00'".parse().unwrap();
+let json = expr.to_json().unwrap();
+```
diff --git a/docs/python.md b/docs/python.md
new file mode 100644
index 0000000..1525bbf
--- /dev/null
+++ b/docs/python.md
@@ -0,0 +1,13 @@
+# Python
+
+Python API documentation for the **cql2** package.
+Install from PyPI:
+
+```shell
+python -m pip install cql2
+```
+
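+## Example
+
+A minimal usage sketch, mirroring the examples in the package's type stubs:
+
+```python
+>>> from cql2 import Expr
+>>> expr = Expr("landsat:scene_id = 'LC82030282019133LGN00'")
+>>> expr.to_json()
+{'op': '=', 'args': [{'property': 'landsat:scene_id'}, 'LC82030282019133LGN00']}
+>>> q = expr.to_sql()
+>>> q.query
+'("landsat:scene_id" = $1)'
+>>> q.params
+['LC82030282019133LGN00']
+```
+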
+## API
+
+::: cql2.Expr
+::: cql2.SqlQuery
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..ff8ce2a
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,65 @@
+site_name: cql2-rs
+site_url: https://developmentseed.org/cql2-rs/
+site_description: Command-line interface (CLI), Python package, and Rust crate for parsing, validating, and converting Common Query Language (CQL2)
+site_author: David Bitner
+repo_name: developmentseed/cql2-rs
+repo_url: https://github.com/developmentseed/cql2-rs
+edit_uri: edit/main/docs/
+
+extra:
+ social:
+ - icon: "fontawesome/brands/github"
+ version:
+ provider: mike
+
+theme:
+ name: material
+ logo: ds-logo-hor--pos.svg
+ favicon: favicon.png
+ icon:
+ repo: fontawesome/brands/github
+ palette:
+ primary: blue
+ features:
+ - content.action.edit
+ - navigation.indexes
+ - navigation.instant
+ - navigation.tabs
+ - navigation.tracking
+ - search.share
+ - search.suggest
+ - toc.integrate
+nav:
+ - cql2-rs: index.md
+ - Python: python.md
+ - CLI: cli.md
+ - Rust: https://docs.rs/cql2
+plugins:
+ - search
+ - social
+ - mike:
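+      # "copy" publishes aliases such as "latest" as full copies rather than redirects; "latest" is the canonical version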
+ alias_type: "copy"
+ canonical_version: "latest"
+ - mkdocstrings:
+ enable_inventory: true
+ handlers:
+ python:
+ options:
+ allow_inspection: false
+ show_root_heading: true
+ separate_signature: false
+ docstring_style: google
+ docstring_section_style: list
+ show_symbol_type_toc: true
+ signature_crossrefs: true
+ merge_init_into_class: true
+ docstring_options:
+ ignore_init_summary: false
+markdown_extensions:
+ - pymdownx.highlight:
+ anchor_linenums: true
+ line_spans: __span
+ pygments_lang_class: true
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.superfences
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..1d035a6
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,18 @@
+[project]
+name = "cql2-rs"
+version = "0.0.0"
+description = "This Python package is non-releasable and is only used for building the documentation"
+requires-python = ">=3.12"
+
+[tool.uv]
+dev-dependencies = [
+ "mike>=2.1.3",
+ "mkdocs-material[imaging]>=9.5.39",
+ "mkdocstrings[python]>=0.26.1",
+]
+
+[tool.uv.sources]
+python = { workspace = true }
+
+[tool.uv.workspace]
+members = ["python/"]
diff --git a/python/cql2.pyi b/python/cql2.pyi
index 0beab85..359814b 100644
--- a/python/cql2.pyi
+++ b/python/cql2.pyi
@@ -2,13 +2,83 @@ from typing import Any
from os import PathLike
class SqlQuery:
+ """A SQL query"""
+
query: str
+ """The query, with parameterized fields."""
+
params: list[str]
+ """The parameters, to use for binding."""
class Expr:
@staticmethod
- def from_path(path: PathLike) -> Expr: ...
- def __init__(self, cql2: str | dict[str, Any]) -> None: ...
- def to_json(self) -> dict[str, Any]: ...
- def to_text(self) -> str: ...
- def to_sql(self) -> SqlQuery: ...
+ def from_path(path: PathLike | str) -> Expr:
+ """Reads CQL2 from a filesystem path.
+
+ Args:
+ path (PathLike | str): The input path
+
+ Returns:
+ Expr: The CQL2 expression
+
+ Examples:
+ >>> from cql2 import Expr
+ >>> expr = Expr.from_path("fixtures/text/example01.txt")
+ """
+
+ def __init__(self, cql2: str | dict[str, Any]) -> None:
+ """A CQL2 expression.
+
+        The input can be a cql2-text string, a cql2-json string, or a
+        cql2-json dictionary.
+
+ Args:
+ cql2 (str | dict[str, Any]): The input CQL2
+
+ Examples:
+ >>> from cql2 import Expr
+ >>> expr = Expr("landsat:scene_id = 'LC82030282019133LGN00'")
+ >>> expr = Expr({"op":"=","args":[{"property":"landsat:scene_id"},"LC82030282019133LGN00"]})
+ """
+
+ def to_json(self) -> dict[str, Any]:
+ """Converts this cql2 expression to a cql2-json dictionary.
+
+ Returns:
+ dict[str, Any]: The cql2-json
+
+ Examples:
+ >>> from cql2 import Expr
+ >>> expr = Expr("landsat:scene_id = 'LC82030282019133LGN00'")
+ >>> expr.to_json()
+ {'op': '=', 'args': [{'property': 'landsat:scene_id'}, 'LC82030282019133LGN00']}
+ """
+
+ def to_text(self) -> str:
+ """Converts this cql2 expression to cql2-text.
+
+ Returns:
+ str: The cql2-text
+
+ Examples:
+ >>> from cql2 import Expr
+ >>> expr = Expr({"op":"=","args":[{"property":"landsat:scene_id"},"LC82030282019133LGN00"]})
+ >>> expr.to_text()
+ '("landsat:scene_id" = \'LC82030282019133LGN00\')'
+ """
+
+ def to_sql(self) -> SqlQuery:
+ """Converts this cql2 expression to a SQL query.
+
+ Returns:
+ SqlQuery: The SQL query and parameters
+
+ Examples:
+ >>> from cql2 import Expr
+ >>> expr = Expr("landsat:scene_id = 'LC82030282019133LGN00'")
+            >>> q = expr.to_sql()
+            >>> q.query
+ '("landsat:scene_id" = $1)'
+ >>> q.params
+ ['LC82030282019133LGN00']
+ """
diff --git a/python/pyproject.toml b/python/pyproject.toml
index e0b5929..cea5f6c 100644
--- a/python/pyproject.toml
+++ b/python/pyproject.toml
@@ -13,4 +13,4 @@ classifiers = [
dynamic = ["version"]
[tool.uv]
-dev-dependencies = ["mypy", "pytest", "ruff"]
+dev-dependencies = ["mypy>=1.11.2", "pytest>=8.3.3", "ruff>=0.6.9"]
diff --git a/python/uv.lock b/python/uv.lock
deleted file mode 100644
index 464da04..0000000
--- a/python/uv.lock
+++ /dev/null
@@ -1,176 +0,0 @@
-version = 1
-requires-python = ">=3.8"
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
-]
-
-[[package]]
-name = "cql2"
-version = "0.1.0"
-source = { editable = "." }
-
-[package.dev-dependencies]
-dev = [
- { name = "mypy" },
- { name = "pytest" },
- { name = "ruff" },
-]
-
-[package.metadata]
-
-[package.metadata.requires-dev]
-dev = [
- { name = "mypy" },
- { name = "pytest" },
- { name = "ruff" },
-]
-
-[[package]]
-name = "exceptiongroup"
-version = "1.2.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 },
-]
-
-[[package]]
-name = "iniconfig"
-version = "2.0.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
-]
-
-[[package]]
-name = "mypy"
-version = "1.11.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "mypy-extensions" },
- { name = "tomli", marker = "python_full_version < '3.11'" },
- { name = "typing-extensions" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/5c/86/5d7cbc4974fd564550b80fbb8103c05501ea11aa7835edf3351d90095896/mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", size = 3078806 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/78/cd/815368cd83c3a31873e5e55b317551500b12f2d1d7549720632f32630333/mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a", size = 10939401 },
- { url = "https://files.pythonhosted.org/packages/f1/27/e18c93a195d2fad75eb96e1f1cbc431842c332e8eba2e2b77eaf7313c6b7/mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef", size = 10111697 },
- { url = "https://files.pythonhosted.org/packages/dc/08/cdc1fc6d0d5a67d354741344cc4aa7d53f7128902ebcbe699ddd4f15a61c/mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383", size = 12500508 },
- { url = "https://files.pythonhosted.org/packages/64/12/aad3af008c92c2d5d0720ea3b6674ba94a98cdb86888d389acdb5f218c30/mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8", size = 13020712 },
- { url = "https://files.pythonhosted.org/packages/03/e6/a7d97cc124a565be5e9b7d5c2a6ebf082379ffba99646e4863ed5bbcb3c3/mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7", size = 9567319 },
- { url = "https://files.pythonhosted.org/packages/e2/aa/cc56fb53ebe14c64f1fe91d32d838d6f4db948b9494e200d2f61b820b85d/mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385", size = 10859630 },
- { url = "https://files.pythonhosted.org/packages/04/c8/b19a760fab491c22c51975cf74e3d253b8c8ce2be7afaa2490fbf95a8c59/mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca", size = 10037973 },
- { url = "https://files.pythonhosted.org/packages/88/57/7e7e39f2619c8f74a22efb9a4c4eff32b09d3798335625a124436d121d89/mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104", size = 12416659 },
- { url = "https://files.pythonhosted.org/packages/fc/a6/37f7544666b63a27e46c48f49caeee388bf3ce95f9c570eb5cfba5234405/mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4", size = 12897010 },
- { url = "https://files.pythonhosted.org/packages/84/8b/459a513badc4d34acb31c736a0101c22d2bd0697b969796ad93294165cfb/mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6", size = 9562873 },
- { url = "https://files.pythonhosted.org/packages/35/3a/ed7b12ecc3f6db2f664ccf85cb2e004d3e90bec928e9d7be6aa2f16b7cdf/mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", size = 10990335 },
- { url = "https://files.pythonhosted.org/packages/04/e4/1a9051e2ef10296d206519f1df13d2cc896aea39e8683302f89bf5792a59/mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", size = 10007119 },
- { url = "https://files.pythonhosted.org/packages/f3/3c/350a9da895f8a7e87ade0028b962be0252d152e0c2fbaafa6f0658b4d0d4/mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", size = 12506856 },
- { url = "https://files.pythonhosted.org/packages/b6/49/ee5adf6a49ff13f4202d949544d3d08abb0ea1f3e7f2a6d5b4c10ba0360a/mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", size = 12952066 },
- { url = "https://files.pythonhosted.org/packages/27/c0/b19d709a42b24004d720db37446a42abadf844d5c46a2c442e2a074d70d9/mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", size = 9664000 },
- { url = "https://files.pythonhosted.org/packages/42/ad/5a8567700410f8aa7c755b0ebd4cacff22468cbc5517588773d65075c0cb/mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b", size = 10876550 },
- { url = "https://files.pythonhosted.org/packages/1b/bc/9fc16ea7a27ceb93e123d300f1cfe27a6dd1eac9a8beea4f4d401e737e9d/mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86", size = 10068086 },
- { url = "https://files.pythonhosted.org/packages/cd/8f/a1e460f1288405a13352dad16b24aba6dce4f850fc76510c540faa96eda3/mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce", size = 12459214 },
- { url = "https://files.pythonhosted.org/packages/c7/74/746b31aef7cc7512dab8bdc2311ef88d63fadc1c453a09c8cab7e57e59bf/mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1", size = 12962942 },
- { url = "https://files.pythonhosted.org/packages/28/a4/7fae712240b640d75bb859294ad4776b9960b3216ccb7fa747f578e6c632/mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b", size = 9545616 },
- { url = "https://files.pythonhosted.org/packages/16/64/bb5ed751487e2bea0dfaa6f640a7e3bb88083648f522e766d5ef4a76f578/mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6", size = 10937294 },
- { url = "https://files.pythonhosted.org/packages/a9/a3/67a0069abed93c3bf3b0bebb8857e2979a02828a4a3fd82f107f8f1143e8/mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70", size = 10107707 },
- { url = "https://files.pythonhosted.org/packages/2f/4d/0379daf4258b454b1f9ed589a9dabd072c17f97496daea7b72fdacf7c248/mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d", size = 12498367 },
- { url = "https://files.pythonhosted.org/packages/3b/dc/3976a988c280b3571b8eb6928882dc4b723a403b21735a6d8ae6ed20e82b/mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d", size = 13018014 },
- { url = "https://files.pythonhosted.org/packages/83/84/adffc7138fb970e7e2a167bd20b33bb78958370179853a4ebe9008139342/mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24", size = 9568056 },
- { url = "https://files.pythonhosted.org/packages/42/3a/bdf730640ac523229dd6578e8a581795720a9321399de494374afc437ec5/mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", size = 2619625 },
-]
-
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 },
-]
-
-[[package]]
-name = "packaging"
-version = "24.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 },
-]
-
-[[package]]
-name = "pluggy"
-version = "1.5.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
-]
-
-[[package]]
-name = "pytest"
-version = "8.3.3"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama", marker = "sys_platform == 'win32'" },
- { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
- { name = "iniconfig" },
- { name = "packaging" },
- { name = "pluggy" },
- { name = "tomli", marker = "python_full_version < '3.11'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 },
-]
-
-[[package]]
-name = "ruff"
-version = "0.6.9"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/26/0d/6148a48dab5662ca1d5a93b7c0d13c03abd3cc7e2f35db08410e47cef15d/ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2", size = 3095355 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/6e/8f/f7a0a0ef1818662efb32ed6df16078c95da7a0a3248d64c2410c1e27799f/ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd", size = 10440526 },
- { url = "https://files.pythonhosted.org/packages/8b/69/b179a5faf936a9e2ab45bb412a668e4661eded964ccfa19d533f29463ef6/ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec", size = 10034612 },
- { url = "https://files.pythonhosted.org/packages/c7/ef/fd1b4be979c579d191eeac37b5cfc0ec906de72c8bcd8595e2c81bb700c1/ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c", size = 9706197 },
- { url = "https://files.pythonhosted.org/packages/29/61/b376d775deb5851cb48d893c568b511a6d3625ef2c129ad5698b64fb523c/ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e", size = 10751855 },
- { url = "https://files.pythonhosted.org/packages/13/d7/def9e5f446d75b9a9c19b24231a3a658c075d79163b08582e56fa5dcfa38/ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577", size = 10200889 },
- { url = "https://files.pythonhosted.org/packages/6c/d6/7f34160818bcb6e84ce293a5966cba368d9112ff0289b273fbb689046047/ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829", size = 11038678 },
- { url = "https://files.pythonhosted.org/packages/13/34/a40ff8ae62fb1b26fb8e6fa7e64bc0e0a834b47317880de22edd6bfb54fb/ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5", size = 11808682 },
- { url = "https://files.pythonhosted.org/packages/2e/6d/25a4386ae4009fc798bd10ba48c942d1b0b3e459b5403028f1214b6dd161/ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7", size = 11330446 },
- { url = "https://files.pythonhosted.org/packages/f7/f6/bdf891a9200d692c94ebcd06ae5a2fa5894e522f2c66c2a12dd5d8cb2654/ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f", size = 12483048 },
- { url = "https://files.pythonhosted.org/packages/a7/86/96f4252f41840e325b3fa6c48297e661abb9f564bd7dcc0572398c8daa42/ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa", size = 10936855 },
- { url = "https://files.pythonhosted.org/packages/45/87/801a52d26c8dbf73424238e9908b9ceac430d903c8ef35eab1b44fcfa2bd/ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb", size = 10713007 },
- { url = "https://files.pythonhosted.org/packages/be/27/6f7161d90320a389695e32b6ebdbfbedde28ccbf52451e4b723d7ce744ad/ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0", size = 10274594 },
- { url = "https://files.pythonhosted.org/packages/00/52/dc311775e7b5f5b19831563cb1572ecce63e62681bccc609867711fae317/ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625", size = 10608024 },
- { url = "https://files.pythonhosted.org/packages/98/b6/be0a1ddcbac65a30c985cf7224c4fce786ba2c51e7efeb5178fe410ed3cf/ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039", size = 10982085 },
- { url = "https://files.pythonhosted.org/packages/bb/a4/c84bc13d0b573cf7bb7d17b16d6d29f84267c92d79b2f478d4ce322e8e72/ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d", size = 8522088 },
- { url = "https://files.pythonhosted.org/packages/74/be/fc352bd8ca40daae8740b54c1c3e905a7efe470d420a268cd62150248c91/ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117", size = 9359275 },
- { url = "https://files.pythonhosted.org/packages/3e/14/fd026bc74ded05e2351681545a5f626e78ef831f8edce064d61acd2e6ec7/ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93", size = 8679879 },
-]
-
-[[package]]
-name = "tomli"
-version = "2.0.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/35/b9/de2a5c0144d7d75a57ff355c0c24054f965b2dc3036456ae03a51ea6264b/tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed", size = 16096 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/cf/db/ce8eda256fa131af12e0a76d481711abe4681b6923c27efb9a255c9e4594/tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", size = 13237 },
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.12.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
-]
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..7cfd14a
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,863 @@
+version = 1
+requires-python = ">=3.12"
+
+[manifest]
+members = [
+ "cql2",
+ "cql2-rs",
+]
+
+[[package]]
+name = "babel"
+version = "2.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 },
+]
+
+[[package]]
+name = "cairocffi"
+version = "1.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/70/c5/1a4dc131459e68a173cbdab5fad6b524f53f9c1ef7861b7698e998b837cc/cairocffi-1.7.1.tar.gz", hash = "sha256:2e48ee864884ec4a3a34bfa8c9ab9999f688286eb714a15a43ec9d068c36557b", size = 88096 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/93/d8/ba13451aa6b745c49536e87b6bf8f629b950e84bd0e8308f7dc6883b67e2/cairocffi-1.7.1-py3-none-any.whl", hash = "sha256:9803a0e11f6c962f3b0ae2ec8ba6ae45e957a146a004697a1ac1bbf16b073b3f", size = 75611 },
+]
+
+[[package]]
+name = "cairosvg"
+version = "2.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cairocffi" },
+ { name = "cssselect2" },
+ { name = "defusedxml" },
+ { name = "pillow" },
+ { name = "tinycss2" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d5/e6/ec5900b724e3c44af7f6f51f719919137284e5da4aabe96508baec8a1b40/CairoSVG-2.7.1.tar.gz", hash = "sha256:432531d72347291b9a9ebfb6777026b607563fd8719c46ee742db0aef7271ba0", size = 8399085 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/01/a5/1866b42151f50453f1a0d28fc4c39f5be5f412a2e914f33449c42daafdf1/CairoSVG-2.7.1-py3-none-any.whl", hash = "sha256:8a5222d4e6c3f86f1f7046b63246877a63b49923a1cd202184c3a634ef546b3b", size = 43235 },
+]
+
+[[package]]
+name = "certifi"
+version = "2024.8.30"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 },
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 },
+ { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 },
+ { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 },
+ { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 },
+ { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 },
+ { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 },
+ { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 },
+ { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 },
+ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 },
+ { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 },
+ { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 },
+ { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 },
+ { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 },
+ { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 },
+ { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 },
+ { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 },
+ { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 },
+ { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 },
+ { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 },
+ { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 },
+ { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 },
+ { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 },
+ { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 },
+ { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 },
+ { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 },
+ { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 },
+ { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 },
+ { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 },
+ { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 },
+ { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 },
+ { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 },
+ { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 },
+ { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 },
+ { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 },
+ { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 },
+ { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 },
+ { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 },
+ { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 },
+ { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 },
+ { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 },
+ { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 },
+ { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 },
+ { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 },
+ { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 },
+ { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 },
+ { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 },
+ { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 },
+ { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 },
+ { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 },
+ { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 },
+ { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 },
+ { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 },
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "platform_system == 'Windows'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
+]
+
+[[package]]
+name = "cql2"
+version = "0.1.0"
+source = { editable = "python" }
+
+[package.dev-dependencies]
+dev = [
+ { name = "mypy" },
+ { name = "pytest" },
+ { name = "ruff" },
+]
+
+[package.metadata]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "mypy", specifier = ">=1.11.2" },
+ { name = "pytest", specifier = ">=8.3.3" },
+ { name = "ruff", specifier = ">=0.6.9" },
+]
+
+[[package]]
+name = "cql2-rs"
+version = "0.0.0"
+source = { virtual = "." }
+
+[package.dev-dependencies]
+dev = [
+ { name = "mike" },
+ { name = "mkdocs-material", extra = ["imaging"] },
+ { name = "mkdocstrings", extra = ["python"] },
+]
+
+[package.metadata]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "mike", specifier = ">=2.1.3" },
+ { name = "mkdocs-material", extras = ["imaging"], specifier = ">=9.5.39" },
+ { name = "mkdocstrings", extras = ["python"], specifier = ">=0.26.1" },
+]
+
+[[package]]
+name = "cssselect2"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "tinycss2" },
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e7/fc/326cb6f988905998f09bb54a3f5d98d4462ba119363c0dfad29750d48c09/cssselect2-0.7.0.tar.gz", hash = "sha256:1ccd984dab89fc68955043aca4e1b03e0cf29cad9880f6e28e3ba7a74b14aa5a", size = 35888 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9d/3a/e39436efe51894243ff145a37c4f9a030839b97779ebcc4f13b3ba21c54e/cssselect2-0.7.0-py3-none-any.whl", hash = "sha256:fd23a65bfd444595913f02fc71f6b286c29261e354c41d722ca7a261a49b5969", size = 15586 },
+]
+
+[[package]]
+name = "defusedxml"
+version = "0.7.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 },
+]
+
+[[package]]
+name = "ghp-import"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034 },
+]
+
+[[package]]
+name = "griffe"
+version = "1.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c2/37/813e72a3458fa3d996cf6bcc6a0caa919d16540f873366b0d328d51d684a/griffe-1.3.2.tar.gz", hash = "sha256:1ec50335aa507ed2445f2dd45a15c9fa3a45f52c9527e880571dfc61912fd60c", size = 382540 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/49/39967633dd3c5f06fde83fec140228671a7344289ece0cfdd3cbe4798d69/griffe-1.3.2-py3-none-any.whl", hash = "sha256:2e34b5e46507d615915c8e6288bb1a2234bd35dee44d01e40a2bc2f25bd4d10c", size = 126992 },
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "zipp" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 },
+]
+
+[[package]]
+name = "importlib-resources"
+version = "6.4.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/be/f3e8c6081b684f176b761e6a2fef02a0be939740ed6f54109a2951d806f3/importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065", size = 43372 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e1/6a/4604f9ae2fa62ef47b9de2fa5ad599589d28c9fd1d335f32759813dfa91e/importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717", size = 36115 },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 },
+]
+
+[[package]]
+name = "markdown"
+version = "3.7"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/28/3af612670f82f4c056911fbbbb42760255801b3068c48de792d354ff4472/markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2", size = 357086 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3f/08/83871f3c50fc983b88547c196d11cf8c3340e37c32d2e9d6152abe2c61f7/Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803", size = 106349 },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b4/d2/38ff920762f2247c3af5cbbbbc40756f575d9692d381d7c520f45deb9b8f/markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344", size = 20249 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/45/6d/72ed58d42a12bd9fc288dbff6dd8d03ea973a232ac0538d7f88d105b5251/MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4", size = 14322 },
+ { url = "https://files.pythonhosted.org/packages/86/f5/241238f89cdd6461ac9f521af8389f9a48fab97e4f315c69e9e0d52bc919/MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5", size = 12380 },
+ { url = "https://files.pythonhosted.org/packages/27/94/79751928bca5841416d8ca02e22198672e021d5c7120338e2a6e3771f8fc/MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346", size = 24099 },
+ { url = "https://files.pythonhosted.org/packages/10/6e/1b8070bbfc467429c7983cd5ffd4ec57e1d501763d974c7caaa0a9a79f4c/MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729", size = 23249 },
+ { url = "https://files.pythonhosted.org/packages/66/50/9389ae6cdff78d7481a2a2641830b5eb1d1f62177550e73355a810a889c9/MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc", size = 23149 },
+ { url = "https://files.pythonhosted.org/packages/16/02/5dddff5366fde47133186efb847fa88bddef85914bbe623e25cfeccb3517/MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9", size = 23864 },
+ { url = "https://files.pythonhosted.org/packages/f3/f1/700ee6655561cfda986e03f7afc309e3738918551afa7dedd99225586227/MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b", size = 23440 },
+ { url = "https://files.pythonhosted.org/packages/fb/3e/d26623ac7f16709823b4c80e0b4a1c9196eeb46182a6c1d47b5e0c8434f4/MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38", size = 23610 },
+ { url = "https://files.pythonhosted.org/packages/51/04/1f8da0810c39cb9fcff96b6baed62272c97065e9cf11471965a161439e20/MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa", size = 15113 },
+ { url = "https://files.pythonhosted.org/packages/eb/24/a36dc37365bdd358b1e583cc40475593e36ab02cb7da6b3d0b9c05b0da7a/MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f", size = 15611 },
+ { url = "https://files.pythonhosted.org/packages/b1/60/4572a8aa1beccbc24b133aa0670781a5d2697f4fa3fecf0a87b46383174b/MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772", size = 14325 },
+ { url = "https://files.pythonhosted.org/packages/38/42/849915b99a765ec104bfd07ee933de5fc9c58fa9570efa7db81717f495d8/MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da", size = 12373 },
+ { url = "https://files.pythonhosted.org/packages/ef/82/4caaebd963c6d60b28e4445f38841d24f8b49bc10594a09956c9d73bfc08/MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a", size = 24059 },
+ { url = "https://files.pythonhosted.org/packages/20/15/6b319be2f79fcfa3173f479d69f4e950b5c9b642db4f22cf73ae5ade745f/MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c", size = 23211 },
+ { url = "https://files.pythonhosted.org/packages/9d/3f/8963bdf4962feb2154475acb7dc350f04217b5e0be7763a39b432291e229/MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd", size = 23095 },
+ { url = "https://files.pythonhosted.org/packages/af/93/f770bc70953d32de0c6ce4bcb76271512123a1ead91aaef625a020c5bfaf/MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7", size = 23901 },
+ { url = "https://files.pythonhosted.org/packages/11/92/1e5a33aa0a1190161238628fb68eb1bc5e67b56a5c89f0636328704b463a/MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd", size = 23463 },
+ { url = "https://files.pythonhosted.org/packages/0d/fe/657efdfe385d2a3a701f2c4fcc9577c63c438aeefdd642d0d956c4ecd225/MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5", size = 23569 },
+ { url = "https://files.pythonhosted.org/packages/cf/24/587dea40304046ace60f846cedaebc0d33d967a3ce46c11395a10e7a78ba/MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c", size = 15117 },
+ { url = "https://files.pythonhosted.org/packages/32/8f/d8961d633f26a011b4fe054f3bfff52f673423b8c431553268741dfb089e/MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f", size = 15613 },
+ { url = "https://files.pythonhosted.org/packages/9e/93/d6367ffbcd0c5c371370767f768eaa32af60bc411245b8517e383c6a2b12/MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a", size = 14563 },
+ { url = "https://files.pythonhosted.org/packages/4a/37/f813c3835747dec08fe19ac9b9eced01fdf93a4b3e626521675dc7f423a9/MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d", size = 12505 },
+ { url = "https://files.pythonhosted.org/packages/72/bf/800b4d1580298ca91ccd6c95915bbd147142dad1b8cf91d57b93b28670dd/MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396", size = 25358 },
+ { url = "https://files.pythonhosted.org/packages/fd/78/26e209abc8f0a379f031f0acc151231974e5b153d7eda5759d17d8f329f2/MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453", size = 23797 },
+ { url = "https://files.pythonhosted.org/packages/09/e1/918496a9390891756efee818880e71c1bbaf587f4dc8ede3f3852357310a/MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4", size = 23743 },
+ { url = "https://files.pythonhosted.org/packages/cd/c6/26f576cd58d6c2decd9045e4e3f3c5dbc01ea6cb710916e7bbb6ebd95b6b/MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8", size = 25076 },
+ { url = "https://files.pythonhosted.org/packages/b5/fa/10b24fb3b0e15fe5389dc88ecc6226ede08297e0ba7130610efbe0cdfb27/MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984", size = 24037 },
+ { url = "https://files.pythonhosted.org/packages/c8/81/4b3f5537d9f6cc4f5c80d6c4b78af9a5247fd37b5aba95807b2cbc336b9a/MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a", size = 24015 },
+ { url = "https://files.pythonhosted.org/packages/5f/07/8e8dcecd53216c5e01a51e84c32a2bce166690ed19c184774b38cd41921d/MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b", size = 15213 },
+ { url = "https://files.pythonhosted.org/packages/0d/87/4c364e0f109eea2402079abecbe33fef4f347b551a11423d1f4e187ea497/MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295", size = 15741 },
+]
+
+[[package]]
+name = "mergedeep"
+version = "1.3.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354 },
+]
+
+[[package]]
+name = "mike"
+version = "2.1.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "importlib-metadata" },
+ { name = "importlib-resources" },
+ { name = "jinja2" },
+ { name = "mkdocs" },
+ { name = "pyparsing" },
+ { name = "pyyaml" },
+ { name = "pyyaml-env-tag" },
+ { name = "verspec" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ab/f7/2933f1a1fb0e0f077d5d6a92c6c7f8a54e6128241f116dff4df8b6050bbf/mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810", size = 38119 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/1a/31b7cd6e4e7a02df4e076162e9783620777592bea9e4bb036389389af99d/mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a", size = 33754 },
+]
+
+[[package]]
+name = "mkdocs"
+version = "1.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "colorama", marker = "platform_system == 'Windows'" },
+ { name = "ghp-import" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mergedeep" },
+ { name = "mkdocs-get-deps" },
+ { name = "packaging" },
+ { name = "pathspec" },
+ { name = "pyyaml" },
+ { name = "pyyaml-env-tag" },
+ { name = "watchdog" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451 },
+]
+
+[[package]]
+name = "mkdocs-autorefs"
+version = "1.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mkdocs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/ae/0f1154c614d6a8b8a36fff084e5b82af3a15f7d2060cf0dcdb1c53297a71/mkdocs_autorefs-1.2.0.tar.gz", hash = "sha256:a86b93abff653521bda71cf3fc5596342b7a23982093915cb74273f67522190f", size = 40262 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/71/26/4d39d52ea2219604053a4d05b98e90d6a335511cc01806436ec4886b1028/mkdocs_autorefs-1.2.0-py3-none-any.whl", hash = "sha256:d588754ae89bd0ced0c70c06f58566a4ee43471eeeee5202427da7de9ef85a2f", size = 16522 },
+]
+
+[[package]]
+name = "mkdocs-get-deps"
+version = "0.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mergedeep" },
+ { name = "platformdirs" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521 },
+]
+
+[[package]]
+name = "mkdocs-material"
+version = "9.5.39"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "babel" },
+ { name = "colorama" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "mkdocs" },
+ { name = "mkdocs-material-extensions" },
+ { name = "paginate" },
+ { name = "pygments" },
+ { name = "pymdown-extensions" },
+ { name = "regex" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/be/f3/87688912b3ac75d5934830bef86762310a7df881ea9c9f50f4e4f5f49754/mkdocs_material-9.5.39.tar.gz", hash = "sha256:25faa06142afa38549d2b781d475a86fb61de93189f532b88e69bf11e5e5c3be", size = 4001520 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5b/12/419d1e0e6a1a8757bc6c371a895789960000fc56a40b22752a824068dc50/mkdocs_material-9.5.39-py3-none-any.whl", hash = "sha256:0f2f68c8db89523cb4a59705cd01b4acd62b2f71218ccb67e1e004e560410d2b", size = 8708939 },
+]
+
+[package.optional-dependencies]
+imaging = [
+ { name = "cairosvg" },
+ { name = "pillow" },
+]
+
+[[package]]
+name = "mkdocs-material-extensions"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/79/9b/9b4c96d6593b2a541e1cb8b34899a6d021d208bb357042823d4d2cabdbe7/mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443", size = 11847 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5b/54/662a4743aa81d9582ee9339d4ffa3c8fd40a4965e033d77b9da9774d3960/mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31", size = 8728 },
+]
+
+[[package]]
+name = "mkdocstrings"
+version = "0.26.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "markupsafe" },
+ { name = "mkdocs" },
+ { name = "mkdocs-autorefs" },
+ { name = "platformdirs" },
+ { name = "pymdown-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e6/bf/170ff04de72227f715d67da32950c7b8434449f3805b2ec3dd1085db4d7c/mkdocstrings-0.26.1.tar.gz", hash = "sha256:bb8b8854d6713d5348ad05b069a09f3b79edbc6a0f33a34c6821141adb03fe33", size = 92677 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/23/cc/8ba127aaee5d1e9046b0d33fa5b3d17da95a9d705d44902792e0569257fd/mkdocstrings-0.26.1-py3-none-any.whl", hash = "sha256:29738bfb72b4608e8e55cc50fb8a54f325dc7ebd2014e4e3881a49892d5983cf", size = 29643 },
+]
+
+[package.optional-dependencies]
+python = [
+ { name = "mkdocstrings-python" },
+]
+
+[[package]]
+name = "mkdocstrings-python"
+version = "1.11.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffe" },
+ { name = "mkdocs-autorefs" },
+ { name = "mkdocstrings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/ba/534c934cd0a809f51c91332d6ed278782ee4126b8ba8db02c2003f162b47/mkdocstrings_python-1.11.1.tar.gz", hash = "sha256:8824b115c5359304ab0b5378a91f6202324a849e1da907a3485b59208b797322", size = 166890 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2f/f2/2a2c48fda645ac6bbe73bcc974587a579092b6868e6ff8bc6d177f4db38a/mkdocstrings_python-1.11.1-py3-none-any.whl", hash = "sha256:a21a1c05acef129a618517bb5aae3e33114f569b11588b1e7af3e9d4061a71af", size = 109297 },
+]
+
+[[package]]
+name = "mypy"
+version = "1.11.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5c/86/5d7cbc4974fd564550b80fbb8103c05501ea11aa7835edf3351d90095896/mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", size = 3078806 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/35/3a/ed7b12ecc3f6db2f664ccf85cb2e004d3e90bec928e9d7be6aa2f16b7cdf/mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", size = 10990335 },
+ { url = "https://files.pythonhosted.org/packages/04/e4/1a9051e2ef10296d206519f1df13d2cc896aea39e8683302f89bf5792a59/mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", size = 10007119 },
+ { url = "https://files.pythonhosted.org/packages/f3/3c/350a9da895f8a7e87ade0028b962be0252d152e0c2fbaafa6f0658b4d0d4/mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", size = 12506856 },
+ { url = "https://files.pythonhosted.org/packages/b6/49/ee5adf6a49ff13f4202d949544d3d08abb0ea1f3e7f2a6d5b4c10ba0360a/mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", size = 12952066 },
+ { url = "https://files.pythonhosted.org/packages/27/c0/b19d709a42b24004d720db37446a42abadf844d5c46a2c442e2a074d70d9/mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", size = 9664000 },
+ { url = "https://files.pythonhosted.org/packages/42/3a/bdf730640ac523229dd6578e8a581795720a9321399de494374afc437ec5/mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", size = 2619625 },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 },
+]
+
+[[package]]
+name = "packaging"
+version = "24.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 },
+]
+
+[[package]]
+name = "paginate"
+version = "0.5.7"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ec/46/68dde5b6bc00c1296ec6466ab27dddede6aec9af1b99090e1107091b3b84/paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", size = 19252 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746 },
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 },
+]
+
+[[package]]
+name = "pillow"
+version = "10.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350 },
+ { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980 },
+ { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799 },
+ { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973 },
+ { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054 },
+ { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484 },
+ { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375 },
+ { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773 },
+ { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690 },
+ { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951 },
+ { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427 },
+ { url = "https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685 },
+ { url = "https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883 },
+ { url = "https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837 },
+ { url = "https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562 },
+ { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761 },
+ { url = "https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767 },
+ { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989 },
+ { url = "https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255 },
+ { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603 },
+ { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972 },
+ { url = "https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375 },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.3.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
+]
+
+[[package]]
+name = "pygments"
+version = "2.18.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 },
+]
+
+[[package]]
+name = "pymdown-extensions"
+version = "10.11.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f4/71/2730a20e9e3752393d78998347f8b1085ef9c417646ea9befbeef221e3c4/pymdown_extensions-10.11.2.tar.gz", hash = "sha256:bc8847ecc9e784a098efd35e20cba772bc5a1b529dfcef9dc1972db9021a1049", size = 830241 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/35/c0edf199257ef0a7d407d29cd51c4e70d1dad4370a5f44deb65a7a5475e2/pymdown_extensions-10.11.2-py3-none-any.whl", hash = "sha256:41cdde0a77290e480cf53892f5c5e50921a7ee3e5cd60ba91bf19837b33badcf", size = 259044 },
+]
+
+[[package]]
+name = "pyparsing"
+version = "3.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/83/08/13f3bce01b2061f2bbd582c9df82723de943784cf719a35ac886c652043a/pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032", size = 900231 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e5/0c/0e3c05b1c87bb6a1c76d281b0f35e78d2d80ac91b5f8f524cebf77f51049/pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c", size = 104100 },
+]
+
+[[package]]
+name = "pytest"
+version = "8.3.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 },
+ { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 },
+ { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 },
+ { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 },
+ { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 },
+ { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 },
+ { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 },
+ { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 },
+ { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 },
+ { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 },
+ { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 },
+ { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 },
+ { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 },
+ { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 },
+ { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 },
+ { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 },
+ { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 },
+ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 },
+]
+
+[[package]]
+name = "pyyaml-env-tag"
+version = "0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/8e/da1c6c58f751b70f8ceb1eb25bc25d524e8f14fe16edcce3f4e3ba08629c/pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb", size = 5631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/66/bbb1dd374f5c870f59c5bb1db0e18cbe7fa739415a24cbd95b2d1f5ae0c4/pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069", size = 3911 },
+]
+
+[[package]]
+name = "regex"
+version = "2024.9.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/38/148df33b4dbca3bd069b963acab5e0fa1a9dbd6820f8c322d0dd6faeff96/regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd", size = 399403 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6e/92/407531450762bed778eedbde04407f68cbd75d13cee96c6f8d6903d9c6c1/regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7", size = 483590 },
+ { url = "https://files.pythonhosted.org/packages/8e/a2/048acbc5ae1f615adc6cba36cc45734e679b5f1e4e58c3c77f0ed611d4e2/regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231", size = 288175 },
+ { url = "https://files.pythonhosted.org/packages/8a/ea/909d8620329ab710dfaf7b4adee41242ab7c9b95ea8d838e9bfe76244259/regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d", size = 284749 },
+ { url = "https://files.pythonhosted.org/packages/ca/fa/521eb683b916389b4975337873e66954e0f6d8f91bd5774164a57b503185/regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64", size = 795181 },
+ { url = "https://files.pythonhosted.org/packages/28/db/63047feddc3280cc242f9c74f7aeddc6ee662b1835f00046f57d5630c827/regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42", size = 835842 },
+ { url = "https://files.pythonhosted.org/packages/e3/94/86adc259ff8ec26edf35fcca7e334566c1805c7493b192cb09679f9c3dee/regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766", size = 823533 },
+ { url = "https://files.pythonhosted.org/packages/29/52/84662b6636061277cb857f658518aa7db6672bc6d1a3f503ccd5aefc581e/regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a", size = 797037 },
+ { url = "https://files.pythonhosted.org/packages/c3/2a/cd4675dd987e4a7505f0364a958bc41f3b84942de9efaad0ef9a2646681c/regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9", size = 784106 },
+ { url = "https://files.pythonhosted.org/packages/6f/75/3ea7ec29de0bbf42f21f812f48781d41e627d57a634f3f23947c9a46e303/regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d", size = 782468 },
+ { url = "https://files.pythonhosted.org/packages/d3/67/15519d69b52c252b270e679cb578e22e0c02b8dd4e361f2b04efcc7f2335/regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822", size = 790324 },
+ { url = "https://files.pythonhosted.org/packages/9c/71/eff77d3fe7ba08ab0672920059ec30d63fa7e41aa0fb61c562726e9bd721/regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0", size = 860214 },
+ { url = "https://files.pythonhosted.org/packages/81/11/e1bdf84a72372e56f1ea4b833dd583b822a23138a616ace7ab57a0e11556/regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a", size = 859420 },
+ { url = "https://files.pythonhosted.org/packages/ea/75/9753e9dcebfa7c3645563ef5c8a58f3a47e799c872165f37c55737dadd3e/regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a", size = 787333 },
+ { url = "https://files.pythonhosted.org/packages/bc/4e/ba1cbca93141f7416624b3ae63573e785d4bc1834c8be44a8f0747919eca/regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776", size = 262058 },
+ { url = "https://files.pythonhosted.org/packages/6e/16/efc5f194778bf43e5888209e5cec4b258005d37c613b67ae137df3b89c53/regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009", size = 273526 },
+ { url = "https://files.pythonhosted.org/packages/93/0a/d1c6b9af1ff1e36832fe38d74d5c5bab913f2bdcbbd6bc0e7f3ce8b2f577/regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784", size = 483376 },
+ { url = "https://files.pythonhosted.org/packages/a4/42/5910a050c105d7f750a72dcb49c30220c3ae4e2654e54aaaa0e9bc0584cb/regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36", size = 288112 },
+ { url = "https://files.pythonhosted.org/packages/8d/56/0c262aff0e9224fa7ffce47b5458d373f4d3e3ff84e99b5ff0cb15e0b5b2/regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92", size = 284608 },
+ { url = "https://files.pythonhosted.org/packages/b9/54/9fe8f9aec5007bbbbce28ba3d2e3eaca425f95387b7d1e84f0d137d25237/regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86", size = 795337 },
+ { url = "https://files.pythonhosted.org/packages/b2/e7/6b2f642c3cded271c4f16cc4daa7231be544d30fe2b168e0223724b49a61/regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85", size = 835848 },
+ { url = "https://files.pythonhosted.org/packages/cd/9e/187363bdf5d8c0e4662117b92aa32bf52f8f09620ae93abc7537d96d3311/regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963", size = 823503 },
+ { url = "https://files.pythonhosted.org/packages/f8/10/601303b8ee93589f879664b0cfd3127949ff32b17f9b6c490fb201106c4d/regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6", size = 797049 },
+ { url = "https://files.pythonhosted.org/packages/ef/1c/ea200f61ce9f341763f2717ab4daebe4422d83e9fd4ac5e33435fd3a148d/regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802", size = 784144 },
+ { url = "https://files.pythonhosted.org/packages/d8/5c/d2429be49ef3292def7688401d3deb11702c13dcaecdc71d2b407421275b/regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29", size = 782483 },
+ { url = "https://files.pythonhosted.org/packages/12/d9/cbc30f2ff7164f3b26a7760f87c54bf8b2faed286f60efd80350a51c5b99/regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8", size = 790320 },
+ { url = "https://files.pythonhosted.org/packages/19/1d/43ed03a236313639da5a45e61bc553c8d41e925bcf29b0f8ecff0c2c3f25/regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84", size = 860435 },
+ { url = "https://files.pythonhosted.org/packages/34/4f/5d04da61c7c56e785058a46349f7285ae3ebc0726c6ea7c5c70600a52233/regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554", size = 859571 },
+ { url = "https://files.pythonhosted.org/packages/12/7f/8398c8155a3c70703a8e91c29532558186558e1aea44144b382faa2a6f7a/regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8", size = 787398 },
+ { url = "https://files.pythonhosted.org/packages/58/3a/f5903977647a9a7e46d5535e9e96c194304aeeca7501240509bde2f9e17f/regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8", size = 262035 },
+ { url = "https://files.pythonhosted.org/packages/ff/80/51ba3a4b7482f6011095b3a036e07374f64de180b7d870b704ed22509002/regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f", size = 273510 },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
+]
+
+[[package]]
+name = "ruff"
+version = "0.6.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/0d/6148a48dab5662ca1d5a93b7c0d13c03abd3cc7e2f35db08410e47cef15d/ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2", size = 3095355 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6e/8f/f7a0a0ef1818662efb32ed6df16078c95da7a0a3248d64c2410c1e27799f/ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd", size = 10440526 },
+ { url = "https://files.pythonhosted.org/packages/8b/69/b179a5faf936a9e2ab45bb412a668e4661eded964ccfa19d533f29463ef6/ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec", size = 10034612 },
+ { url = "https://files.pythonhosted.org/packages/c7/ef/fd1b4be979c579d191eeac37b5cfc0ec906de72c8bcd8595e2c81bb700c1/ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c", size = 9706197 },
+ { url = "https://files.pythonhosted.org/packages/29/61/b376d775deb5851cb48d893c568b511a6d3625ef2c129ad5698b64fb523c/ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e", size = 10751855 },
+ { url = "https://files.pythonhosted.org/packages/13/d7/def9e5f446d75b9a9c19b24231a3a658c075d79163b08582e56fa5dcfa38/ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577", size = 10200889 },
+ { url = "https://files.pythonhosted.org/packages/6c/d6/7f34160818bcb6e84ce293a5966cba368d9112ff0289b273fbb689046047/ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829", size = 11038678 },
+ { url = "https://files.pythonhosted.org/packages/13/34/a40ff8ae62fb1b26fb8e6fa7e64bc0e0a834b47317880de22edd6bfb54fb/ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5", size = 11808682 },
+ { url = "https://files.pythonhosted.org/packages/2e/6d/25a4386ae4009fc798bd10ba48c942d1b0b3e459b5403028f1214b6dd161/ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7", size = 11330446 },
+ { url = "https://files.pythonhosted.org/packages/f7/f6/bdf891a9200d692c94ebcd06ae5a2fa5894e522f2c66c2a12dd5d8cb2654/ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f", size = 12483048 },
+ { url = "https://files.pythonhosted.org/packages/a7/86/96f4252f41840e325b3fa6c48297e661abb9f564bd7dcc0572398c8daa42/ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa", size = 10936855 },
+ { url = "https://files.pythonhosted.org/packages/45/87/801a52d26c8dbf73424238e9908b9ceac430d903c8ef35eab1b44fcfa2bd/ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb", size = 10713007 },
+ { url = "https://files.pythonhosted.org/packages/be/27/6f7161d90320a389695e32b6ebdbfbedde28ccbf52451e4b723d7ce744ad/ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0", size = 10274594 },
+ { url = "https://files.pythonhosted.org/packages/00/52/dc311775e7b5f5b19831563cb1572ecce63e62681bccc609867711fae317/ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625", size = 10608024 },
+ { url = "https://files.pythonhosted.org/packages/98/b6/be0a1ddcbac65a30c985cf7224c4fce786ba2c51e7efeb5178fe410ed3cf/ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039", size = 10982085 },
+ { url = "https://files.pythonhosted.org/packages/bb/a4/c84bc13d0b573cf7bb7d17b16d6d29f84267c92d79b2f478d4ce322e8e72/ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d", size = 8522088 },
+ { url = "https://files.pythonhosted.org/packages/74/be/fc352bd8ca40daae8740b54c1c3e905a7efe470d420a268cd62150248c91/ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117", size = 9359275 },
+ { url = "https://files.pythonhosted.org/packages/3e/14/fd026bc74ded05e2351681545a5f626e78ef831f8edce064d61acd2e6ec7/ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93", size = 8679879 },
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 },
+]
+
+[[package]]
+name = "tinycss2"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/44/6f/38d2335a2b70b9982d112bb177e3dbe169746423e33f718bf5e9c7b3ddd3/tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d", size = 67360 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/4d/0db5b8a613d2a59bbc29bc5bb44a2f8070eb9ceab11c50d477502a8a0092/tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7", size = 22532 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 },
+]
+
+[[package]]
+name = "verspec"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/44/8126f9f0c44319b2efc65feaad589cadef4d77ece200ae3c9133d58464d0/verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e", size = 27123 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a4/ce/3b6fee91c85626eaf769d617f1be9d2e15c1cca027bbdeb2e0d751469355/verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31", size = 19640 },
+]
+
+[[package]]
+name = "watchdog"
+version = "5.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/48/a86139aaeab2db0a2482676f64798d8ac4d2dbb457523f50ab37bf02ce2c/watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176", size = 129556 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1c/9b/8b206a928c188fdeb7b12e1c795199534cd44bdef223b8470129016009dd/watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8", size = 96739 },
+ { url = "https://files.pythonhosted.org/packages/e1/26/129ca9cd0f8016672f37000010c2fedc0b86816e894ebdc0af9bb04a6439/watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926", size = 88708 },
+ { url = "https://files.pythonhosted.org/packages/8f/b3/5e10ec32f0c429cdb55b1369066d6e83faf9985b3a53a4e37bb5c5e29aa0/watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e", size = 89309 },
+ { url = "https://files.pythonhosted.org/packages/54/c4/49af4ab00bcfb688e9962eace2edda07a2cf89b9699ea536da48e8585cff/watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7", size = 96740 },
+ { url = "https://files.pythonhosted.org/packages/96/a4/b24de77cc9ae424c1687c9d4fb15aa560d7d7b28ba559aca72f781d0202b/watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906", size = 88711 },
+ { url = "https://files.pythonhosted.org/packages/a4/71/3f2e9fe8403386b99d788868955b3a790f7a09721501a7e1eb58f514ffaa/watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1", size = 89319 },
+ { url = "https://files.pythonhosted.org/packages/60/33/7cb71c9df9a77b6927ee5f48d25e1de5562ce0fa7e0c56dcf2b0472e64a2/watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91", size = 79335 },
+ { url = "https://files.pythonhosted.org/packages/f6/91/320bc1496cf951a3cf93a7ffd18a581f0792c304be963d943e0e608c2919/watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c", size = 79334 },
+ { url = "https://files.pythonhosted.org/packages/8b/2c/567c5e042ed667d3544c43d48a65cf853450a2d2a9089d9523a65f195e94/watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c", size = 79333 },
+ { url = "https://files.pythonhosted.org/packages/c3/f0/64059fe162ef3274662e67bbdea6c45b3cd53e846d5bd1365fcdc3dc1d15/watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221", size = 79334 },
+ { url = "https://files.pythonhosted.org/packages/f6/d9/19b7d02965be2801e2d0f6f4bde23e4ae172620071b65430fa0c2f8441ac/watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05", size = 79333 },
+ { url = "https://files.pythonhosted.org/packages/cb/a1/5393ac6d0b095d3a44946b09258e9b5f22cb2fb67bcfa419dd868478826c/watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97", size = 79332 },
+ { url = "https://files.pythonhosted.org/packages/a0/58/edec25190b6403caf4426dd418234f2358a106634b7d6aa4aec6939b104f/watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7", size = 79334 },
+ { url = "https://files.pythonhosted.org/packages/97/69/cfb2d17ba8aabc73be2e2d03c8c319b1f32053a02c4b571852983aa24ff2/watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49", size = 79320 },
+ { url = "https://files.pythonhosted.org/packages/91/b4/2b5b59358dadfa2c8676322f955b6c22cde4937602f40490e2f7403e548e/watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9", size = 79325 },
+ { url = "https://files.pythonhosted.org/packages/38/b8/0aa69337651b3005f161f7f494e59188a1d8d94171666900d26d29d10f69/watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45", size = 79324 },
+]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 },
+]
+
+[[package]]
+name = "zipp"
+version = "3.20.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 },
+]
| Switch to `--workspace` in CI
If I look at
```
cargo check --help
```
it says that `--all` is deprecated
```
Package Selection:
--all Alias for --workspace (deprecated)
```
Maybe switch to `--workspace`?
_Originally posted by @kylebarron in https://github.com/developmentseed/cql2-rs/pull/32#discussion_r1793795102_
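For illustration, the two invocations are equivalent per the help text quoted above (a sketch only; the actual CI steps may pass additional flags):

```
cargo check --all        # deprecated alias
cargo check --workspace  # preferred spelling of the same selection
```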
| 2024-10-10T00:11:26 | 0.0 | [] | [] |
|||
developmentseed/cql2-rs | developmentseed__cql2-rs-26 | 0f4942193d2aa6f870468cd81428d5836fbb5786 | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4ad7907..22b6bfa 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,4 +24,4 @@ jobs:
- name: cargo check
run: cargo check
- name: cargo test
- run: cargo test
+ run: cargo test --package cql2
diff --git a/Cargo.lock b/Cargo.lock
index b400c9e..0ce4b6c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -24,6 +24,55 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "anstream"
+version = "0.6.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
+dependencies = [
+ "anstyle",
+ "windows-sys",
+]
+
[[package]]
name = "appendlist"
version = "1.4.0"
@@ -102,6 +151,52 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "clap"
+version = "4.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615"
+dependencies = [
+ "clap_builder",
+ "clap_derive",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap_lex",
+ "strsim",
+]
+
+[[package]]
+name = "clap_derive"
+version = "4.5.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
+
+[[package]]
+name = "colorchoice"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
+
[[package]]
name = "cpufeatures"
version = "0.2.14"
@@ -130,6 +225,15 @@ dependencies = [
"thiserror",
]
+[[package]]
+name = "cql2-cli"
+version = "0.1.0"
+dependencies = [
+ "clap",
+ "cql2",
+ "serde_json",
+]
+
[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -340,6 +444,12 @@ version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
[[package]]
name = "idna"
version = "0.5.0"
@@ -360,6 +470,12 @@ dependencies = [
"hashbrown",
]
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
[[package]]
name = "itoa"
version = "1.0.11"
@@ -408,12 +524,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.20.1"
+version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1"
-dependencies = [
- "portable-atomic",
-]
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "percent-encoding"
@@ -478,12 +591,6 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
-[[package]]
-name = "portable-atomic"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2"
-
[[package]]
name = "proc-macro-crate"
version = "3.2.0"
@@ -650,6 +757,12 @@ dependencies = [
"autocfg",
]
+[[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
[[package]]
name = "syn"
version = "2.0.79"
@@ -757,6 +870,12 @@ dependencies = [
"percent-encoding",
]
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
[[package]]
name = "version_check"
version = "0.9.5"
@@ -769,6 +888,79 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
[[package]]
name = "winnow"
version = "0.6.20"
diff --git a/Cargo.toml b/Cargo.toml
index cc11957..131cf91 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,14 +20,6 @@ thiserror = "1.0"
assert-json-diff = "2"
rstest = "0.23"
-[[bin]]
-name = "cql2json"
-path = "src/bin/cql2json.rs"
-test = false
-doc = false
-
-[[bin]]
-name = "cql2text"
-path = "src/bin/cql2text.rs"
-test = false
-doc = false
+[workspace]
+default-members = ["cli"]
+members = ["cli"]
diff --git a/README.md b/README.md
index 96e6f1e..bb41cff 100644
--- a/README.md
+++ b/README.md
@@ -5,13 +5,65 @@
Parse, validate, and convert CQL2-Text and CQL2-JSON.
## CLI
-Both commands take either CQL2-Text or CQL2-JSON on stdin, as a quoted/escaped argument, or interactively. They will return status code 0 on successful validation or status code 1 if there was a problem parsing or validating. Verbosity of the validation errors can be controlled using the CQL2_DEBUG_LEVEL environment variable between 0 and 3.
-- cql2json - returns standardized CQL2-JSON
-- cql2text - returns standardized CQL2-Text
+
+At its simplest, the command-line interface (CLI) is a pass-through validator:
+
+```shell
+$ cql2 < tests/fixtures/text/example01.txt # will succeed if the CQL2 is valid
+("landsat:scene_id" = 'LC82030282019133LGN00')
+```
+
+You can convert formats:
+
+```shell
+$ cql2 -o json < tests/fixtures/text/example01.txt
+{"op":"=","args":[{"property":"landsat:scene_id"},"LC82030282019133LGN00"]}
+```
+
+Use `-v` to get detailed validation information:
+
+```shell
+$ cql2 'wrong' -v
+[ERROR] Invalid CQL2: wrong
+For more detailed validation information, use -vv
+jsonschema validation failed with file:///tmp/cql2.json#
+- at '': oneOf failed, none matched
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': oneOf failed, none matched
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': missing properties 'op', 'args'
+ - at '': want boolean, but got object
+```
+
+cql2-text parsing errors are pretty-printed:
+
+```shell
+$ cql2 '(foo ~= "bar")'
+[ERROR] Parsing error: (foo ~= "bar")
+ --> 1:6
+ |
+1 | (foo ~= "bar")
+ | ^---
+ |
+ = expected NotFlag, And, Or, ConcatInfixOp, Add, Subtract, Multiply, Divide, Modulo, Power, Eq, Gt, GtEq, Lt, LtEq, NotEq, Is, or IsNullPostfix
+```
+
+Use `cql2 --help` to get a complete listing of the CLI arguments and formats.
## Response
-Response may not match the input.
+
+Responses may not match the input.
+
### CQL2-Text Differences
+
- all identifiers in output are double quoted
- position of "NOT" keywords is standardized to be before the expression (ie "... NOT LIKE ..." will become "NOT ... LIKE ..."
- The Negative operator on anything besides a literal number becomes "* -1"
@@ -19,5 +71,5 @@ Response may not match the input.
Tasks to get to ready-to-use state:
- [x] Parse all examples from CQL2 examples into json that passes json schema validation.
-- [ ] Add tests that compare OGC examples to parsed/standardized/validated CQL2-Text and CQL2-JSON
+- [x] Add tests that compare OGC examples to parsed/standardized/validated CQL2-Text and CQL2-JSON
- [ ] Fix issues with Z, ZM, and M WKT variants
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
new file mode 100644
index 0000000..36e0756
--- /dev/null
+++ b/cli/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "cql2-cli"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+cql2 = { path = "..", version = "0.1.0" }
+clap = { version = "4.5", features = ["derive"] }
+serde_json = "1.0"
+
+[[bin]]
+name = "cql2"
+path = "src/main.rs"
+test = false
+doc = false
diff --git a/cli/src/main.rs b/cli/src/main.rs
new file mode 100644
index 0000000..8744f18
--- /dev/null
+++ b/cli/src/main.rs
@@ -0,0 +1,121 @@
+use clap::{ArgAction, Parser, ValueEnum};
+use cql2::{Expr, Validator};
+use std::io::Read;
+
+#[derive(Debug, Parser)]
+struct Cli {
+ /// The input CQL2
+ ///
+ /// If not provided, or `-`, the CQL2 will be read from standard input. The
+ /// type (json or text) will be auto-detected. To specify a format, use
+ /// --input-format.
+ input: Option<String>,
+
+ /// The input format.
+ ///
+ /// If not provided, the format will be auto-detected from the input.
+ #[arg(short, long)]
+ input_format: Option<InputFormat>,
+
+ /// The output format.
+ ///
+ /// If not provided, the format will be the same as the input.
+ #[arg(short, long)]
+ output_format: Option<OutputFormat>,
+
+ /// Validate the CQL2
+ #[arg(long, default_value_t = true, action = ArgAction::Set)]
+ validate: bool,
+
+ /// Verbosity.
+ ///
+ /// Provide this argument several times to turn up the chatter.
+ #[arg(short, long, action = ArgAction::Count)]
+ verbose: u8,
+}
+
+#[derive(Debug, ValueEnum, Clone)]
+enum InputFormat {
+ /// cql2-json
+ Json,
+
+ /// cql2-text
+ Text,
+}
+
+#[derive(Debug, ValueEnum, Clone)]
+enum OutputFormat {
+ /// cql2-json, pretty-printed
+ JsonPretty,
+
+ /// cql2-json, compact
+ Json,
+
+ /// cql2-text
+ Text,
+
+ /// SQL
+ Sql,
+}
+
+fn main() {
+ let cli = Cli::parse();
+ let input = cli
+ .input
+ .and_then(|input| if input == "-" { None } else { Some(input) })
+ .unwrap_or_else(read_stdin);
+ let input_format = cli.input_format.unwrap_or_else(|| {
+ if input.starts_with('{') {
+ InputFormat::Json
+ } else {
+ InputFormat::Text
+ }
+ });
+ let expr: Expr = match input_format {
+ InputFormat::Json => cql2::parse_json(&input).unwrap(),
+ InputFormat::Text => match cql2::parse_text(&input) {
+ Ok(expr) => expr,
+ Err(err) => {
+ eprintln!("[ERROR] Parsing error: {input}");
+ eprintln!("{err}");
+ std::process::exit(1)
+ }
+ },
+ };
+ if cli.validate {
+ let validator = Validator::new().unwrap();
+ let value = serde_json::to_value(&expr).unwrap();
+ if let Err(error) = validator.validate(&value) {
+ eprintln!("[ERROR] Invalid CQL2: {input}");
+ match cli.verbose {
+ 0 => eprintln!("For more detailed validation information, use -v"),
+ 1 => eprintln!("For more detailed validation information, use -vv\n{error}"),
+ 2 => eprintln!("For more detailed validation information, use -vvv\n{error:#}"),
+ _ => {
+ let detailed_output = error.detailed_output();
+ eprintln!("{detailed_output:#}");
+ }
+ }
+ std::process::exit(1)
+ }
+ }
+ let output_format = cli.output_format.unwrap_or_else(|| match input_format {
+ InputFormat::Json => OutputFormat::Json,
+ InputFormat::Text => OutputFormat::Text,
+ });
+ match output_format {
+ OutputFormat::JsonPretty => serde_json::to_writer_pretty(std::io::stdout(), &expr).unwrap(),
+ OutputFormat::Json => serde_json::to_writer(std::io::stdout(), &expr).unwrap(),
+ OutputFormat::Text => print!("{}", expr.to_text().unwrap()),
+ OutputFormat::Sql => {
+ serde_json::to_writer_pretty(std::io::stdout(), &expr.to_sql().unwrap()).unwrap()
+ }
+ }
+ println!()
+}
+
+fn read_stdin() -> String {
+ let mut buf = String::new();
+ std::io::stdin().read_to_string(&mut buf).unwrap();
+ buf
+}
diff --git a/src/bin/cql2json.rs b/src/bin/cql2json.rs
deleted file mode 100644
index cd8852b..0000000
--- a/src/bin/cql2json.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-use cql2::parse_stdin;
-
-fn main() {
- if let Ok(parsed) = parse_stdin() {
- println!("{}", parsed.to_json().unwrap());
- } else {
- std::process::exit(1)
- }
-}
diff --git a/src/bin/cql2text.rs b/src/bin/cql2text.rs
deleted file mode 100644
index 4c97a2d..0000000
--- a/src/bin/cql2text.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-use cql2::parse_stdin;
-
-fn main() {
- if let Ok(parsed) = parse_stdin() {
- println!("{}", parsed.to_text().unwrap());
- } else {
- std::process::exit(1)
- }
-}
diff --git a/src/lib.rs b/src/lib.rs
index fc9cbfb..3549951 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -9,7 +9,7 @@ mod parser;
mod validator;
use serde_derive::{Deserialize, Serialize};
-use std::{fs, io::Read, path::Path};
+use std::{fs, path::Path};
pub use {error::Error, expr::Expr, geometry::Geometry, parser::parse_text, validator::Validator};
/// A SQL query, broken into the query and parameters.
@@ -43,74 +43,5 @@ pub fn parse_file(path: impl AsRef<Path>) -> Result<Expr, Error> {
s.parse()
}
-fn get_stdin() -> Result<String, std::io::Error> {
- use std::{
- env,
- io::{self, IsTerminal},
- };
- let args: Vec<String> = env::args().collect();
- let mut buffer = String::new();
-
- if args.len() >= 2 {
- buffer = args[1].to_string();
- } else if io::stdin().is_terminal() {
- println!("Enter CQL2 as Text or JSON, then hit return");
- io::stdin().read_line(&mut buffer)?;
- } else {
- io::stdin().read_to_string(&mut buffer)?;
- }
- Ok(buffer)
-}
-
-fn parse_stderr(s: &str) -> Result<Expr, Error> {
- let debug_level: u8 = std::env::var("CQL2_DEBUG_LEVEL")
- .ok()
- .map(|s| s.parse())
- .transpose()?
- .unwrap_or(1);
- let validator = Validator::new().unwrap();
-
- let parsed: Expr = s.parse()?;
- let value = serde_json::to_value(&parsed)?;
-
- let validation = validator.validate(&value);
-
- match validation {
- Ok(()) => Ok(parsed),
- Err(err) => {
- eprintln!("Passed in CQL2 parsed to {value}.");
- eprintln!("This did not pass jsonschema validation for CQL2.");
- match debug_level {
- 0 => eprintln!("For more detailed validation details set CQL2_DEBUG_LEVEL to 1."),
- 1 => eprintln!(
- "{err}\nFor more detailed validation details set CQL2_DEBUG_LEVEL to 2."
- ),
- 2 => eprintln!(
- "{err:#}\nFor more detailed validation details set CQL2_DEBUG_LEVEL to 3."
- ),
- _ => {
- let detailed_output = err.detailed_output();
- eprintln!("{detailed_output:#}");
- }
- }
- Err(Error::Validation(serde_json::to_value(
- err.detailed_output(),
- )?))
- }
- }
-}
-
-/// Parse standard input into a CQL2 expression.
-///
-/// # Examples
-///
-/// ```no_run
-/// let expr = cql2::parse_stdin();
-/// ```
-pub fn parse_stdin() -> Result<Expr, Error> {
- let buffer = get_stdin()?;
- parse_stderr(&buffer)
-}
-
#[cfg(test)]
use {assert_json_diff as _, rstest as _};
| Consolidate to one bin
I think it would be a simpler user experience and less maintenance for us to provide a single `cql2` bin instead of the current `cql2text` and `cql2json`.
| I'm cool with that. At this point in time, the bins were really mostly there for me to do quick tests. | 2024-10-05T17:23:33 | 0.0 | [] | []
||
developmentseed/cql2-rs | developmentseed__cql2-rs-17 | b85cb6bea101c4a30ae2f5e1c2caefada9e66b1c | diff --git a/.github/workflows/test.yml b/.github/workflows/ci.yml
similarity index 52%
rename from .github/workflows/test.yml
rename to .github/workflows/ci.yml
index 561e383..4ad7907 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/ci.yml
@@ -1,4 +1,4 @@
-name: Rust
+name: CI
on:
push:
@@ -12,24 +12,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt, clippy
-
- uses: Swatinem/rust-cache@v2
-
- - name: Cargo fmt
+ - name: cargo fmt
run: cargo fmt -- --check
-
- - name: "clippy --all"
- run: cargo clippy --all-features --tests -- -D warnings
-
- - name: "cargo check"
- run: cargo check --all-features
-
- - name: "cargo test"
- run: |
- cargo test --all --no-default-features
- cargo test --all --all-features
+ - name: cargo clippy
+ run: cargo clippy --tests -- -D warnings
+ - name: cargo check
+ run: cargo check
+ - name: cargo test
+ run: cargo test
diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 0000000..c3c8c37
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1,1 @@
+imports_granularity = "Crate"
diff --git a/src/lib.rs b/src/lib.rs
index d49112a..c5794ad 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -3,16 +3,19 @@
#![deny(unused_crate_dependencies)]
use boon::{Compiler, SchemaIndex, Schemas, ValidationError};
-use geozero::geojson::{GeoJsonString, GeoJsonWriter};
-use geozero::wkt::Wkt;
-use geozero::{CoordDimensions, GeozeroGeometry, ToJson, ToWkt};
-use pest::iterators::{Pair, Pairs};
-use pest::pratt_parser::PrattParser;
-use pest::Parser;
+use geozero::{
+ geojson::{GeoJsonString, GeoJsonWriter},
+ wkt::Wkt,
+ CoordDimensions, GeozeroGeometry, ToJson, ToWkt,
+};
+use pest::{
+ iterators::{Pair, Pairs},
+ pratt_parser::PrattParser,
+ Parser,
+};
use serde_derive::{Deserialize, Serialize};
use serde_json::Value;
-use std::fs;
-use std::path::Path;
+use std::{fs, io::Read, path::Path};
use thiserror::Error;
/// Crate-specific error enum.
@@ -675,8 +678,10 @@ pub fn parse_file(path: impl AsRef<Path>) -> Result<Expr, Error> {
}
fn get_stdin() -> Result<String, std::io::Error> {
- use std::env;
- use std::io::{self, IsTerminal};
+ use std::{
+ env,
+ io::{self, IsTerminal},
+ };
let args: Vec<String> = env::args().collect();
let mut buffer = String::new();
@@ -686,7 +691,7 @@ fn get_stdin() -> Result<String, std::io::Error> {
println!("Enter CQL2 as Text or JSON, then hit return");
io::stdin().read_line(&mut buffer)?;
} else {
- io::stdin().read_line(&mut buffer)?;
+ io::stdin().read_to_string(&mut buffer)?;
}
Ok(buffer)
}
| Create fixtures for standardized output for all tests that come from the OGC examples.
Right now, we test that running the parser against all OGC examples passes validation, but we do not actually check that the output is what we expect. Create CQL2-Text and CQL2-JSON fixtures for each OGC example, formatted to match the standardized output of cql2-rs, so that the parsed/standardized/validated output can be compared directly.
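A minimal sketch of what one such comparison test could look like, assuming a hypothetical `tests/fixtures/expected/` directory of checked-in expected outputs (the crate already exposes `parse_file` and `Expr::to_json`):

```rust
use cql2::parse_file;

// Hypothetical fixture-comparison test: parse an OGC CQL2-Text example and
// check the standardized CQL2-JSON against a pre-generated expected file.
#[test]
fn example01_standardized_json_matches_fixture() {
    let expr = parse_file("tests/fixtures/text/example01.txt").unwrap();
    let expected =
        std::fs::read_to_string("tests/fixtures/expected/example01.json").unwrap();
    assert_eq!(expr.to_json().unwrap(), expected.trim());
}
```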
| 2024-10-01T15:05:21 | 0.0 | [] | [] |
|||
connelldave/botocove | connelldave__botocove-36 | c0e8ff0c4bb9ba25057d7dd24c976b15d16251c7 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bf0b8da..68e1428 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,7 +1,7 @@
exclude: "^docs/gitbook/"
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v3.3.0 # Use the ref you want to point at
+ rev: v4.1.0
hooks:
- id: trailing-whitespace
- id: check-ast
@@ -21,13 +21,13 @@ repos:
name: isort (python)
- repo: https://github.com/ambv/black
- rev: 21.12b0
+ rev: 22.3.0
hooks:
- id: black
language_version: python3.8
- repo: https://github.com/pre-commit/pygrep-hooks
- rev: v1.7.0
+ rev: v1.9.0
hooks:
- id: python-use-type-annotations
- id: python-no-eval
@@ -55,4 +55,3 @@ repos:
entry: poetry run mypy .
language: system
pass_filenames: false
-
diff --git a/README.md b/README.md
index 535c71f..45c7ae5 100644
--- a/README.md
+++ b/README.md
@@ -174,15 +174,15 @@ An IAM role session name that will be passed to each Cove session's
`policy`: str
-A policy document that will be used as a session policy in each Cove session's
-`sts.assume_role()` call. Unless the value is None, it is passed through via the
-Policy parameter.
+A policy document that will be used as a session policy. A non-None value is
+passed through via the Policy parameter in each Cove session's
+`sts.assume_role()` call.
-`policy_arns`: List[str]
+`policy_arns`: List[[PolicyDescriptorTypeTypeDef](https://pypi.org/project/mypy-boto3-sts/)]
-A list of managed policy ARNs that will be used as a session policy in each Cove
-session's `sts.assume_role()` call. Unless the value is None, it is passed
-through via the PolicyArns parameter.
+A list of managed policy ARNs that will be used as a session policy. A non-None
+value is passed through via the PolicyArns parameter in each Cove session's
+`sts.assume_role()` call.
`assuming_session`: Session
diff --git a/botocove/cove_decorator.py b/botocove/cove_decorator.py
index c5583a8..db1fd0b 100644
--- a/botocove/cove_decorator.py
+++ b/botocove/cove_decorator.py
@@ -3,6 +3,7 @@
from typing import Any, Callable, List, Optional
from boto3.session import Session
+from mypy_boto3_sts.type_defs import PolicyDescriptorTypeTypeDef
from botocove.cove_host_account import CoveHostAccount
from botocove.cove_runner import CoveRunner
@@ -19,7 +20,7 @@ def cove(
rolename: Optional[str] = None,
role_session_name: Optional[str] = None,
policy: Optional[str] = None,
- policy_arns: Optional[List[str]] = None,
+ policy_arns: Optional[List[PolicyDescriptorTypeTypeDef]] = None,
assuming_session: Optional[Session] = None,
raise_exception: bool = False,
org_master: bool = True,
diff --git a/botocove/cove_host_account.py b/botocove/cove_host_account.py
index f8dda5f..2526ec1 100644
--- a/botocove/cove_host_account.py
+++ b/botocove/cove_host_account.py
@@ -20,6 +20,7 @@
from mypy_boto3_organizations.client import OrganizationsClient
from mypy_boto3_organizations.type_defs import ListChildrenResponseTypeDef
from mypy_boto3_sts.client import STSClient
+from mypy_boto3_sts.type_defs import PolicyDescriptorTypeTypeDef
from botocove.cove_types import CoveSessionInformation
@@ -39,7 +40,7 @@ def __init__(
rolename: Optional[str],
role_session_name: Optional[str],
policy: Optional[str],
- policy_arns: Optional[List[str]],
+ policy_arns: Optional[List[PolicyDescriptorTypeTypeDef]],
assuming_session: Optional[Session],
org_master: bool,
thread_workers: int,
diff --git a/botocove/cove_types.py b/botocove/cove_types.py
index 0efc795..498c5f7 100644
--- a/botocove/cove_types.py
+++ b/botocove/cove_types.py
@@ -1,6 +1,7 @@
from typing import Any, Dict, List, Optional, TypedDict
from mypy_boto3_organizations.literals import AccountStatusType
+from mypy_boto3_sts.type_defs import PolicyDescriptorTypeTypeDef
class CoveSessionInformation(TypedDict):
@@ -13,7 +14,7 @@ class CoveSessionInformation(TypedDict):
Status: Optional[AccountStatusType]
RoleSessionName: Optional[str]
Policy: Optional[str]
- PolicyArns: Optional[List[str]]
+ PolicyArns: Optional[List[PolicyDescriptorTypeTypeDef]]
Result: Any
ExceptionDetails: Optional[Exception]
Region: Optional[str]
| Can we do without DescribeAccount?
[CoveSession](https://github.com/connelldave/botocove/blob/c0e8ff0c4bb9ba25057d7dd24c976b15d16251c7/botocove/cove_session.py#L41) calls [`DescribeAccount`](https://docs.aws.amazon.com/organizations/latest/APIReference/API_DescribeAccount.html) once for each member account in the organization.
But [CoveHostAccount](https://github.com/connelldave/botocove/blob/c0e8ff0c4bb9ba25057d7dd24c976b15d16251c7/botocove/cove_host_account.py#L230) also calls [`ListAccounts`](https://docs.aws.amazon.com/organizations/latest/APIReference/API_ListAccounts.html) and pages it out fully, although everything except the account ID gets discarded.
DescribeAccount and ListAccounts return the same attributes for each account.
I'm wondering if it would be possible to retain the list of `AccountTypeDef` instead of just a set of IDs so that `CoveSessionInformation` objects can be built without calling DescribeAccount.
The practical reason for doing that would be to avoid DescribeAccount's throttling errors (see #12 and #17). They happen less now, but you can still get them if you give `cove` a function that returns quickly enough.
Once, to avoid errors in downstream tooling, I needed to filter my organization's account list to only those that have a working OrganizationAccountAccessRole. Cove does that almost for free :-) But because my function was almost a no-op, the DescribeAccount API would sometimes fail.
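A rough sketch of the idea (hypothetical helper names; botocove's real internals differ): the enrichment data can come straight out of the ListAccounts pages, so no per-account DescribeAccount call is needed.

```python
from typing import Any, Dict, List

def list_member_accounts(org_client: Any) -> List[Dict[str, Any]]:
    """Page ListAccounts once, keeping the whole AccountTypeDef per account."""
    accounts: List[Dict[str, Any]] = []
    for page in org_client.get_paginator("list_accounts").paginate():
        accounts.extend(page["Accounts"])
    return accounts

def enrichment_fields(account: Dict[str, Any]) -> Dict[str, Any]:
    # ListAccounts already returns Id, Arn, Email, Name and Status:
    # the same attributes DescribeAccount would fetch again per account.
    return {k: account[k] for k in ("Id", "Arn", "Email", "Name", "Status")}
```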
| I started hacking on this last night. It's harder than I thought because of the different ways to resolve accounts. But if I find a way to do it I'll share a PR for review :-) | 2022-03-31T01:00:46 | 0.0 | [] | [] |
||
connelldave/botocove | connelldave__botocove-24 | f5b84943a374aceece64124f1ce030f5772f7570 | diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
new file mode 100644
index 0000000..256a537
--- /dev/null
+++ b/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,70 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ master ]
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ master ]
+ schedule:
+ - cron: '29 8 * * 4'
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'python' ]
+ # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+ # Learn more about CodeQL language support at https://git.io/codeql-language-support
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v1
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v1
+
+ # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 https://git.io/JvXDl
+
+ # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+ # and modify them (or add more) to build your code if your project
+ # uses a compiled language
+
+ #- run: |
+ # make bootstrap
+ # make release
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v1
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 40139cb..c209665 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -22,13 +22,4 @@ jobs:
run: poetry install -n -v
- name: Lint with flakeheaven
- run: poetry run flakeheaven lint
-
- - name: Lint with black
- run: poetry run black . --check
-
- - name: Lint with isort
- run: poetry run isort . --check
-
- - name: Lint with mypy
- run: poetry run mypy .
+ run: poetry run pre-commit run --all-files
diff --git a/.gitignore b/.gitignore
index be29c14..b7dfa43 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
-
+
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..6b11b18
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,53 @@
+exclude: "^docs/gitbook/"
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v3.3.0 # Use the ref you want to point at
+ hooks:
+ - id: trailing-whitespace
+ - id: check-ast
+ - id: check-case-conflict
+ - id: debug-statements
+ - id: check-yaml
+
+ - repo: https://github.com/pycqa/isort
+ rev: 5.10.1
+ hooks:
+ - id: isort
+ name: isort (python)
+
+ - repo: https://github.com/ambv/black
+ rev: 21.12b0
+ hooks:
+ - id: black
+ language_version: python3.8
+
+ - repo: https://github.com/pre-commit/pygrep-hooks
+ rev: v1.7.0
+ hooks:
+ - id: python-use-type-annotations
+ - id: python-no-eval
+ - id: python-no-log-warn
+
+ - repo: local
+ hooks:
+ - id: pytest
+ name: pytest
+ entry: poetry run pytest tests
+ language: system
+ pass_filenames: false
+ # alternatively you could `types: [python]` so it only runs when python files change
+ # though tests might be invalidated if you were to say change a data file
+ always_run: true
+
+ - id: flakeheaven
+ name: flakeheaven
+ entry: poetry run flakeheaven lint
+ language: system
+ pass_filenames: false
+
+ - id: mypy
+ name: mypy
+ entry: poetry run mypy .
+ language: system
+ pass_filenames: false
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 69bde92..f42c6d6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [1.5.0] - 2022-06-02
+### Added
+- thread_workers argument
+
+### Fixed
+- Memory leak when running in large organisations: botocove now allows
+ completed Session objects to be garbage collected
+
## [1.4.1] - 2022-15-01
### Added
- Support for Policy and PolicyArn restriction on assumed roles
diff --git a/README.md b/README.md
index fe777d7..63f4ff8 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ account.
- Easy
- Dolphin Themed 🐬
-A simple decorator for functions to remove time and complexity burden. Uses
+A simple decorator for functions to remove time and complexity burden. Uses
`ThreadPoolExecutor` to run boto3 sessions against one to all
of your AWS accounts at (nearly!) the same speed as running against one.
@@ -79,13 +79,13 @@ def get_iam_users(session):
def main():
# No session passed as the decorator injects it
- all_results = get_iam_users()
+ all_results = get_iam_users()
# Now returns a Dict with keys Results, Exceptions and FailedAssumeRole
-
+
# A list of dictionaries for each account, with account details included.
# Each account's get_iam_users return is in a "Result" key.
- print(all_results["Results"])
-
+ print(all_results["Results"])
+
# A list of dictionaries for each account that raised an exception
print(all_results["Exceptions"])
@@ -96,7 +96,7 @@ def main():
## Arguments
### Cove
-`@cove()`:
+`@cove()`:
Uses boto3 credential chain to get every AWS account within the
organization, assume the `OrganizationAccountAccessRole` in it and run the
@@ -119,12 +119,12 @@ be ignored.
`rolename`: Optional[str]
-An IAM role name that will be attempted to assume in all target accounts.
+An IAM role name that will be attempted to assume in all target accounts.
Defaults to the AWS Organization default, `OrganizationAccountAccessRole`.
`role_session_name`: Optional[str]
-An IAM role session name that will be passed to each Cove session's `sts.assume_role()` call.
+An IAM role session name that will be passed to each Cove session's `sts.assume_role()` call.
Defaults to the name of the role being used if unset.
`policy`: Optional[str]
@@ -154,12 +154,20 @@ It is vital to run interruptible, idempotent code with this argument as `True`.
Defaults to True. When True, will leverage the Boto3 Organizations API to list
all accounts in the organization, and enrich each `CoveSession` with information
-available (`Id`, `Arn`, `Name`).
+available (`Id`, `Arn`, `Name`, `Status`, `Email`). Disabling this and providing your
+own full list of accounts may be a desirable optimisation if speed is an issue.
`org_master=False` means `target_ids` must be provided (as no list of accounts
can be created for you), as well as likely `rolename`. Only `Id` will be
available to `CoveSession`.
+`thread_workers`: int
+
+Defaults to 20. Cove utilises a ThreadPoolWorker under the hood, which can be tuned
+with this argument. Number of thread workers directly corrolates to memory usage: see
+[here](#is-botocove-thread-safe)
+
+
### CoveSession
Cove supplies an enriched Boto3 session to each function called. Account details
@@ -180,7 +188,7 @@ def do_nothing(session: CoveSession):
Wrapped functions return a dictionary. Each value contains List[Dict[str, Any]]:
```
{
- "Results": results:
+ "Results": results:
"Exceptions": exceptions,
"FailedAssumeRole": invalid_sessions,
}
@@ -195,13 +203,35 @@ An example of cove_output["Results"]:
'Status': 'ACTIVE',
'AssumeRoleSuccess': True,
'Result': wrapped_function_return_value # Result of wrapped func
- }
-]
+ }
+]
```
+### Is botocove thread safe?
+
+botocove is thread safe, but number of threaded executions will be bound by memory,
+network IO and AWS api rate limiting. Defaulting to 20 thread workers is a reasonable
+starting point, but can be further optimised for runtime with experimentation.
+
+botocove has no constraint or understanding of the function it's wrapping: it is
+recommended to avoid shared state for botocove wrapped functions, and to write simple
+functions that are written to be idempotent and independent.
+
+[Boto3 Session objects are not natively thread safe and should not be shared across threads](https://boto3.amazonaws.com/v1/documentation/api/1.14.31/guide/session.html#multithreading-or-multiprocessing-with-sessions).
+However, botocove is instantiating a new Session object per thread/account and running
+decorated functions inside their own closure. A shared client is used from the host account
+that botocove is run from (eg, an organization master account) -
+[clients are threadsafe](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/clients.html#multithreading-or-multiprocessing-with-clients) and allow this.
+
+boto3 sessions have a significant memory footprint:
+Version 1.5.0 of botocove was re-written to ensure that boto3 sessions are released
+after completion which resolved memory starvation issues. This was discussed here:
+https://github.com/connelldave/botocove/issues/20 and a relevant boto3 issue is here:
+https://github.com/boto/boto3/issues/1670
+
### botocove?
It turns out that the Amazon's Boto dolphins are solitary or small-group animals,
-unlike the large pods of dolphins in the oceans. This killed my "large group of
+unlike the large pods of dolphins in the oceans. This killed my "large group of
boto" idea, so the next best idea was where might they all shelter together... a
cove!
diff --git a/botocove/cove_decorator.py b/botocove/cove_decorator.py
index f8e668e..39554ee 100644
--- a/botocove/cove_decorator.py
+++ b/botocove/cove_decorator.py
@@ -5,8 +5,8 @@
from boto3.session import Session
+from botocove.cove_host_account import CoveHostAccount
from botocove.cove_runner import CoveRunner
-from botocove.cove_sessions import CoveSessions
from botocove.cove_types import CoveOutput, CoveSessionInformation, R
logger = logging.getLogger(__name__)
@@ -14,7 +14,7 @@
def dataclass_converter(d: CoveSessionInformation) -> Dict[str, Any]:
"""Unpack dataclass into dict and remove None values"""
- return {k: v for k, v in asdict(d).items() if v}
+ return {k: v for k, v in asdict(d).items() if v is not None}
def cove(
@@ -29,11 +29,13 @@ def cove(
assuming_session: Optional[Session] = None,
raise_exception: bool = False,
org_master: bool = True,
+ thread_workers: int = 20
) -> Callable:
def decorator(func: Callable[..., R]) -> Callable[..., CoveOutput]:
@functools.wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> CoveOutput:
- valid_sessions, invalid_sessions = CoveSessions(
+
+ host_account = CoveHostAccount(
target_ids=target_ids,
ignore_ids=ignore_ids,
rolename=rolename,
@@ -42,23 +44,32 @@ def wrapper(*args: Any, **kwargs: Any) -> CoveOutput:
policy_arns=policy_arns,
org_master=org_master,
assuming_session=assuming_session,
- ).get_cove_sessions()
+ )
runner = CoveRunner(
- valid_sessions=valid_sessions,
+ host_account=host_account,
func=func,
raise_exception=raise_exception,
func_args=args,
func_kwargs=kwargs,
+ thread_workers=thread_workers,
)
output = runner.run_cove_function()
# Rewrite dataclasses into untyped dicts to retain current functionality
return CoveOutput(
- FailedAssumeRole=[dataclass_converter(f) for f in invalid_sessions],
Results=[dataclass_converter(r) for r in output["Results"]],
- Exceptions=[dataclass_converter(e) for e in output["Exceptions"]],
+ Exceptions=[
+ dataclass_converter(e)
+ for e in output["Exceptions"]
+ if e.AssumeRoleSuccess
+ ],
+ FailedAssumeRole=[
+ dataclass_converter(f)
+ for f in output["Exceptions"]
+ if not f.AssumeRoleSuccess
+ ],
)
return wrapper
diff --git a/botocove/cove_sessions.py b/botocove/cove_host_account.py
similarity index 53%
rename from botocove/cove_sessions.py
rename to botocove/cove_host_account.py
index 17e8a74..1fb57c0 100644
--- a/botocove/cove_sessions.py
+++ b/botocove/cove_host_account.py
@@ -1,18 +1,13 @@
import logging
-from concurrent import futures
-from typing import Any, List, Literal, Optional, Set, Tuple, Union
+from typing import Any, List, Literal, Optional, Set, Union
import boto3
from boto3.session import Session
from botocore.config import Config
-from botocore.exceptions import ClientError
from mypy_boto3_organizations.client import OrganizationsClient
-from mypy_boto3_organizations.type_defs import AccountTypeDef
from mypy_boto3_sts.client import STSClient
-from tqdm import tqdm
-from botocove.cove_session import CoveSession
-from botocove.cove_types import CoveResults, CoveSessionInformation
+from botocove.cove_types import CoveSessionInformation
logger = logging.getLogger(__name__)
@@ -20,7 +15,7 @@
DEFAULT_ROLENAME = "OrganizationAccountAccessRole"
-class CoveSessions(object):
+class CoveHostAccount(object):
def __init__(
self,
target_ids: Optional[List[str]],
@@ -50,97 +45,26 @@ def __init__(
self.org_master = org_master
- def get_cove_sessions(self) -> Tuple[List[CoveSession], CoveResults]:
+ def get_cove_session_info(self) -> List[CoveSessionInformation]:
logger.info(
- f"Getting sessions in accounts: {self.role_to_assume=} "
+ f"Getting session information: {self.role_to_assume=} "
f"{self.role_session_name=} {self.target_accounts=} "
f"{self.provided_ignore_ids=}"
)
logger.info(f"Session policy: {self.policy_arns=} {self.policy=}")
- with futures.ThreadPoolExecutor(max_workers=20) as executor:
- sessions = list(
- tqdm(
- executor.map(self._cove_session_factory, self.target_accounts),
- total=len(self.target_accounts),
- desc="Assuming sessions",
- colour="#39ff14", # neon green
- )
+ sessions = []
+ for account_id in self.target_accounts:
+ account_details: CoveSessionInformation = CoveSessionInformation(
+ Id=account_id,
+ RoleName=self.role_to_assume,
+ RoleSessionName=self.role_session_name,
+ Policy=self.policy,
+ PolicyArns=self.policy_arns,
)
+ sessions.append(account_details)
- self.valid_sessions = [
- session for session in sessions if session.assume_role_success is True
- ]
- if not self.valid_sessions:
- raise ValueError("No accounts are accessible: check logs for detail")
-
- self.invalid_sessions = self._get_invalid_cove_sessions(sessions)
- return self.valid_sessions, self.invalid_sessions
-
- def _cove_session_factory(self, account_id: str) -> CoveSession:
- role_arn = f"arn:aws:iam::{account_id}:role/{self.role_to_assume}"
- account_details: CoveSessionInformation = CoveSessionInformation(
- Id=account_id,
- RoleSessionName=self.role_session_name,
- Policy=self.policy,
- PolicyArns=self.policy_arns,
- )
-
- if self.org_master:
- try:
- account_description: AccountTypeDef = self.org_client.describe_account(
- AccountId=account_id
- )["Account"]
- account_details.Arn = account_description["Arn"]
- account_details.Email = account_description["Email"]
- account_details.Name = account_description["Name"]
- account_details.Status = account_description["Status"]
- except ClientError:
- logger.exception(f"Failed to call describe_account for {account_id}")
-
- cove_session = CoveSession(account_details)
-
- try:
- logger.debug(f"Attempting to assume {role_arn}")
- # This calling style avoids a ParamValidationError from botocore.
- # Passing None is not allowed for the optional parameters.
-
- assume_role_args = {
- k: v
- for k, v in [
- ("RoleArn", role_arn),
- ("RoleSessionName", self.role_session_name),
- ("Policy", self.policy),
- ("PolicyArns", self.policy_arns),
- ]
- if v is not None
- }
- creds = self.sts_client.assume_role(**assume_role_args)["Credentials"] # type: ignore[arg-type] # noqa E501
- cove_session.initialize_boto_session(
- aws_access_key_id=creds["AccessKeyId"],
- aws_secret_access_key=creds["SecretAccessKey"],
- aws_session_token=creds["SessionToken"],
- )
- except ClientError as e:
- cove_session.store_exception(e)
-
- return cove_session
-
- def _get_invalid_cove_sessions(self, sessions: List[CoveSession]) -> CoveResults:
- invalid_sessions = [
- session.format_cove_error()
- for session in sessions
- if session.assume_role_success is False
- ]
-
- if invalid_sessions:
- logger.warning("Could not assume role into these accounts:")
- for invalid_session in invalid_sessions:
- logger.warning(invalid_session)
- invalid_ids = [failure.Id for failure in invalid_sessions]
- logger.warning(f"\n\nInvalid session Account IDs as list: {invalid_ids}")
-
- return invalid_sessions
+ return sessions
def _get_boto3_client(
self,
diff --git a/botocove/cove_runner.py b/botocove/cove_runner.py
index b352510..adeffde 100644
--- a/botocove/cove_runner.py
+++ b/botocove/cove_runner.py
@@ -1,9 +1,10 @@
import logging
from concurrent import futures
-from typing import Any, Callable, List, Tuple
+from typing import Any, Callable
from tqdm import tqdm
+from botocove.cove_host_account import CoveHostAccount
from botocove.cove_session import CoveSession
from botocove.cove_types import (
CoveFunctionOutput,
@@ -18,60 +19,67 @@
class CoveRunner(object):
def __init__(
self,
- valid_sessions: List[CoveSession],
+ host_account: CoveHostAccount,
func: Callable[..., R],
raise_exception: bool,
func_args: Any,
func_kwargs: Any,
+ thread_workers: int,
) -> None:
- self.sessions = valid_sessions
- self.raise_exception = raise_exception
+
+ self.host_account = host_account
+ self.sessions = host_account.get_cove_session_info()
+
self.cove_wrapped_func = func
+ self.raise_exception = raise_exception
self.func_args = func_args
self.func_kwargs = func_kwargs
+ self.thread_workers = thread_workers
+
def run_cove_function(self) -> CoveFunctionOutput:
# Run decorated func with all valid sessions
- results, exceptions = self._async_boto3_call()
+ with futures.ThreadPoolExecutor(max_workers=self.thread_workers) as executor:
+ completed: CoveResults = list(
+ tqdm(
+ executor.map(self.cove_thread, self.sessions),
+ total=len(self.sessions),
+ desc="Executing function",
+ colour="#ff69b4", # hotpink
+ )
+ )
+ successful_results = [
+ result for result in completed if not result.ExceptionDetails
+ ]
+ exceptions = [result for result in completed if result.ExceptionDetails]
+
return CoveFunctionOutput(
- Results=results,
+ Results=successful_results,
Exceptions=exceptions,
)
- def cove_exception_wrapper_func(
+ def cove_thread(
self,
- account_session: CoveSession,
+ account_session_info: CoveSessionInformation,
) -> CoveSessionInformation:
- # Wrapper capturing exceptions and formatting results
+ cove_session = CoveSession(
+ account_session_info,
+ sts_client=self.host_account.sts_client,
+ org_client=self.host_account.org_client,
+ org_master=self.host_account.org_master,
+ )
try:
+ cove_session.activate_cove_session()
+
result = self.cove_wrapped_func(
- account_session, *self.func_args, **self.func_kwargs
+ cove_session, *self.func_args, **self.func_kwargs
)
- return account_session.format_cove_result(result)
+
+ return cove_session.format_cove_result(result)
+
except Exception as e:
if self.raise_exception is True:
- account_session.store_exception(e)
- logger.exception(account_session.format_cove_error())
+ logger.exception(cove_session.format_cove_error(e))
raise
else:
- account_session.store_exception(e)
- return account_session.format_cove_error()
-
- def _async_boto3_call(
- self,
- ) -> Tuple[CoveResults, CoveResults]:
- with futures.ThreadPoolExecutor(max_workers=20) as executor:
- completed: CoveResults = list(
- tqdm(
- executor.map(self.cove_exception_wrapper_func, self.sessions),
- total=len(self.sessions),
- desc="Executing function",
- colour="#ff69b4", # hotpink
- )
- )
-
- successful_results = [
- result for result in completed if not result.ExceptionDetails
- ]
- exceptions = [result for result in completed if result.ExceptionDetails]
- return successful_results, exceptions
+ return cove_session.format_cove_error(e)
diff --git a/botocove/cove_session.py b/botocove/cove_session.py
index 82a54e2..cbbbb26 100644
--- a/botocove/cove_session.py
+++ b/botocove/cove_session.py
@@ -1,9 +1,16 @@
+import logging
from typing import Any
from boto3.session import Session
+from botocore.exceptions import ClientError
+from mypy_boto3_organizations.client import OrganizationsClient
+from mypy_boto3_organizations.type_defs import AccountTypeDef
+from mypy_boto3_sts.client import STSClient
from botocove.cove_types import CoveSessionInformation, R
+logger = logging.getLogger(__name__)
+
class CoveSession(Session):
"""Enriches a boto3 Session with account data from Master account if run from
@@ -15,27 +22,81 @@ class CoveSession(Session):
session_information: CoveSessionInformation
stored_exception: Exception
- def __init__(self, session_info: CoveSessionInformation) -> None:
+ def __init__(
+ self,
+ session_info: CoveSessionInformation,
+ org_client: OrganizationsClient,
+ sts_client: STSClient,
+ org_master: bool,
+ ) -> None:
self.session_information = session_info
+ self.org_master = org_master
+ self.org_client = org_client
+ self.sts_client = sts_client
def __repr__(self) -> str:
# Overwrite boto3's repr to avoid AttributeErrors
return f"{self.__class__.__name__}(account_id={self.session_information.Id})"
+ def activate_cove_session(self) -> "CoveSession":
+ role_arn = (
+ f"arn:aws:iam::{self.session_information.Id}:role/"
+ f"{self.session_information.RoleName}"
+ )
+
+ if self.org_master:
+ try:
+ account_description: AccountTypeDef = self.org_client.describe_account(
+ AccountId=self.session_information.Id
+ )["Account"]
+ self.session_information.Arn = account_description["Arn"]
+ self.session_information.Email = account_description["Email"]
+ self.session_information.Name = account_description["Name"]
+ self.session_information.Status = account_description["Status"]
+ except ClientError:
+ logger.exception(
+ f"Failed to call describe_account for {self.session_information.Id}"
+ )
+
+ try:
+ logger.debug(f"Attempting to assume {role_arn}")
+
+ # This calling style avoids a ParamValidationError from botocore.
+ # Passing None is not allowed for the optional parameters.
+ assume_role_args = {
+ k: v
+ for k, v in [
+ ("RoleArn", role_arn),
+ ("RoleSessionName", self.session_information.RoleSessionName),
+ ("Policy", self.session_information.Policy),
+ ("PolicyArns", self.session_information.PolicyArns),
+ ]
+ if v is not None
+ }
+ creds = self.sts_client.assume_role(**assume_role_args)["Credentials"] # type: ignore[arg-type] # noqa E501
+ self.initialize_boto_session(
+ aws_access_key_id=creds["AccessKeyId"],
+ aws_secret_access_key=creds["SecretAccessKey"],
+ aws_session_token=creds["SessionToken"],
+ )
+ self.session_information.AssumeRoleSuccess = True
+ except ClientError:
+ logger.error(
+ f"Failed to initalize cove session for "
+ f"account {self.session_information.Id}"
+ )
+ raise
+
+ return self
+
def initialize_boto_session(self, *args: Any, **kwargs: Any) -> None:
# Inherit from and initialize standard boto3 Session object
super().__init__(*args, **kwargs)
- self.assume_role_success = True
- self.session_information.AssumeRoleSuccess = self.assume_role_success
-
- def store_exception(self, err: Exception) -> None:
- self.stored_exception = err
def format_cove_result(self, result: R) -> CoveSessionInformation:
self.session_information.Result = result
return self.session_information
- def format_cove_error(self) -> CoveSessionInformation:
- self.session_information.ExceptionDetails = self.stored_exception
- self.session_information.AssumeRoleSuccess = self.assume_role_success
+ def format_cove_error(self, err: Exception) -> CoveSessionInformation:
+ self.session_information.ExceptionDetails = err
return self.session_information
diff --git a/botocove/cove_types.py b/botocove/cove_types.py
index 7a111a1..3e593e9 100644
--- a/botocove/cove_types.py
+++ b/botocove/cove_types.py
@@ -9,11 +9,12 @@
@dataclass
class CoveSessionInformation(Generic[R]):
Id: str
+ RoleName: str
+ AssumeRoleSuccess: bool = False
Arn: Optional[str] = None
Email: Optional[str] = None
Name: Optional[str] = None
Status: Optional[AccountStatusType] = None
- AssumeRoleSuccess: Optional[bool] = None
RoleSessionName: Optional[str] = None
Policy: Optional[str] = None
PolicyArns: Optional[List[str]] = None
diff --git a/poetry.lock b/poetry.lock
index 09f1820..058f2dd 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -62,14 +62,14 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "boto3"
-version = "1.20.38"
+version = "1.20.41"
description = "The AWS SDK for Python"
category = "main"
optional = false
python-versions = ">= 3.6"
[package.dependencies]
-botocore = ">=1.23.38,<1.24.0"
+botocore = ">=1.23.41,<1.24.0"
jmespath = ">=0.7.1,<1.0.0"
s3transfer = ">=0.5.0,<0.6.0"
@@ -78,8 +78,8 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "boto3-stubs"
-version = "1.20.38"
-description = "Type annotations for boto3 1.20.38, generated by mypy-boto3-builder 6.3.2"
+version = "1.20.40"
+description = "Type annotations for boto3 1.20.40, generated by mypy-boto3-builder 6.3.2"
category = "main"
optional = false
python-versions = ">=3.6"
@@ -395,7 +395,7 @@ xray = ["mypy-boto3-xray (>=1.20.0)"]
[[package]]
name = "botocore"
-version = "1.23.38"
+version = "1.23.41"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
@@ -411,8 +411,8 @@ crt = ["awscrt (==0.12.5)"]
[[package]]
name = "botocore-stubs"
-version = "1.23.38"
-description = "Type annotations for botocore 1.23.38, generated by mypy-boto3-builder 6.3.2"
+version = "1.23.40"
+description = "Type annotations for botocore 1.23.40, generated by mypy-boto3-builder 6.3.2"
category = "main"
optional = false
python-versions = ">=3.6"
@@ -420,6 +420,14 @@ python-versions = ">=3.6"
[package.dependencies]
typing-extensions = {version = "*", markers = "python_version < \"3.9\""}
+[[package]]
+name = "cfgv"
+version = "3.3.1"
+description = "Validate configuration and produce human readable error messages."
+category = "dev"
+optional = false
+python-versions = ">=3.6.1"
+
[[package]]
name = "click"
version = "8.0.3"
@@ -439,11 +447,19 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+[[package]]
+name = "distlib"
+version = "0.3.4"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "entrypoints"
version = "0.3"
description = "Discover and load entry points from installed packages."
-category = "main"
+category = "dev"
optional = false
python-versions = ">=2.7"
@@ -455,11 +471,23 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "filelock"
+version = "3.4.2"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
+testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+
[[package]]
name = "flake8"
version = "4.0.1"
description = "the modular source code checker: pep8 pyflakes and co"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=3.6"
@@ -609,7 +637,7 @@ flake8-plugin-utils = ">=1.3.2,<2.0.0"
name = "flakeheaven"
version = "0.11.0"
description = "Flake8 wrapper to make it nice and configurable"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=3.5"
@@ -647,6 +675,17 @@ python-versions = ">=3.7"
[package.dependencies]
gitdb = ">=4.0.1,<5"
+[[package]]
+name = "identify"
+version = "2.4.4"
+description = "File identification library for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6.1"
+
+[package.extras]
+license = ["ukkonen"]
+
[[package]]
name = "iniconfig"
version = "1.1.1"
@@ -681,7 +720,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
name = "mccabe"
version = "0.6.1"
description = "McCabe checker, plugin for flake8"
-category = "main"
+category = "dev"
optional = false
python-versions = "*"
@@ -732,6 +771,14 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "nodeenv"
+version = "1.6.0"
+description = "Node.js virtual environment builder"
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "packaging"
version = "21.3"
@@ -795,6 +842,22 @@ python-versions = ">=3.6"
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
+[[package]]
+name = "pre-commit"
+version = "2.17.0"
+description = "A framework for managing and maintaining multi-language pre-commit hooks."
+category = "dev"
+optional = false
+python-versions = ">=3.6.1"
+
+[package.dependencies]
+cfgv = ">=2.0.0"
+identify = ">=1.0.0"
+nodeenv = ">=0.11.1"
+pyyaml = ">=5.1"
+toml = "*"
+virtualenv = ">=20.0.8"
+
[[package]]
name = "py"
version = "1.11.0"
@@ -807,7 +870,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
name = "pycodestyle"
version = "2.8.0"
description = "Python style guide checker"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
@@ -815,7 +878,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
name = "pyflakes"
version = "2.4.0"
description = "passive checker of Python programs"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
@@ -823,13 +886,13 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
name = "pygments"
version = "2.11.2"
description = "Pygments is a syntax highlighting package written in Python."
-category = "main"
+category = "dev"
optional = false
python-versions = ">=3.5"
[[package]]
name = "pyparsing"
-version = "3.0.6"
+version = "3.0.7"
description = "Python parsing module"
category = "dev"
optional = false
@@ -950,7 +1013,7 @@ test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybi
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
@@ -999,10 +1062,28 @@ brotli = ["brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+[[package]]
+name = "virtualenv"
+version = "20.13.0"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+distlib = ">=0.3.1,<1"
+filelock = ">=3.2,<4"
+platformdirs = ">=2,<3"
+six = ">=1.9.0,<2"
+
+[package.extras]
+docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
+testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+
[metadata]
lock-version = "1.1"
python-versions = "^3.8"
-content-hash = "76028fd434fdd3aa5eb4c5895765f5f27ec701489e834125edcc99452d5a27e4"
+content-hash = "77f63aae84d92ce55be9aab5e019f8fc53cf3945c923f645470356780c02a9bd"
[metadata.files]
atomicwrites = [
@@ -1022,20 +1103,24 @@ black = [
{file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"},
]
boto3 = [
- {file = "boto3-1.20.38-py3-none-any.whl", hash = "sha256:22b243302f526df9c599c6b81092cb3c62f785bc06cedceeff9054489df4ffb3"},
- {file = "boto3-1.20.38.tar.gz", hash = "sha256:edeae6d38c98691cb9da187c541f3033e0f30d6b2a0b54b5399a44d9b3ba4f61"},
+ {file = "boto3-1.20.41-py3-none-any.whl", hash = "sha256:aaddf6cf93568b734ad62fd96991775bccc7f016e93ff4e98dc1aa4f7586440c"},
+ {file = "boto3-1.20.41.tar.gz", hash = "sha256:fb02467a6e8109c7db994ba77fa2e8381ed129ce312988d8ef23edf6e3a3c7f1"},
]
boto3-stubs = [
- {file = "boto3-stubs-1.20.38.tar.gz", hash = "sha256:123f56892453ce268d7bfe43b1280241e51d66a18c502e00dafa58ab5784f1bc"},
- {file = "boto3_stubs-1.20.38-py3-none-any.whl", hash = "sha256:937cfbe7e3685b0bc6ab1bd1853c72143154af68e700d3920f40caec145be2f4"},
+ {file = "boto3-stubs-1.20.40.tar.gz", hash = "sha256:24f23e14de15d29a85e301b5beb144d2c778ed350e0c08a2136a978c8105e3c9"},
+ {file = "boto3_stubs-1.20.40-py3-none-any.whl", hash = "sha256:2e940afd4a47688bb536155b10bdc65cc99390217bfcb392f4fc8c188646a65f"},
]
botocore = [
- {file = "botocore-1.23.38-py3-none-any.whl", hash = "sha256:49b304d9d4a782d7108f6a5ca0df6557da20a22b74d5bf745f02fea5cffc35ca"},
- {file = "botocore-1.23.38.tar.gz", hash = "sha256:f733bc565f144f0ec97ffe0d51235d358ad2f5f12b331563b69d9e9227262a36"},
+ {file = "botocore-1.23.41-py3-none-any.whl", hash = "sha256:41104e1c976c9c410387b3c7d265466b314f287a1c13fd4b543768135301058a"},
+ {file = "botocore-1.23.41.tar.gz", hash = "sha256:9137c59c4eb1dee60ae3c710e94f56119a1b33b0b17ff3ad878fc2f4ce77843a"},
]
botocore-stubs = [
- {file = "botocore-stubs-1.23.38.tar.gz", hash = "sha256:ad954929705c0496df58d46ec0b23d2c53dd8700288ba84d49116c45450a9f5b"},
- {file = "botocore_stubs-1.23.38-py3-none-any.whl", hash = "sha256:5b587016bacd4bb82207b8953e9f2b0b5cf811b350557e0d8760aeaaed86beef"},
+ {file = "botocore-stubs-1.23.40.tar.gz", hash = "sha256:48529a2b7e14c6e3dd4544c21d4cf342ad512e2a526f5262c565357683d78787"},
+ {file = "botocore_stubs-1.23.40-py3-none-any.whl", hash = "sha256:b5762895175cbacfa989b7ff313ca20f30f82137fcfd8a389cfe4a920cb57e73"},
+]
+cfgv = [
+ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
+ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
]
click = [
{file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"},
@@ -1045,6 +1130,10 @@ colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
+distlib = [
+ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
+ {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
+]
entrypoints = [
{file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"},
{file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"},
@@ -1052,6 +1141,10 @@ entrypoints = [
eradicate = [
{file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"},
]
+filelock = [
+ {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"},
+ {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"},
+]
flake8 = [
{file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
{file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
@@ -1111,6 +1204,10 @@ gitpython = [
{file = "GitPython-3.1.26-py3-none-any.whl", hash = "sha256:26ac35c212d1f7b16036361ca5cff3ec66e11753a0d677fb6c48fa4e1a9dd8d6"},
{file = "GitPython-3.1.26.tar.gz", hash = "sha256:fc8868f63a2e6d268fb25f481995ba185a85a66fcad126f039323ff6635669ee"},
]
+identify = [
+ {file = "identify-2.4.4-py2.py3-none-any.whl", hash = "sha256:aa68609c7454dbcaae60a01ff6b8df1de9b39fe6e50b1f6107ec81dcda624aa6"},
+ {file = "identify-2.4.4.tar.gz", hash = "sha256:6b4b5031f69c48bf93a646b90de9b381c6b5f560df4cbe0ed3cf7650ae741e4d"},
+]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
@@ -1161,6 +1258,10 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
+nodeenv = [
+ {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
+ {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
+]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
@@ -1185,6 +1286,10 @@ pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
+pre-commit = [
+ {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"},
+ {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"},
+]
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
@@ -1202,8 +1307,8 @@ pygments = [
{file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
]
pyparsing = [
- {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"},
- {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"},
+ {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
+ {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
]
pytest = [
{file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
@@ -1292,3 +1397,7 @@ urllib3 = [
{file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
{file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
]
+virtualenv = [
+ {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"},
+ {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"},
+]
diff --git a/pyproject.toml b/pyproject.toml
index 4629726..17d31e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "botocove"
-version = "1.4.1"
+version = "1.5.0"
description = "A decorator to allow running a function against all AWS accounts in an organization"
authors = ["Dave Connell <[email protected]>"]
license = "LGPL-3.0-or-later"
@@ -13,14 +13,12 @@ python = "^3.8"
boto3 = "*"
tqdm = "*"
boto3-stubs = {extras = ["sts", "organizations"], version = "*"}
-flakeheaven = "^0.11.0"
[tool.poetry.dev-dependencies]
pytest = "*"
pytest-mock = "*"
isort = "*"
black = "*"
-flake8-bandit = "*"
flake8-bugbear = "*"
flake8-builtins = "*"
flake8-comprehensions = "*"
@@ -31,6 +29,8 @@ flake8-pytest-style = "*"
pep8-naming = "*"
flake8-print = "*"
mypy = "*"
+pre-commit = "*"
+flakeheaven = "*"
[build-system]
@@ -57,7 +57,6 @@ flake8-bugbear = ["+*"]
flake8-builtins = ["+*"]
flake8-comprehensions = ["+*"]
flake8-eradicate = ["+*"]
-flake8-isort = ["+*"]
flake8-mutable = ["+*"]
flake8-pytest-style = ["+*"]
mccabe = ["+*"]
| How to reduce memory usage?
When I run botocove in an interactive session, or when I run it as part of [aws-org-inventory](https://github.com/iainelder/aws-org-inventory), and query a sufficiently large organization, the process consumes so much memory so as to destabilize the operating system. (Ubuntu gets ugly when it runs out of memory.)
It appears that the amount of memory required is proportional to the number of accounts in the organization.
I haven't had time to study it carefully, but today I did watch the system activity monitor while running aws-org-inventory across a large organization (around 1000 accounts).
The memory consumption started in the low MBs, which is normal for Python, and increased steadily as it queried each account. When it finally completed processing, it had consumed 4GB of memory. (The thing is that my work machine has only 8GB of memory :sweat_smile: )
Is there a different programming technique I should use as a client to avoid this?
Or can something be changed in the library, such as using a generator to yield results instead of collecting a huge object?
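For illustration, a minimal sketch of what the generator idea could look like, assuming a hypothetical per-account `query_account` callable (this is not botocove's API, just the shape of the pattern):

```python
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Any, Callable, Iterable, Iterator


def iter_results(
    query_account: Callable[[str], Any],
    account_ids: Iterable[str],
    workers: int = 20,
) -> Iterator[Any]:
    # Yield each account's result as soon as its thread finishes, so the
    # caller can process or write results incrementally instead of holding
    # one huge object in memory.
    with ThreadPoolExecutor(max_workers=workers) as executor:
        futures = [executor.submit(query_account, a) for a in account_ids]
        for future in as_completed(futures):
            yield future.result()
```

Whether this actually helps depends on whether the results or the sessions dominate the footprint.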
| TLDR: Sessions aren't threadsafe. I don't know enough about Python to say why, but I think sharing the boto3 sessions across threads is causing the leak.
> Can something be changed in the library, such as using a generator to yield results instead of collecting a huge object?
I measured the memory use of various implementations of CoveRunner in a version of botocove that I modified for testing (see #23 for the ideas that led to this).
The chart below shows memory use over time to list the IAM users in an organization of 500 member accounts.

The X axis shows memory use in gigabytes. The Y axis shows elapsed quarters of a second.
The MultiThreadedListCoveRunner is the original implementation. The MultiThreadedGenCoveRunner uses a generator instead of a list to return results. Over the long term, each implementation uses the same amount of memory: 2.5GB.
The MonoThreaded versions of these take longer, but use the same amount of memory.
The amount of memory used scales linearly with the number of queried accounts.
If you want to know more about how this was set up, check the README and the testing_botocove.md in my memory_profiling branch: https://github.com/iainelder/botocove/tree/memory_profiling/profiling (It doesn't necessarily make much sense to anyone other than me :sweat_smile:)
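For anyone reproducing measurements like these without the full branch, a minimal RSS-sampling sketch in the same spirit (assuming `psutil` is installed):

```python
import threading
import time

import psutil


def sample_rss(samples: list, interval: float = 0.25) -> threading.Event:
    # Append the current process RSS (bytes) to `samples` every `interval`
    # seconds until the returned event is set.
    stop = threading.Event()
    process = psutil.Process()

    def loop() -> None:
        while not stop.is_set():
            samples.append(process.memory_info().rss)
            time.sleep(interval)

    threading.Thread(target=loop, daemon=True).start()
    return stop
```

Start it before the run, call `.set()` on the returned event afterwards, and plot `samples` over time.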
---
Before I got around to also rewriting the CoveSessions class as a generator, I learned that memory leaks in boto are a common occurrence. We're in good company at least!
* https://github.com/boto/botocore/issues/2047
* https://github.com/boto/boto3/issues/1670
* https://github.com/boto/botocore/issues/1246
* https://githubmate.com/repo/kislyuk/watchtower/issues/34
These issues were addressed recently by better boto3 documentation for threading:
* https://github.com/boto/boto3/pull/2848
To cut to the chase: Sessions aren't threadsafe. I don't know enough about Python to say why, but I think sharing the boto3 sessions across threads is causing the leak.
From the latest [Session documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/session.html):
> Similar to Resource objects, Session objects are not thread safe and should not be shared across threads and processes. It's recommended to create a new Session object for each thread or process.
Thanks to @swetashre's [sample code](https://github.com/boto/botocore/issues/2047#issuecomment-632250344) for profiling boto3 which I adapted for this investigation.
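For reference, a minimal sketch of the per-thread pattern that guidance recommends, using thread-local storage (the helper names here are illustrative):

```python
import threading

import boto3

_thread_local = threading.local()


def thread_session() -> boto3.session.Session:
    # Lazily build exactly one Session per thread and reuse it; Sessions
    # are never shared across threads, per the boto3 documentation.
    if not hasattr(_thread_local, "session"):
        _thread_local.session = boto3.session.Session()
    return _thread_local.session


def list_iam_users() -> list:
    client = thread_session().client("iam")
    return client.list_users()["Users"]
```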
This is super interesting, thanks - great notes. Interestingly from that writeup, I think we are handling Sessions in a safe manner (they're not shared across threads - we cut a new session for each thread), but instantiation of a Session just eats a chunk of memory and we do a lot of that - this issue was the one that felt closest to what we're seeing? https://github.com/boto/boto3/issues/1670
Either way, might be worth calling out in the readme that botocove's memory requirements scale linearly with the number of targets and provisioning an EC2 instance might be a requirement for hitting 5k accounts at once 😅
I wonder if #23 might still have a solution here: if I'm understanding the problem correctly (we use a Session per thread which is memory-safe but expensive as each instance isn't cleaned up until the script finishes), a generator or context manager might allow cleanup during execution to release memory?
One reason I refactored the codebase to just use threadpools over async.io was the opportunity to have a single thread do the whole task, rather than "batch get all the sessions -> resolve all threads", "batch do all the sessions -> resolve threads", we could have a threadpool that does lots of "get a session, assume a role, do the work, finish" - I was initially thinking about this to take the pressure off the sts assume role and describe account limits you saw previously.
I wonder if we could do something like a) see if a Boto3 session can be used as `with Session as s:`, if that's not inherent then implement a manual cleanup around boto3 somehow (seems most likely), or ideally check the behaviour of a generator that's finished with the Session and can drop it (this one seems like a no-go from your tests).
Just top of mind after reading your excellent research, may be entirely up the wrong tree :)
> we use a Session per thread which is memory-safe but expensive as each instance isn't cleaned up until the script finishes
In the current implementation, each session isn't scoped to a single thread. The session-starting thread returns each session to the main thread. The session is later picked up by the function-running thread. So each session is shared among three threads. I think that sharing causes the memory leak.
In my mono-threaded implementation the memory consumption is the same. Here the main thread is also the function-running thread. So each session is shared among two threads, and there is still a memory leak.
> we could have a threadpool that does lots of "get a session, assume a role, do the work, finish"
I think this is worth exploring. In fact, I already put together a prototype script that rearranges the existing parts to use a single thread pool that keeps the session scoped to the worker thread as you describe. Instead of iterating over sessions, the worker thread iterates over account IDs.
See the main method in the hopefully-named thread_safe_demo.py for what I imagine the body of the decorator would look like. https://github.com/iainelder/botocove/blob/91740738567e8b869aabb2e7b8629abf03d3e080/thread_safe_demo.py
It works on my personal org just like the current decorator, except that tqdm paints only one progress bar (See below for example output.)
The next step is to run another experiment with the two decorator implementations side by side to see how the memory consumption compares.
```
$ poetry run python thread_safe_demo.py
Querying organization: 100%|███████████████████████████████████████████████| 7/7 [00:01<00:00, 5.32it/s]
{'Exceptions': [],
'FailedAssumeRole': [{'ExceptionDetails': ClientError('An error occurred (AccessDenied) when calling the AssumeRole operation: User: arn:aws:sts::480783779961:assumed-role/AWSReservedSSO_AdministratorAccess_10cd3aecf3710de6/iain is not authorized to perform: sts:AssumeRole on resource: arn:aws:iam::111111111111:role/OrganizationAccountAccessRole'),
'Id': '111111111111',
'RoleSessionName': 'OrganizationAccountAccessRole'}],
'Results': [{'AssumeRoleSuccess': True,
'Id': '139442570134',
'Result': {'Account': '139442570134',
'Arn': 'arn:aws:sts::139442570134:assumed-role/OrganizationAccountAccessRole/OrganizationAccountAccessRole',
'UserId': 'AROASA53MYOLKFBJWMTDK:OrganizationAccountAccessRole'},
'RoleSessionName': 'OrganizationAccountAccessRole'},
[...]
```
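Stripped to its core, the one-phase worker could be sketched like this (names are illustrative; the real prototype is in `thread_safe_demo.py` linked above):

```python
from concurrent.futures import ThreadPoolExecutor
from functools import partial

import boto3


def run_in_account(role_name: str, account_id: str) -> dict:
    # One thread owns the whole unit of work: assume the role, build a
    # short-lived Session, run the query, return plain data. The Session
    # becomes garbage as soon as this function returns.
    creds = boto3.client("sts").assume_role(
        RoleArn=f"arn:aws:iam::{account_id}:role/{role_name}",
        RoleSessionName=role_name,
    )["Credentials"]
    session = boto3.session.Session(
        aws_access_key_id=creds["AccessKeyId"],
        aws_secret_access_key=creds["SecretAccessKey"],
        aws_session_token=creds["SessionToken"],
    )
    users = session.client("iam").list_users()["Users"]
    return {"Id": account_id, "UserCount": len(users)}


def one_phase(account_ids: list, role_name: str, workers: int = 20) -> list:
    with ThreadPoolExecutor(max_workers=workers) as executor:
        return list(executor.map(partial(run_in_account, role_name), account_ids))
```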
I set up a new experiment to compare the current decorator implementation and the "thread-safe" version I described above. See [compare_decorators.py](https://github.com/iainelder/botocove/blob/5a55cfc4a7277c20829efe97218996b17393a640/profiling/compare_decorators.py) for the implementation.
The experiment refers to the current implementation as `two_phase_cove`, because of the session-starting phase implemented by CoveSessions and the function-running phase implemented by CoveRunner, each with their own thread pool.
It refers to the thread-safe version as `one_phase_cove` because pieces of CoveSessions and CoveRunner are mixed together to get the same result with a single thread pool.
It looks like we might be on to something with the one-phase version.
This plot records memory use across an organization of 500 member accounts, run on an r6g.large.

This plot records memory use across an organization of 5000 (!) member accounts, run on an r6g.4xlarge.

`one_phase_cove` uses a constant amount of memory. It uses less than 10% of the peak memory used by `two_phase_cove`.
`one_phase_cove` takes about 70% of the time taken by `two_phase_cove`.
`two_phase_cove` uses about 23GB of memory to query 5000 accounts. `one_phase_cove` uses less than 1GB. (The plot isn't detailed enough to say, but I suspect that it's not much more than the 0.25GB required to query 500 accounts.)
> > we use a Session per thread which is memory-safe but expensive as each instance isn't cleaned up until the script finishes
>
> In the current implementation, each session isn't scoped to a single thread. The session-starting thread returns each session to the main thread. The session is later picked up by the function-running thread. So each session is shared among three threads. I think that sharing causes the memory leak.
I'm relatively confident that this isn't the case: the problem is that we instantiate (for `N` accounts) `N*2` boto3 sessions and only allow them to be garbage collected at the end of the botocove interaction: https://github.com/boto/boto3/issues/1670#issuecomment-456991798 gives context of how and why a boto3 session is so expensive: botocove today uses roughly `N * 2 * 20MB` of memory
Slightly pedantic, but I'd suggest botocove is causing the leaky behaviour here, rather than boto3, and that the threading implementation, rather than any thread-safety issue, is the cause.
This tallies with your awesome `one_phase_cove` results: I'd expect the 0.25GB to be a function of thread pool workers * 1 * boto3 sessions in this implementation, as the single phase allows the GC to recycle memory holding boto3 sessions. It also means a generator might be a bit of a red herring we don't need, given boto3 is already lazily loaded but once loaded has the unavoidable chunky 20MB footprint.
A quick hack on your testing (altering the threadworkers value to 10 and 40) seems to bear this theory out (and also, as an interesting side effect, helps profile the optimum number of workers for memory pressure on my 13" Intel MBP with 16GB RAM - I did see a few rate limit exceptions get swallowed in the logs, so this probably isn't the purest of tests, but I think it's valid for our case):

Exciting stuff! I'll get a PR up if you'd like to collab on implementing the refactor to 1-phase: seems prudent to expose a threadworker variable too now. | 2022-01-20T21:01:50 | 0.0 | [] | [] |
||
connelldave/botocove | connelldave__botocove-13 | 5e091335b57211be41c5941e352d418f5997d39c | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d3da0e..6b8247e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
+## [1.3.1] - 2021-09-27
+### Fixed
+- Fixed bug where a large number of accounts would cause the AWS DescribeAccount API to throttle and throw an exception
## [1.3.0] - 2021-07-1
### Added
- Added option to set a custom `RoleSessionName` parameter in `sts.assume_role()` calls for the `cove` decorator.
diff --git a/botocove/cove_decorator.py b/botocove/cove_decorator.py
index 5d00e2d..02808fd 100644
--- a/botocove/cove_decorator.py
+++ b/botocove/cove_decorator.py
@@ -8,6 +8,7 @@
import boto3
from boto3.session import Session
from botocore.config import Config
+from botocore.exceptions import ClientError
from botocove.cove_session import CoveSession
@@ -37,7 +38,17 @@ def _get_cove_session(
) -> CoveSession:
role_arn = f"arn:aws:iam::{account_id}:role/{rolename}"
if org_master:
- account_details = org_client.describe_account(AccountId=account_id)["Account"]
+ try:
+ account_details = org_client.describe_account(AccountId=account_id)[
+ "Account"
+ ]
+ except ClientError:
+ logger.exception(f"Failed to call describe_account for {account_id}")
+ account_details = {
+ "Id": account_id,
+ "RoleSessionName": role_session_name,
+ }
+
else:
account_details = {"Id": account_id, "RoleSessionName": role_session_name}
cove_session = CoveSession(account_details)
diff --git a/pyproject.toml b/pyproject.toml
index abe565a..bfcbc55 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "botocove"
-version = "1.3.0"
+version = "1.3.1"
description = "A decorator to allow running a function against all AWS accounts in an organization"
authors = ["Dave Connell <[email protected]>"]
license = "LGPL-3.0-or-later"
| botocove sometimes crashes when describing accounts
Sometimes I get an error like this when I run botocove:
```
TooManyRequestsException: An error occurred (TooManyRequestsException) when calling the DescribeAccount operation (reached max retries: 4): AWS Organizations can't complete your request because another request is already in progress. Try again later.
```
I'm running it in an organization with 915 accounts.
Can we do something to make it more robust here?
Do you need a complete example of what I'm doing? I haven't included example code here because I'm under the impression that this could happen with any query at the right scale.
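One possibly relevant knob, offered as a sketch rather than a confirmed fix: botocore's retry configuration supports an adaptive client-side rate limiter aimed at exactly this kind of throttling:

```python
import boto3
from botocore.config import Config

# "adaptive" retry mode adds a client-side rate limiter on top of the
# exponential backoff already applied to throttling errors such as
# TooManyRequestsException.
org_client = boto3.client(
    "organizations",
    config=Config(retries={"max_attempts": 10, "mode": "adaptive"}),
)
```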
| 2021-09-26T19:19:21 | 0.0 | [] | [] |
|||
icecube/skyllh | icecube__skyllh-166 | eb3baf4f5888c846d388dc963ad856da7c28c0f2 | diff --git a/CHANGELOG.txt b/CHANGELOG.txt
index 645420b1d6..5139b7aa16 100644
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -2,6 +2,11 @@ This file contains a log-book for major changes between releases.
v23.2.1
=======
+- Add access operator support for core.dataset.DatasetCollection.
+
+ - Individual datasets of a dataset collection (``dsc``) can now be accessed
+ via ``dsc[name]`` or ``dsc[name1, name2, ...]``.
+
- Allow the definition of an origin of a dataset via the
core.dataset.DatasetOrigin class and download the dataset automatically from
the origin to the local host. The following transfer methods are provided:
diff --git a/doc/sphinx/tutorials/publicdata_ps.ipynb b/doc/sphinx/tutorials/publicdata_ps.ipynb
index af8ecd6312..a65cebc0b4 100644
--- a/doc/sphinx/tutorials/publicdata_ps.ipynb
+++ b/doc/sphinx/tutorials/publicdata_ps.ipynb
@@ -158,7 +158,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "The individual data sets ``IC86_II``, ``IC86_III``, ``IC86_IV``, ``IC86_V``, ``IC86_VI``, and ``IC86_VII`` are also available as a single combined data set ``IC86_II-VII``, because these data sets share the same detector simulation and event selection. Hence, we can get a list of data sets via the ``get_datasets`` method of the ``dsc`` instance:"
+ "The individual data sets ``IC86_II``, ``IC86_III``, ``IC86_IV``, ``IC86_V``, ``IC86_VI``, and ``IC86_VII`` are also available as a single combined data set ``IC86_II-VII``, because these data sets share the same detector simulation and event selection. Hence, we can get a list of data sets via the access operator ``[dataset1, dataset2, ...]`` of the ``dsc`` instance:"
]
},
{
@@ -167,7 +167,7 @@
"metadata": {},
"outputs": [],
"source": [
- "datasets = dsc.get_datasets(['IC40', 'IC59', 'IC79', 'IC86_I', 'IC86_II-VII'])"
+ "datasets = dsc['IC40', 'IC59', 'IC79', 'IC86_I', 'IC86_II-VII']"
]
},
{
diff --git a/doc/sphinx/tutorials/publicdata_ps_timedep.ipynb b/doc/sphinx/tutorials/publicdata_ps_timedep.ipynb
index 391c170feb..5f445f03b7 100644
--- a/doc/sphinx/tutorials/publicdata_ps_timedep.ipynb
+++ b/doc/sphinx/tutorials/publicdata_ps_timedep.ipynb
@@ -70,7 +70,7 @@
"dsc = create_dataset_collection(\n",
" cfg=cfg,\n",
" base_path=\"/home/mwolf/projects/publicdata_ps/\")\n",
- "datasets = dsc.get_datasets([\"IC86_II-VII\"])"
+ "datasets = dsc[\"IC86_II-VII\", ]"
]
},
{
diff --git a/skyllh/core/dataset.py b/skyllh/core/dataset.py
index 4ad7d417be..4023f3ba55 100644
--- a/skyllh/core/dataset.py
+++ b/skyllh/core/dataset.py
@@ -2382,6 +2382,39 @@ def verqualifiers(self):
ds_name = list(self._datasets.keys())[0]
return self._datasets[ds_name].verqualifiers
+ def __getitem__(
+ self,
+ key,
+ ):
+ """Implementation of the access operator ``[key]``.
+
+ Parameters
+ ----------
+ key : str | sequence of str
+ The name or names of the dataset(s) that should get retrieved from
+ this dataset collection.
+
+ Returns
+ -------
+ datasets : instance of Dataset | list of instance of Dataset
+ The dataset instance or the list of dataset instances corresponding
+ to the given key.
+ """
+ if not issequence(key):
+ return self.get_dataset(key)
+
+ if not issequenceof(key, str):
+ raise TypeError(
+ 'The key for the access operator must be an instance of str or '
+ 'a sequence of str instances!')
+
+ datasets = [
+ self.get_dataset(name)
+ for name in key
+ ]
+
+ return datasets
+
def __iadd__(self, ds):
"""Implementation of the ``self += dataset`` operation to add a
Dataset object to this dataset collection.
diff --git a/skyllh/datasets/i3/PublicData_10y_ps_wMC.py b/skyllh/datasets/i3/PublicData_10y_ps_wMC.py
index 2ef405294e..bc404a1fe7 100644
--- a/skyllh/datasets/i3/PublicData_10y_ps_wMC.py
+++ b/skyllh/datasets/i3/PublicData_10y_ps_wMC.py
@@ -60,7 +60,7 @@ def create_dataset_collection(
IC86_VI,
IC86_VII,
IC86_II_VII,
- ) = dsc.get_datasets((
+ ) = dsc[
'IC40',
'IC59',
'IC79',
@@ -72,7 +72,7 @@ def create_dataset_collection(
'IC86_VI',
'IC86_VII',
'IC86_II-VII',
- ))
+ ]
IC40.mc_pathfilename_list = 'sim/IC40_MC.npy'
IC59.mc_pathfilename_list = 'sim/IC59_MC.npy'
IC79.mc_pathfilename_list = 'sim/IC79_MC.npy'
| Implement __getitem__ for DatasetCollection
This would allow getting a dataset or a list of datasets from the dataset collection, like from a dictionary.
https://github.com/icecube/skyllh/blob/9de6228da7f2abb843d489dd2970d82f0ef380a9/skyllh/core/dataset.py#L1425
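A sketch of the intended ergonomics, matching the patch above (the dataset names are only illustrative):

```python
# Single dataset by name, like a dict lookup:
ds = dsc["IC86_I"]

# Several datasets at once via a tuple of names:
datasets = dsc["IC40", "IC59", "IC79"]

# A one-element tuple (note the trailing comma) yields a one-element list:
ic86 = dsc["IC86_II-VII", ]
```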
| 2023-08-08T13:22:13 | 0.0 | [] | [] |
|||
icecube/skyllh | icecube__skyllh-154 | 4a5adbb1ca07e9d227bc2c7eb7c6602af4ff8ede | diff --git a/CHANGELOG.txt b/CHANGELOG.txt
index a6a4959421..f2023c622b 100644
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -37,6 +37,8 @@ v23.2.0
Improved calculation of right-ascention difference. Thus, speed-up in
trial generation when using this event selection method.
+- core.scrambling.DataScrambler.scramble_data method also takes Dataset instance
+ as argument
- Usage of the tqdm Python package for progress bars.
- More unit tests.
- Improved documentation.
diff --git a/examples/scrambling.py b/examples/scrambling.py
index 69b0fc1488..b704434bb3 100644
--- a/examples/scrambling.py
+++ b/examples/scrambling.py
@@ -1,11 +1,14 @@
# -*- coding: utf-8 -*-
"""
-Example how to use the data scrambling mechanism of skyllh.
+Example how to use the data scrambling mechanism of SkyLLH.
"""
import numpy as np
+from skyllh.core.livetime import (
+ Livetime,
+)
from skyllh.core.random import (
RandomStateService,
)
@@ -13,11 +16,28 @@
DataScrambler,
UniformRAScramblingMethod,
)
+from skyllh.core.times import (
+ LivetimeTimeGenerationMethod,
+ TimeGenerator,
+)
+
+from skyllh.i3.scrambling import (
+ I3TimeScramblingMethod,
+)
-def gen_data(rss, N=100, window=(0, 365)):
- """Create uniformly distributed data on sphere. """
- arr = np.empty((N,), dtype=[("ra", np.float64), ("dec", np.float64)])
+def gen_data(rss, N):
+ """Create uniformly distributed data on sphere.
+ """
+ arr = np.empty(
+ (N,),
+ dtype=[
+ ("azi", np.float64),
+ ("zen", np.float64),
+ ("ra", np.float64),
+ ("dec", np.float64),
+ ("time", np.float64),
+ ])
arr["ra"] = rss.random.uniform(0., 2.*np.pi, N)
arr["dec"] = rss.random.uniform(-np.pi, np.pi, N)
@@ -25,18 +45,67 @@ def gen_data(rss, N=100, window=(0, 365)):
return arr
-if __name__ == '__main__':
+def ex1():
+ """Data scrambling via right-ascention scrambling.
+ """
+ print("Example 1")
+ print("=========")
+
rss = RandomStateService(seed=1)
- # Generate some psydo data.
- data = gen_data(rss, N=10)
- print(data['ra'])
+ # Generate some pseudo data.
+ data = gen_data(rss=rss, N=10)
+ print(f'before scrambling: data["ra"]={data["ra"]}')
# Create DataScrambler instance with uniform RA scrambling.
- scr = DataScrambler(method=UniformRAScramblingMethod())
+ scrambler = DataScrambler(
+ method=UniformRAScramblingMethod())
# Scramble the data.
- scr.scramble_data(
+ scrambler.scramble_data(
rss=rss,
+ dataset=None,
data=data)
- print(data['ra'])
+
+ print(f'after scrambling: data["ra"]={data["ra"]}')
+
+
+def ex2():
+ """Data scrambling via detector on-time scrambling.
+ """
+ print("Example 2")
+ print("=========")
+
+ rss = RandomStateService(seed=1)
+
+ # Generate some pseudo data.
+ data = gen_data(rss=rss, N=10)
+ print(f'before scrambling: data["ra"]={data["ra"]}')
+
+ # Create a Livetime object, which defines the detector live-time.
+ lt = Livetime(uptime_mjd_intervals_arr=np.array(
+ [
+ [55000, 56000],
+ [60000, 69000]
+ ],
+ dtype=np.float64))
+
+ # Create a TimeGenerator with an on-time time generation method.
+ timegen = TimeGenerator(method=LivetimeTimeGenerationMethod(livetime=lt))
+
+ # Create DataScrambler with IceCube time scrambing method.
+ scrambler = DataScrambler(
+ method=I3TimeScramblingMethod(timegen))
+
+ # Scramble the data.
+ scrambler.scramble_data(
+ rss=rss,
+ dataset=None,
+ data=data)
+
+ print(f'after scrambling: data["ra"]={data["ra"]}')
+
+
+if __name__ == '__main__':
+ ex1()
+ ex2()
diff --git a/skyllh/core/background_generation.py b/skyllh/core/background_generation.py
index b9b7d99029..da5e78d719 100644
--- a/skyllh/core/background_generation.py
+++ b/skyllh/core/background_generation.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
import abc
-import numpy as np
+import numpy as np
from skyllh.core.config import (
CFG,
@@ -33,7 +33,8 @@
class BackgroundGenerationMethod(
object,
- metaclass=abc.ABCMeta):
+ metaclass=abc.ABCMeta,
+):
"""This is the abstract base class for a detector specific background
generation method.
"""
@@ -62,7 +63,8 @@ def generate_events(
data,
mean,
tl=None,
- **kwargs):
+ **kwargs,
+ ):
"""This method is supposed to generate a `mean` number of background
events for the given dataset and its data.
@@ -102,7 +104,8 @@ def generate_events(
class MCDataSamplingBkgGenMethod(
- BackgroundGenerationMethod):
+ BackgroundGenerationMethod,
+):
"""This class implements the method to generate background events from
monte-carlo (MC) data by sampling events from the MC data set according to a
probability value given for each event. Functions can be provided to get the
@@ -118,7 +121,8 @@ def __init__(
mc_inplace_scrambling=False,
keep_mc_data_fields=None,
pre_event_selection_method=None,
- **kwargs):
+ **kwargs,
+ ):
"""Creates a new instance of the MCDataSamplingBkgGenMethod class.
Parameters
@@ -361,7 +365,8 @@ def generate_events(
data,
mean=None,
poisson=True,
- tl=None):
+ tl=None,
+ ):
"""Generates a ``mean`` number of background events for the given
dataset and its data.
@@ -520,6 +525,7 @@ def generate_events(
with TaskTimer(tl, 'Scramble MC background data.'):
bkg_events = self._data_scrambler.scramble_data(
rss=rss,
+ dataset=dataset,
data=bkg_events,
copy=False)
diff --git a/skyllh/core/scrambling.py b/skyllh/core/scrambling.py
index 3a8b02a756..19bc475a7c 100644
--- a/skyllh/core/scrambling.py
+++ b/skyllh/core/scrambling.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import abc
+
import numpy as np
from skyllh.core.times import (
@@ -10,7 +11,8 @@
class DataScramblingMethod(
object,
- metaclass=abc.ABCMeta):
+ metaclass=abc.ABCMeta,
+):
"""Base class for implementing a data scrambling method.
"""
@@ -22,7 +24,9 @@ def __init__(self, **kwargs):
def scramble(
self,
rss,
- data):
+ dataset,
+ data,
+ ):
"""The scramble method implements the actual scrambling of the given
data, which is method dependent. The scrambling must be performed
in-place, i.e. it alters the data inside the given data array.
@@ -32,6 +36,8 @@ def scramble(
rss : instance of RandomStateService
The random state service providing the random number
generator (RNG).
+ dataset : instance of Dataset
+ The instance of Dataset for which the data should get scrambled.
data : instance of DataFieldRecordArray
The DataFieldRecordArray containing the to be scrambled data.
@@ -44,16 +50,21 @@ def scramble(
class UniformRAScramblingMethod(
- DataScramblingMethod):
+ DataScramblingMethod,
+):
r"""The UniformRAScramblingMethod method performs right-ascention scrambling
uniformly within a given RA range. By default it's (0, 2\pi).
- Note: This alters only the ``ra`` values of the data!
+ :note::
+
+ This alters only the ``ra`` values of the data!
+
"""
def __init__(
self,
ra_range=None,
- **kwargs):
+ **kwargs,
+ ):
r"""Initializes a new RAScramblingMethod instance.
Parameters
@@ -90,7 +101,9 @@ def ra_range(self, ra_range):
def scramble(
self,
rss,
- data):
+ dataset,
+ data,
+ ):
"""Scrambles the given data uniformly in right-ascention.
Parameters
@@ -98,6 +111,8 @@ def scramble(
rss : instance of RandomStateService
The random state service providing the random number
generator (RNG).
+ dataset : instance of Dataset
+ The instance of Dataset for which the data should get scrambled.
data : instance of DataFieldRecordArray
The DataFieldRecordArray instance containing the to be scrambled
data.
@@ -127,7 +142,8 @@ def __init__(
self,
timegen,
hor_to_equ_transform,
- **kwargs):
+ **kwargs,
+ ):
"""Initializes a new time scramling method instance.
Parameters
@@ -182,7 +198,9 @@ def hor_to_equ_transform(self, transform):
def scramble(
self,
rss,
- data):
+ dataset,
+ data,
+ ):
"""Scrambles the given data based on random MJD times, which are
generated from a TimeGenerator instance. The event's right-ascention and
declination coordinates are calculated via a horizontal-to-equatorial
@@ -193,6 +211,8 @@ def scramble(
rss : instance of RandomStateService
The random state service providing the random number
generator (RNG).
+ dataset : instance of Dataset
+ The instance of Dataset for which the data should get scrambled.
data : instance of DataFieldRecordArray
The DataFieldRecordArray instance containing the to be scrambled
data.
@@ -213,10 +233,13 @@ def scramble(
class DataScrambler(
- object):
+ object,
+):
def __init__(
self,
- method):
+ method,
+ **kwargs,
+ ):
"""Creates a data scrambler instance with a given defined scrambling
method.
@@ -226,6 +249,9 @@ def __init__(
The instance of DataScramblingMethod that defines the method of
the data scrambling.
"""
+ super().__init__(
+ **kwargs)
+
self.method = method
@property
@@ -246,8 +272,10 @@ def method(self, method):
def scramble_data(
self,
rss,
+ dataset,
data,
- copy=False):
+ copy=False,
+ ):
"""Scrambles the given data by calling the scramble method of the
scrambling method class, that was configured for the data scrambler.
If the ``inplace_scrambling`` property is set to False, a copy of the
@@ -258,9 +286,11 @@ def scramble_data(
rss : instance of RandomStateService
The random state service providing the random number generator
(RNG).
+ dataset : instance of Dataset
+ The instance of Dataset for which the data should get scrambled.
data : instance of DataFieldRecordArray
- The DataFieldRecordArray instance holding the data, which should get
- scrambled.
+ The instance of DataFieldRecordArray holding the data, which should
+ get scrambled.
copy : bool
Flag if a copy of the given data should be made before scrambling
the data. The default is False.
@@ -276,6 +306,9 @@ def scramble_data(
if copy:
data = data.copy()
- data = self._method.scramble(rss, data)
+ data = self._method.scramble(
+ rss=rss,
+ dataset=dataset,
+ data=data)
return data
diff --git a/skyllh/i3/background_generation.py b/skyllh/i3/background_generation.py
index 4e4e14775f..ffae5b482e 100644
--- a/skyllh/i3/background_generation.py
+++ b/skyllh/i3/background_generation.py
@@ -1,16 +1,29 @@
# -*- coding: utf-8 -*-
-from skyllh.core.background_generation import BackgroundGenerationMethod
-from skyllh.core.scrambling import DataScrambler
+from skyllh.core.background_generation import (
+ BackgroundGenerationMethod,
+)
+from skyllh.core.py import (
+ classname,
+)
+from skyllh.core.scrambling import (
+ DataScrambler,
+)
-class FixedScrambledExpDataI3BkgGenMethod(BackgroundGenerationMethod):
+class FixedScrambledExpDataI3BkgGenMethod(
+ BackgroundGenerationMethod,
+):
"""This class implements the background event generation method for the
IceCube detector using scrambled experimental data as background hypothesis
with a fixed number of background events equal to the number of events in
the dataset. This background generation method is the one used in SkyLab.
"""
- def __init__(self, data_scrambler):
+ def __init__(
+ self,
+ data_scrambler,
+ **kwargs,
+ ):
"""Creates a new background generation method instance to generate
background events from scrambled experimental data with a fixed number
of events equal to the number of events in the dataset.
@@ -21,7 +34,7 @@ def __init__(self, data_scrambler):
The DataScrambler instance to use to generate scrambled experimental
data.
"""
- super(FixedScrambledExpDataI3BkgGenMethod, self).__init__()
+ super().__init__(**kwargs)
self.data_scrambler = data_scrambler
@@ -36,12 +49,20 @@ def data_scrambler(self, scrambler):
if not isinstance(scrambler, DataScrambler):
raise TypeError(
'The data_scrambler property must be an instance of '
- 'DataScrambler!')
+ 'DataScrambler! '
+ f'Its current type is {classname(scrambler)}!')
self._data_scrambler = scrambler
- def generate_events(self, rss, dataset, data, **kwargs):
+ def generate_events(
+ self,
+ rss,
+ dataset,
+ data,
+ **kwargs,
+ ):
"""Generates background events from the given data, by scrambling the
- data. The number of events is equal to the size of the given dataset.
+ experimental data. The number of events is equal to the size of the
+ given dataset.
Parameters
----------
@@ -63,7 +84,10 @@ def generate_events(self, rss, dataset, data, **kwargs):
The instance of DataFieldRecordArray holding the generated
background events.
"""
- # Scramble the experimental data events, but make a copy first.
- bkg_events = self._data_scrambler.scramble_data(rss, data.exp.copy())
+ bkg_events = self._data_scrambler.scramble_data(
+ rss=rss,
+ dataset=dataset,
+ data=data.exp,
+ copy=True)
return (len(bkg_events), bkg_events)
diff --git a/skyllh/i3/scrambling.py b/skyllh/i3/scrambling.py
index d6224becb4..416a7bdb33 100644
--- a/skyllh/i3/scrambling.py
+++ b/skyllh/i3/scrambling.py
@@ -6,18 +6,25 @@
DataScramblingMethod,
TimeScramblingMethod,
)
+
from skyllh.i3.utils.coords import (
azi_to_ra_transform,
hor_to_equ_transform,
)
-class I3TimeScramblingMethod(TimeScramblingMethod):
+class I3TimeScramblingMethod(
+ TimeScramblingMethod,
+):
"""The I3TimeScramblingMethod class provides a data scrambling method to
perform time scrambling of the data,
by drawing a MJD time from a given time generator.
"""
- def __init__(self, timegen):
+ def __init__(
+ self,
+ timegen,
+ **kwargs,
+ ):
"""Initializes a new I3 time scrambling instance.
Parameters
@@ -25,11 +32,19 @@ def __init__(self, timegen):
timegen : TimeGenerator
The time generator that should be used to generate random MJD times.
"""
- super(I3TimeScramblingMethod, self).__init__(timegen, hor_to_equ_transform)
+ super().__init__(
+ timegen=timegen,
+ hor_to_equ_transform=hor_to_equ_transform,
+ **kwargs)
# We override the scramble method because for IceCube we only need to change
# the ``ra`` field.
- def scramble(self, rss, data):
+ def scramble(
+ self,
+ rss,
+ dataset,
+ data,
+ ):
"""Draws a time from the time generator and calculates the right
ascention coordinate from the azimuth angle according to the time.
Sets the values of the ``time`` and ``ra`` keys of data.
@@ -39,6 +54,8 @@ def scramble(self, rss, data):
rss : RandomStateService
The random state service providing the random number
generator (RNG).
+ dataset : instance of Dataset
+ The instance of Dataset for which the data should get scrambled.
data : DataFieldRecordArray instance
The DataFieldRecordArray instance containing the to be scrambled
data.
@@ -56,12 +73,18 @@ def scramble(self, rss, data):
return data
-class I3SeasonalVariationTimeScramblingMethod(DataScramblingMethod):
+class I3SeasonalVariationTimeScramblingMethod(
+ DataScramblingMethod,
+):
"""The I3SeasonalVariationTimeScramblingMethod class provides a data
scrambling method to perform data coordinate scrambling based on a generated
time, which follows seasonal variations within the experimental data.
"""
- def __init__(self, data, **kwargs):
+ def __init__(
+ self,
+ data,
+ **kwargs,
+ ):
"""Initializes a new seasonal time scrambling instance.
Parameters
@@ -84,7 +107,12 @@ def __init__(self, data, **kwargs):
self.grl = data.grl
- def scramble(self, rss, data):
+ def scramble(
+ self,
+ rss,
+ dataset,
+ data,
+ ):
"""Scrambles the given data based on random MJD times, which are
generated uniformly within the data runs, where the data runs are
weighted based on their number of events compared to the total events.
@@ -94,6 +122,8 @@ def scramble(self, rss, data):
rss : instance of RandomStateService
The random state service providing the random number
generator (RNG).
+ dataset : instance of Dataset
+ The instance of Dataset for which the data should get scrambled.
data : instance of DataFieldRecordArray
The DataFieldRecordArray instance containing the to be scrambled
data.
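As a rough numpy sketch of the weighted-run time sampling the docstring above describes (the good-run-list field names `start`, `stop`, and `events` are assumptions, not the actual GRL schema):

```python
import numpy as np


def draw_times(rng: np.random.Generator, grl: np.ndarray, n: int) -> np.ndarray:
    # Pick runs with probability proportional to their event counts, then
    # draw a uniform MJD time inside each chosen run.
    weights = grl["events"] / grl["events"].sum()
    run_idx = rng.choice(len(grl), size=n, p=weights)
    return rng.uniform(grl["start"][run_idx], grl["stop"][run_idx])
```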
| Pass the Dataset instance as argument
The DataScrambler and DataScramblingMethod should know for which Dataset they scramble the data. This will make it possible to have Dataset-specific settings/configurations for the data scrambler.
https://github.com/icecube/skyllh/blob/01a4a53741551984a62a4d691bf3f9786c055b02/skyllh/core/scrambling.py#L205
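A minimal sketch of what threading the `dataset` argument through enables; the subclass and the `dataset.name` lookup are illustrative assumptions, not existing SkyLLH API:

```python
from skyllh.core.scrambling import DataScramblingMethod


class PerDatasetRAScramblingMethod(DataScramblingMethod):
    """Hypothetical scrambling method with a per-dataset RA range."""

    def __init__(self, ra_range_per_dataset, **kwargs):
        super().__init__(**kwargs)
        # Mapping of dataset name -> (ra_min, ra_max).
        self._ra_ranges = ra_range_per_dataset

    def scramble(self, rss, dataset, data):
        # Assumes Dataset exposes a ``name`` attribute and that
        # ``data['ra']`` behaves like an ndarray, as in the examples above.
        (ra_min, ra_max) = self._ra_ranges[dataset.name]
        data["ra"] = rss.random.uniform(ra_min, ra_max, size=data["ra"].size)
        return data
```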
| 2023-06-30T08:01:25 | 0.0 | [] | [] |
|||
icecube/skyllh | icecube__skyllh-113 | fad94b8edcf86f80a7902501f01e1c2de35cae32 | diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 7a85d4eddd..41cc565769 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -12,12 +12,12 @@ jobs:
deploy:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
python-version: '3.8'
@@ -31,7 +31,7 @@ jobs:
run: echo "::set-output name=dir::$(pip cache dir)"
- name: Cache dependencies
- uses: actions/cache@v2
+ uses: actions/cache@v3
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 8abf32fdc5..71dfd83e66 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -12,15 +12,15 @@ on:
jobs:
build:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-20.04
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
diff --git a/doc/user_manual.pdf b/doc/user_manual.pdf
index 29de440cbb..2d393319d3 100644
Binary files a/doc/user_manual.pdf and b/doc/user_manual.pdf differ
diff --git a/doc/user_manual.tex b/doc/user_manual.tex
index 90d77391ea..4b39ca5e22 100644
--- a/doc/user_manual.tex
+++ b/doc/user_manual.tex
@@ -446,17 +446,32 @@ \subsection{Stacking of Sources}
In general a likelihood value can be calculated for a set of $K$ stacked
sources in a weighted fashion. In this case the signal PDF expression of
equation (\ref{eq:Si}) becomes a bit more complicated due to the relative
-source weighting. The sources must be weighted according to their signal detection
-efficiency, $\mathcal{Y}_{\mathrm{s},k}$, and a relative strength weight of the
-sources, $W_k$, with $\sum_{k=1}^{K} W_k = 1$. Hence, the combined signal PDF is
-given as
+source weighting. The sources must be weighted according to their signal
+detector yield, $\mathcal{Y}_{\mathrm{s}_k}$, and a relative strength weight of
+the sources, $W_k$. Hence, the combined signal PDF is given as
\begin{equation}
- \mathcal{S}_i(\ps) \equiv \frac{\sum_{k=1}^{K} W_k \mathcal{Y}_{\mathrm{s}}(\xsk,\psk) \mathcal{S}_{i}(\psk)}{\sum_{k=1}^{K}W_k\mathcal{Y}_{\mathrm{s}}(\xsk,\psk)}.
+ \mathcal{S}_i(\ps) \equiv \frac{\sum_{k=1}^{K} W_k \mathcal{Y}_{\mathrm{s}_k}(\psk) \mathcal{S}_{i}(\psk)}{\sum_{k=1}^{K}W_k\mathcal{Y}_{\mathrm{s}_k}(\psk)}.
\label{eq:SiStacking}
\end{equation}
One should note that this formalism allows for different source properties, e.g.
energy spectra, for the various sources.
+With the definitions
+\begin{equation}
+a_k(\psk) \equiv W_k \mathcal{Y}_{\mathrm{s}_k}(\psk),
+\label{eq:SourceWeightCoefficient}
+\end{equation}
+and
+\begin{equation}
+A(\ps) \equiv \sum_{k=1}^{K} a_k(\psk),
+\label{eq:SumOfSourceWeightCoefficients}
+\end{equation}
+equation (\ref{eq:SiStacking}) reads
+\begin{equation}
+\mathcal{S}_i(\ps) \equiv \frac{1}{A(\ps)} \sum_{k=1}^{K} a_k(\psk) \mathcal{S}_{i}(\psk).
+\label{eq:SiStackingA}
+\end{equation}
+
\subsection{Gradients of the Log-Likelihood Ratio}
@@ -555,45 +570,45 @@ \subsection{Gradients of the Log-Likelihood Ratio}
For stacked sources the expression for $\mathcal{R}_i(\ps)$ in equation (\ref{eq:Ri})
becomes slightly more complicated due to the source strength weighting.
With equation (\ref{eq:SiStacking}) and the definitions
+(\ref{eq:SourceWeightCoefficient}), and
+(\ref{eq:SumOfSourceWeightCoefficients}), it is given by
\begin{equation}
- a_k(\xsk,\psk) = W_k\mathcal{Y}_{\mathrm{s}}(\xsk,\psk),
-\end{equation}
-and
-\begin{equation}
- A(\ps) = \sum_{k=1}^{K} a_k(\xsk,\psk),
-\end{equation}
-it is given by
-\begin{equation}
-\mathcal{R}_i(\ps) = \frac{\mathcal{S}_i(\ps)}{\mathcal{B}_i} = \frac{1}{A(\ps)} \sum_{k=1}^{K} a_k(\xsk,\psk) \frac{\mathcal{S}_{i}(\psk)}{\mathcal{B}_{i}}.
+\mathcal{R}_i(\ps) = \frac{\mathcal{S}_i(\ps)}{\mathcal{B}_i} = \frac{1}{A(\ps)} \sum_{k=1}^{K} a_k(\psk) \frac{\mathcal{S}_{i}(\psk)}{\mathcal{B}_{i}}.
\label{eq:RiStacking}
\end{equation}
-The signal over background ratio $\mathcal{S}_{i}(\psk) / \mathcal{B}_{i} \equiv \mathcal{R}_{k,i}(\psk)$
-for the single source $k$ is then given by equation (\ref{eq:Ri}).
+The signal over background ratio $\mathcal{S}_{i}(\psk) / \mathcal{B}_{i} \equiv \mathcal{R}_{i,k}(\psk)$
+for the source $k$ is then given by equation (\ref{eq:Ri}).
-Using the same set of source fit parameters $\ps$ for all sources, i.e. called
-global source fit parameters, the derivative of $\mathcal{R}_i(\ps)$ for
-all stacked sources w.r.t. the single global source fit parameter,
+Using the same set of source fit parameters $\ps$ for all sources, the
+derivative of $\mathcal{R}_i(\ps)$ for
+all stacked sources w.r.t. the single source fit parameter,
$p_{\mathrm{s}}$, is then given by
\begin{equation}
- \frac{\mathrm{d} \mathcal{R}_{i}(\ps)}{\mathrm{d} p_{\mathrm{s}}} = - \frac{1}{A^2} \frac{\mathrm{d} A}{\mathrm{d} p_{\mathrm{s}}} \sum_{k=1}^{K} a_{k} \mathcal{R}_{k,i}(\psk) + \frac{1}{A}\sum_{k=1}^{K} \left( \frac{\mathrm{d} a_{k}}{\mathrm{d} p_{\mathrm{s}}}\mathcal{R}_{k,i}(\psk) + a_{k}\frac{\mathrm{d} \mathcal{R}_{k,i}(\psk)}{\mathrm{d} p_{\mathrm{s}}} \right).
+ \frac{\mathrm{d} \mathcal{R}_{i}(\ps)}{\mathrm{d} p_{\mathrm{s}}} = - \frac{1}{A^2} \frac{\mathrm{d} A}{\mathrm{d} p_{\mathrm{s}}} \sum_{k=1}^{K} a_{k} \mathcal{R}_{i,k}(\psk) + \frac{1}{A}\sum_{k=1}^{K} \left( \frac{\mathrm{d} a_{k}}{\mathrm{d} p_{\mathrm{s}}}\mathcal{R}_{i,k}(\psk) + a_{k}\frac{\mathrm{d} \mathcal{R}_{i,k}(\psk)}{\mathrm{d} p_{\mathrm{s}}} \right).
\end{equation}
Using $\mathcal{R}_i(\ps)$ from equation (\ref{eq:RiStacking}) it simplifies to
\begin{equation}
- \frac{\mathrm{d} \mathcal{R}_{i}(\ps)}{\mathrm{d} p_{\mathrm{s}}} = \frac{1}{A(\ps)}\left[ -\mathcal{R}_i(\ps)\frac{\mathrm{d} A}{\mathrm{d} p_{\mathrm{s}}} + \sum_{k=1}^{K} \left( \frac{\mathrm{d} a_{k}}{\mathrm{d} p_{\mathrm{s}}}\mathcal{R}_{k,i}(\psk) + a_{k}\frac{\mathrm{d} \mathcal{R}_{k,i}(\psk)}{\mathrm{d} p_{\mathrm{s}}} \right) \right],
+ \frac{\mathrm{d} \mathcal{R}_{i}(\ps)}{\mathrm{d} p_{\mathrm{s}}} = \frac{1}{A(\ps)}\left[ -\mathcal{R}_i(\ps)\frac{\mathrm{d} A}{\mathrm{d} p_{\mathrm{s}}} + \sum_{k=1}^{K} \left( \frac{\mathrm{d} a_{k}}{\mathrm{d} p_{\mathrm{s}}}\mathcal{R}_{i,k}(\psk) + a_{k}\frac{\mathrm{d} \mathcal{R}_{i,k}(\psk)}{\mathrm{d} p_{\mathrm{s}}} \right) \right],
\label{eq:gradRi}
\end{equation}
-with the derivative of $A(\ps)$ given by
+with the derivative of equation (\ref{eq:SumOfSourceWeightCoefficients}) given by
+\begin{equation}
+ \frac{\mathrm{d} A}{\mathrm{d} p_{\mathrm{s}}} = \sum_{k=1}^{K} \frac{\mathrm{d} a_k}{\mathrm{d} p_{\mathrm{s}}},
+\end{equation}
+with
\begin{equation}
- \frac{\mathrm{d} A}{\mathrm{d} p_{\mathrm{s}}} = \sum_{k=1}^{K} \frac{\mathrm{d} a_k}{\mathrm{d} p_{\mathrm{s}}}.
+\frac{\mathrm{d} a_k}{\mathrm{d} p_{\mathrm{s}}} = W_k \frac{\mathrm{d}\mathcal{Y}_{\mathrm{s}_{k}}(\psk)}{\mathrm{d}{p_s}_k},
\end{equation}
+where the local source parameter $p_{\mathrm{s}_{k}}$ maps to the correct global
+parameter $p_{\mathrm{s}}$.
-In case one would fit each source individually with its own set of signal fit
-parameters, $\vec{p}_{\mathrm{s},k}$, $\ps$ would be a set of $K$ sets
-of source fit parameters $\vec{p}_{\mathrm{s},k}$, and a derivative for each
-individual source fit parameter $p_{\mathrm{s},k}$ would have to be calculated.
-The expression for such a derivative would be similar to equation (\ref{eq:gradRi}),
-but only the summand for the particular source, for which the fit parameter is for, would
-contribute.
+In case one would fit each source individually with its own set of source fit
+parameters, $\psk$, $\ps$ would be a set of $K$ sets
+of source fit parameters $\psk$, and a derivative for each
+global fit parameter $\ps$ would have to be calculated.
+The expression for such a derivative would be equivalent to equation
+(\ref{eq:gradRi}), but only the summand for the source, which depends on the
+particular global fit parameter, would contribute.
\subsection{Multiple Datasets}
diff --git a/skyllh/core/analysis.py b/skyllh/core/analysis.py
index d32547476f..13bfb5e083 100644
--- a/skyllh/core/analysis.py
+++ b/skyllh/core/analysis.py
@@ -49,7 +49,10 @@
from skyllh.core.multiproc import get_ncpu, parallelize
from skyllh.core.background_generation import BackgroundGenerationMethod
from skyllh.core.background_generator import BackgroundGenerator
-from skyllh.core.signal_generator import SignalGenerator
+from skyllh.core.signal_generator import (
+ SignalGeneratorBase,
+ SignalGenerator
+)
from skyllh.physics.source import SourceModel
@@ -85,7 +88,7 @@ class Analysis(object, metaclass=abc.ABCMeta):
"""
def __init__(self, src_hypo_group_manager, src_fitparam_mapper,
- test_statistic, bkg_gen_method=None, custom_sig_generator=None):
+ test_statistic, bkg_gen_method=None, sig_generator_cls=None):
"""Constructor of the analysis base class.
Parameters
@@ -104,9 +107,10 @@ def __init__(self, src_hypo_group_manager, src_fitparam_mapper,
The instance of BackgroundGenerationMethod that should be used to
generate background events for pseudo data. This can be set to None,
if there is no need to generate background events.
- custom_sig_generator : SignalGenerator class | None
- The signal generator class used to create `_sig_generator` instance.
- Uses default `SignalGenerator` implementation if set to None.
+ sig_generator_cls : SignalGeneratorBase class | None
+ The signal generator class used to create the signal generator
+ instance.
+ If set to None, the `SignalGenerator` class is used.
"""
# Call the super function to allow for multiple class inheritance.
super(Analysis, self).__init__()
@@ -115,6 +119,7 @@ def __init__(self, src_hypo_group_manager, src_fitparam_mapper,
self.src_fitparam_mapper = src_fitparam_mapper
self.test_statistic = test_statistic
self.bkg_gen_method = bkg_gen_method
+ self.sig_generator_cls = sig_generator_cls
self._dataset_list = []
self._data_list = []
@@ -131,7 +136,6 @@ def __init__(self, src_hypo_group_manager, src_fitparam_mapper,
# Predefine the variable for the background and signal generators.
self._bkg_generator = None
self._sig_generator = None
- self._custom_sig_generator = custom_sig_generator
@property
def src_hypo_group_manager(self):
@@ -250,6 +254,22 @@ def bkg_generator(self):
"""
return self._bkg_generator
+ @property
+ def sig_generator_cls(self):
+ """The signal generator class that should be used to construct the
+ signal generator instance.
+ """
+ return self._sig_generator_cls
+ @sig_generator_cls.setter
+ def sig_generator_cls(self, cls):
+ if cls is None:
+ cls = SignalGenerator
+ if not issubclass(cls, SignalGeneratorBase):
+ raise TypeError(
+                'The sig_generator_cls property must be a subclass of '
+ 'SignalGeneratorBase!')
+ self._sig_generator_cls = cls
+
@property
def sig_generator(self):
"""(read-only) The signal generator instance. Is None if the signal
@@ -369,12 +389,10 @@ def construct_signal_generator(self):
Analysis class instance. The signal generation method has to be set
through the source hypothesis group.
"""
- if self._custom_sig_generator is None:
- self._sig_generator = SignalGenerator(
- self._src_hypo_group_manager, self._dataset_list, self._data_list)
- else:
- self._sig_generator = self._custom_sig_generator(
- self._src_hypo_group_manager, self._dataset_list, self._data_list)
+ self._sig_generator = self.sig_generator_cls(
+ src_hypo_group_manager=self._src_hypo_group_manager,
+ dataset_list=self._dataset_list,
+ data_list=self._data_list)
@abc.abstractmethod
def initialize_trial(self, events_list, n_events_list=None):
@@ -536,7 +554,7 @@ def generate_signal_events(
Poisson distribution with this given signal mean as mean.
sig_kwargs : dict | None
Additional keyword arguments for the `generate_signal_events` method
- of the `SignalGenerator` class. An usual keyword argument is
+            of the `sig_generator_cls` class. A common keyword argument is
`poisson`.
n_events_list : list of int | None
If given, it specifies the number of events of each data set already
@@ -1033,7 +1051,7 @@ class TimeIntegratedMultiDatasetSingleSourceAnalysis(Analysis):
:meth:`maximize_llhratio` method.
"""
def __init__(self, src_hypo_group_manager, src_fitparam_mapper, fitparam_ns,
- test_statistic, bkg_gen_method=None, custom_sig_generator=None):
+ test_statistic, bkg_gen_method=None, sig_generator_cls=None):
"""Creates a new time-integrated point-like source analysis assuming a
single source.
@@ -1055,17 +1073,21 @@ def __init__(self, src_hypo_group_manager, src_fitparam_mapper, fitparam_ns,
The instance of BackgroundGenerationMethod that will be used to
generate background events for a new analysis trial. This can be set
to None, if no background events have to get generated.
- custom_sig_generator : SignalGenerator class | None
- The signal generator class used to create `_sig_generator` instance.
- Uses default `SignalGenerator` implementation if set to None.
+ sig_generator_cls : SignalGeneratorBase class | None
+ The signal generator class used to create the signal generator
+ instance.
+ If set to None, the `SignalGenerator` class is used.
"""
if(not isinstance(src_fitparam_mapper, SingleSourceFitParameterMapper)):
raise TypeError('The src_fitparam_mapper argument must be an '
'instance of SingleSourceFitParameterMapper!')
- super(TimeIntegratedMultiDatasetSingleSourceAnalysis, self).__init__(
- src_hypo_group_manager, src_fitparam_mapper, test_statistic,
- bkg_gen_method, custom_sig_generator)
+ super().__init__(
+ src_hypo_group_manager=src_hypo_group_manager,
+ src_fitparam_mapper=src_fitparam_mapper,
+ test_statistic=test_statistic,
+ bkg_gen_method=bkg_gen_method,
+ sig_generator_cls=sig_generator_cls)
self.fitparam_ns = fitparam_ns
@@ -1415,7 +1437,7 @@ class TimeIntegratedMultiDatasetMultiSourceAnalysis(
"""
def __init__(
self, src_hypo_group_manager, src_fitparam_mapper, fitparam_ns,
- test_statistic, bkg_gen_method=None, custom_sig_generator=None):
+ test_statistic, bkg_gen_method=None, sig_generator_cls=None):
"""Creates a new time-integrated point-like source analysis assuming
multiple sources.
@@ -1437,17 +1459,18 @@ def __init__(
The instance of BackgroundGenerationMethod that will be used to
generate background events for a new analysis trial. This can be set
to None, if no background events have to get generated.
- custom_sig_generator : SignalGenerator class | None
- The signal generator class used to create `_sig_generator` instance.
- Uses default `SignalGenerator` implementation if set to None.
+ sig_generator_cls : SignalGeneratorBase class | None
+ The signal generator class used to create the signal generator
+ instance.
+ If set to None, the `SignalGenerator` class is used.
"""
- if(not isinstance(src_fitparam_mapper, SingleSourceFitParameterMapper)):
- raise TypeError('The src_fitparam_mapper argument must be an '
- 'instance of SingleSourceFitParameterMapper!')
-
- super(TimeIntegratedMultiDatasetMultiSourceAnalysis, self).__init__(
- src_hypo_group_manager, src_fitparam_mapper,
- fitparam_ns, test_statistic, bkg_gen_method, custom_sig_generator)
+ super().__init__(
+ src_hypo_group_manager=src_hypo_group_manager,
+ src_fitparam_mapper=src_fitparam_mapper,
+ fitparam_ns=fitparam_ns,
+ test_statistic=test_statistic,
+ bkg_gen_method=bkg_gen_method,
+ sig_generator_cls=sig_generator_cls)
def construct_llhratio(self, minimizer, ppbar=None):
"""Constructs the log-likelihood-ratio (LLH-ratio) function of the
diff --git a/skyllh/core/dataset.py b/skyllh/core/dataset.py
index 8fed0e75f2..6a3880fbbe 100644
--- a/skyllh/core/dataset.py
+++ b/skyllh/core/dataset.py
@@ -1475,7 +1475,7 @@ def add_data_preparation(self, func):
for (dsname, dataset) in self._datasets.items():
dataset.add_data_preparation(func)
- def remove_data_preparation(self, index=-1):
+ def remove_data_preparation(self, key=-1):
"""Removes data preparation function from all the datasets of this
dataset collection.
@@ -1486,7 +1486,7 @@ def remove_data_preparation(self, index=-1):
is the last added function.
"""
for (dsname, dataset) in self._datasets.items():
- dataset.remove_data_preparation(index=index)
+ dataset.remove_data_preparation(key=key)
def update_version_qualifiers(self, verqualifiers):
"""Updates the version qualifiers of all datasets of this dataset
diff --git a/skyllh/core/signal_generator.py b/skyllh/core/signal_generator.py
index 485fe6edae..cba9194678 100644
--- a/skyllh/core/signal_generator.py
+++ b/skyllh/core/signal_generator.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
+import abc
import itertools
import numpy as np
@@ -17,7 +18,117 @@
)
-class SignalGenerator(object):
+class SignalGeneratorBase(object, metaclass=abc.ABCMeta):
+ """This is the abstract base class for all signal generator classes in
+ SkyLLH. It defines the interface for signal generators.
+ """
+ def __init__(self, src_hypo_group_manager, dataset_list, data_list,
+ *args, **kwargs):
+ """Constructs a new signal generator instance.
+
+ Parameters
+ ----------
+ src_hypo_group_manager : SourceHypoGroupManager instance
+ The SourceHypoGroupManager instance defining the source hypothesis
+ groups.
+ dataset_list : list of Dataset instances
+ The list of Dataset instances for which signal events should get
+ generated for.
+ data_list : list of DatasetData instances
+ The list of DatasetData instances holding the actual data of each
+ dataset. The order must match the order of ``dataset_list``.
+ """
+ super().__init__(*args, **kwargs)
+
+ self.src_hypo_group_manager = src_hypo_group_manager
+ self.dataset_list = dataset_list
+ self.data_list = data_list
+
+ @property
+ def src_hypo_group_manager(self):
+ """The SourceHypoGroupManager instance defining the source hypothesis
+ groups.
+ """
+ return self._src_hypo_group_manager
+ @src_hypo_group_manager.setter
+ def src_hypo_group_manager(self, manager):
+ if(not isinstance(manager, SourceHypoGroupManager)):
+ raise TypeError(
+ 'The src_hypo_group_manager property must be an instance of '
+ 'SourceHypoGroupManager!')
+ self._src_hypo_group_manager = manager
+
+ @property
+ def dataset_list(self):
+ """The list of Dataset instances for which signal events should get
+ generated for.
+ """
+ return self._dataset_list
+ @dataset_list.setter
+ def dataset_list(self, datasets):
+ if(not issequenceof(datasets, Dataset)):
+ raise TypeError(
+ 'The dataset_list property must be a sequence of Dataset '
+ 'instances!')
+ self._dataset_list = list(datasets)
+
+ @property
+ def data_list(self):
+ """The list of DatasetData instances holding the actual data of each
+ dataset. The order must match the order of the ``dataset_list``
+ property.
+ """
+ return self._data_list
+ @data_list.setter
+ def data_list(self, datas):
+ if(not issequenceof(datas, DatasetData)):
+ raise TypeError(
+ 'The data_list property must be a sequence of DatasetData '
+ 'instances!')
+ self._data_list = datas
+
+ def change_source_hypo_group_manager(self, src_hypo_group_manager):
+ """Changes the source hypothesis group manager. Derived classes can
+ reimplement this method but this method of the base class must still be
+ called by the derived class.
+ """
+ self.src_hypo_group_manager = src_hypo_group_manager
+
+ @abc.abstractmethod
+ def generate_signal_events(self, rss, mean, poisson=True):
+ """This abstract method must be implemented by the derived class to
+ generate a given number of signal events.
+
+ Parameters
+ ----------
+ rss : instance of RandomStateService
+ The instance of RandomStateService providing the random number
+ generator state.
+ mean : float
+ The mean number of signal events. If the ``poisson`` argument is set
+ to True, the actual number of generated signal events will be drawn
+ from a Poisson distribution with this given mean value of signal
+ events.
+ poisson : bool
+ If set to True, the actual number of generated signal events will
+ be drawn from a Poisson distribution with the given mean value of
+ signal events.
+ If set to False, the argument ``mean`` specifies the actual number
+ of generated signal events.
+
+ Returns
+ -------
+ n_signal : int
+ The number of generated signal events.
+ signal_events_dict : dict of DataFieldRecordArray
+            The dictionary holding the DataFieldRecordArray instances with the
+ generated signal events. Each key of this dictionary represents the
+ dataset index for which the signal events have been generated.
+ """
+ pass
+
+
+class SignalGenerator(SignalGeneratorBase):
"""This is the general signal generator class. It does not depend on the
detector or source hypothesis, because these dependencies are factored out
into the signal generation method. In fact the construction within this
@@ -25,7 +136,8 @@ class depends on the construction of the signal generation method. In case
of multiple sources the handling here is very suboptimal. Therefore the
MultiSourceSignalGenerator should be used instead!
"""
- def __init__(self, src_hypo_group_manager, dataset_list, data_list):
+ def __init__(self, src_hypo_group_manager, dataset_list, data_list,
+ *args, **kwargs):
"""Constructs a new signal generator instance.
Parameters
@@ -40,12 +152,12 @@ def __init__(self, src_hypo_group_manager, dataset_list, data_list):
The list of DatasetData instances holding the actual data of each
dataset. The order must match the order of ``dataset_list``.
"""
- super(SignalGenerator, self).__init__()
-
- self.src_hypo_group_manager = src_hypo_group_manager
-
- self.dataset_list = dataset_list
- self.data_list = data_list
+ super().__init__(
+ *args,
+ src_hypo_group_manager=src_hypo_group_manager,
+ dataset_list=dataset_list,
+ data_list=data_list,
+ **kwargs)
self._construct_signal_candidates()
@@ -102,51 +214,12 @@ def _construct_signal_candidates(self):
self._sig_candidates_weight_sum = np.sum(self._sig_candidates['weight'])
self._sig_candidates['weight'] /= self._sig_candidates_weight_sum
- @property
- def src_hypo_group_manager(self):
- """The SourceHypoGroupManager instance defining the source groups with
- their spectra.
- """
- return self._src_hypo_group_manager
- @src_hypo_group_manager.setter
- def src_hypo_group_manager(self, manager):
- if(not isinstance(manager, SourceHypoGroupManager)):
- raise TypeError('The src_hypo_group_manager property must be an '
- 'instance of SourceHypoGroupManager!')
- self._src_hypo_group_manager = manager
-
- @property
- def dataset_list(self):
- """The list of Dataset instances for which signal events should get
- generated for.
- """
- return self._dataset_list
- @dataset_list.setter
- def dataset_list(self, datasets):
- if(not issequenceof(datasets, Dataset)):
- raise TypeError('The dataset_list property must be a sequence of '
- 'Dataset instances!')
- self._dataset_list = list(datasets)
-
- @property
- def data_list(self):
- """The list of DatasetData instances holding the actual data of each
- dataset. The order must match the order of the ``dataset_list``
- property.
- """
- return self._data_list
- @data_list.setter
- def data_list(self, datas):
- if(not issequenceof(datas, DatasetData)):
- raise TypeError('The data_list property must be a sequence of '
- 'DatasetData instances!')
- self._data_list = datas
-
def change_source_hypo_group_manager(self, src_hypo_group_manager):
"""Recreates the signal candidates with the changed source hypothesis
group manager.
"""
- self.src_hypo_group_manager = src_hypo_group_manager
+ super().change_source_hypo_group_manager(src_hypo_group_manager)
+
self._construct_signal_candidates()
def mu2flux(self, mu, per_source=False):
 | Rename custom_sig_generator argument to sig_generator_cls. Introduce SignalGeneratorBase class.
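A hedged sketch of the extension point this introduces (the subclass name and method body are illustrative; the base class, the abstract `generate_signal_events` method, and the `sig_generator_cls` keyword come from the patch above):

from skyllh.core.signal_generator import SignalGeneratorBase

class MySignalGenerator(SignalGeneratorBase):
    """Hypothetical custom signal generator."""
    def generate_signal_events(self, rss, mean, poisson=True):
        # Draw the number of signal events; the construction of the
        # per-dataset event arrays is omitted in this sketch.
        n_signal = rss.random.poisson(mean) if poisson else int(mean)
        signal_events_dict = {}  # dataset index -> DataFieldRecordArray
        return n_signal, signal_events_dict

An analysis would then be constructed with `sig_generator_cls=MySignalGenerator` in place of the removed `custom_sig_generator` argument.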
| 2022-12-08T12:32:32 | 0.0 | [] | [] |
|||
PKU-Alignment/omnisafe | PKU-Alignment__omnisafe-251 | a07910282ab74537301eb1030f7affac962b3319 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index be33c4d1c..b3da80fdf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
- id: debug-statements
- id: double-quote-string-fixer
- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.0.267
+ rev: v0.0.275
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
@@ -42,7 +42,7 @@ repos:
hooks:
- id: black-jupyter
- repo: https://github.com/asottile/pyupgrade
- rev: v3.4.0
+ rev: v3.7.0
hooks:
- id: pyupgrade
args: [--py38-plus] # sync with requires-python
@@ -63,7 +63,7 @@ repos:
^docs/source/conf.py$
)
- repo: https://github.com/codespell-project/codespell
- rev: v2.2.4
+ rev: v2.2.5
hooks:
- id: codespell
additional_dependencies: [".[toml]"]
diff --git a/omnisafe/common/offline/dataset.py b/omnisafe/common/offline/dataset.py
index 8a46a0665..9502e8bf8 100644
--- a/omnisafe/common/offline/dataset.py
+++ b/omnisafe/common/offline/dataset.py
@@ -19,6 +19,7 @@
import hashlib
import os
from dataclasses import dataclass
+from typing import ClassVar
import gdown
import numpy as np
@@ -40,7 +41,7 @@ class OfflineMeta:
class OfflineDataset(Dataset):
"""A dataset for offline algorithms."""
- _name_to_metadata: dict[str, OfflineMeta] = {
+ _name_to_metadata: ClassVar[dict[str, OfflineMeta]] = {
'SafetyPointCircle1-v0-mixed-beta0.5': OfflineMeta(
url='https://drive.google.com/file/d/17q2-T1o01GNM3rBmLP52kRTojYS1ePTX/view?usp=sharing',
sha256sum='354a762a4fba372c497a0c84e3405863c192406ff754b18eea51a036f47cd5ba',
diff --git a/omnisafe/envs/core.py b/omnisafe/envs/core.py
index 089d7d818..1740182fe 100644
--- a/omnisafe/envs/core.py
+++ b/omnisafe/envs/core.py
@@ -18,7 +18,7 @@
import inspect
from abc import ABC, abstractmethod
-from typing import Any
+from typing import Any, ClassVar
import torch
@@ -45,7 +45,6 @@ class CMDP(ABC):
        need_auto_reset_wrapper (bool): Whether the environment needs the auto reset wrapper.
"""
- _support_envs: list[str]
_action_space: OmnisafeSpace
_observation_space: OmnisafeSpace
_metadata: dict[str, Any]
@@ -55,6 +54,8 @@ class CMDP(ABC):
need_time_limit_wrapper: bool
need_auto_reset_wrapper: bool
+ _support_envs: ClassVar[list[str]]
+
@classmethod
def support_envs(cls) -> list[str]:
"""The supported environments.
diff --git a/omnisafe/envs/mujoco_env.py b/omnisafe/envs/mujoco_env.py
index 0caef7c18..8cced02d6 100644
--- a/omnisafe/envs/mujoco_env.py
+++ b/omnisafe/envs/mujoco_env.py
@@ -13,9 +13,10 @@
# limitations under the License.
# ==============================================================================
"""Environments in the Safety Gymnasium."""
+
from __future__ import annotations
-from typing import Any
+from typing import Any, ClassVar
import gymnasium
import numpy as np
@@ -34,7 +35,11 @@ class MujocoEnv(CMDP):
need_time_limit_wrapper (bool): Whether to use time limit wrapper.
"""
- _support_envs = [
+ need_auto_reset_wrapper = False
+
+ need_time_limit_wrapper = False
+ need_action_repeat_wrapper = True
+ _support_envs: ClassVar[list[str]] = [
'Ant-v4',
'Hopper-v4',
'Walker2d-v4',
@@ -42,10 +47,6 @@ class MujocoEnv(CMDP):
'Swimmer-v4',
'HalfCheetah-v4',
]
- need_auto_reset_wrapper = False
-
- need_time_limit_wrapper = False
- need_action_repeat_wrapper = True
def __init__(
self,
diff --git a/omnisafe/envs/safety_gymnasium_env.py b/omnisafe/envs/safety_gymnasium_env.py
index f0756266f..3f214e008 100644
--- a/omnisafe/envs/safety_gymnasium_env.py
+++ b/omnisafe/envs/safety_gymnasium_env.py
@@ -16,7 +16,7 @@
from __future__ import annotations
-from typing import Any
+from typing import Any, ClassVar
import numpy as np
import safety_gymnasium
@@ -51,7 +51,8 @@ class SafetyGymnasiumEnv(CMDP):
need_auto_reset_wrapper: bool = False
need_time_limit_wrapper: bool = False
- _support_envs: list[str] = [
+
+ _support_envs: ClassVar[list[str]] = [
'SafetyPointGoal0-v0',
'SafetyPointGoal1-v0',
'SafetyPointGoal2-v0',
@@ -109,6 +110,7 @@ def __init__(
super().__init__(env_id)
self._num_envs = num_envs
self._device = torch.device(device)
+
if num_envs > 1:
self._env = safety_gymnasium.vector.make(env_id=env_id, num_envs=num_envs, **kwargs)
assert isinstance(self._env.single_action_space, Box), 'Only support Box action space.'
diff --git a/omnisafe/envs/safety_gymnasium_modelbased.py b/omnisafe/envs/safety_gymnasium_modelbased.py
index 1837f6b43..dfeb1646e 100644
--- a/omnisafe/envs/safety_gymnasium_modelbased.py
+++ b/omnisafe/envs/safety_gymnasium_modelbased.py
@@ -17,7 +17,7 @@
from __future__ import annotations
-from typing import Any
+from typing import Any, ClassVar
import gymnasium
import numpy as np
@@ -38,7 +38,10 @@ class SafetyGymnasiumModelBased(CMDP): # pylint: disable=too-many-instance-attr
need_time_limit_wrapper (bool): Whether to use time limit wrapper.
"""
- _support_envs = [
+ need_auto_reset_wrapper = False
+ need_time_limit_wrapper = False
+
+ _support_envs: ClassVar[list[str]] = [
'SafetyPointGoal0-v0-modelbased',
'SafetyPointGoal1-v0-modelbased',
'SafetyCarGoal0-v0-modelbased',
@@ -46,8 +49,6 @@ class SafetyGymnasiumModelBased(CMDP): # pylint: disable=too-many-instance-attr
'SafetyAntGoal0-v0-modelbased',
'SafetyAntGoal1-v0-modelbased',
]
- need_auto_reset_wrapper = False
- need_time_limit_wrapper = False
def __init__(
self,
@@ -74,6 +75,7 @@ def __init__(
height (int, optional): The height of the rendered image. Defaults to 256.
"""
super().__init__(env_id)
+
self._use_lidar = use_lidar
if num_envs == 1:
self._env = safety_gymnasium.make(
@@ -221,10 +223,9 @@ def get_lidar_from_coordinate(self, obs: np.ndarray) -> torch.Tensor:
obs_vec = list(base_state_vec) + list(hazards_lidar_vec) + list(goal_lidar_vec)
- # obs_vec = self.make_observation(obs, lidar_vec)
obs_vec = np.array(obs_vec)
- obs_vec = torch.as_tensor(obs_vec, dtype=torch.float32, device=self._device).unsqueeze(0)
- return obs_vec
+
+ return torch.as_tensor(obs_vec, dtype=torch.float32, device=self._device).unsqueeze(0)
def _ego_xy(
self,
diff --git a/omnisafe/utils/math.py b/omnisafe/utils/math.py
index a5d19cff4..6c2457396 100644
--- a/omnisafe/utils/math.py
+++ b/omnisafe/utils/math.py
@@ -171,12 +171,14 @@ class TanhNormal(TransformedDistribution): # pylint: disable=abstract-method
scale (float or Tensor): The standard deviation of the underlying normal distribution.
"""
- arg_constraints = {'loc': constraints.real, 'scale': constraints.positive}
-
def __init__(self, loc: torch.Tensor, scale: torch.Tensor) -> None:
"""Initialize an instance of :class:`TanhNormal`."""
base_dist = Normal(loc, scale)
super().__init__(base_dist, SafeTanhTransformer())
+ self.arg_constraints = {
+ 'loc': constraints.real,
+ 'scale': constraints.positive,
+ }
def expand(self, batch_shape: tuple[int, ...], instance: Any | None = None) -> TanhNormal:
"""Expand the distribution."""
| fix: correct the author list of PIDLag

| I believe this modification is correct and necessary. However, due to updates in the `ruff` version there are new code-standard requirements, and your pull request currently fails the lint check. We will soon open a new pull request to fix this issue. After you merge the latest changes using git rebase, we will review your pull request and then merge it. | 2023-06-24T06:50:35 | 0.0 | [] | [] |
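For context, the lint failures mentioned here come from ruff's mutable-class-attribute rule (RUF012), which the patch above satisfies with `typing.ClassVar`; a minimal sketch with made-up class names:

from __future__ import annotations

from typing import ClassVar

class Before:
    support_envs = ['Ant-v4']  # ruff flags this mutable class default

class After:
    support_envs: ClassVar[list[str]] = ['Ant-v4']  # annotated, passes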
||
PKU-Alignment/omnisafe | PKU-Alignment__omnisafe-5 | 3536b836fc9e7a1749e20a7f8db14781d86be14f | diff --git a/examples/vis_safety_gymnasium.py b/examples/vis_safety_gymnasium.py
index 88c39eec2..f368490b2 100644
--- a/examples/vis_safety_gymnasium.py
+++ b/examples/vis_safety_gymnasium.py
@@ -1,13 +1,36 @@
+import argparse
+
import safety_gymnasium
-env_name = 'SafetyPointGoal1-v0'
-env = safety_gymnasium.make(env_name)
+def run_random(env_name):
+ env = safety_gymnasium.make(env_name, render_mode='human')
+ # env.seed(0)
+ obs, _ = env.reset()
+ terminled = False
+ ep_ret = 0
+ ep_cost = 0
+ while True:
+ if terminled:
+ print('Episode Return: %.3f \t Episode Cost: %.3f' % (ep_ret, ep_cost))
+ ep_ret, ep_cost = 0, 0
+ obs, _ = env.reset()
+ assert env.observation_space.contains(obs)
+ act = env.action_space.sample()
+ assert env.action_space.contains(act)
+ # Use the environment's built_in max_episode_steps
+ if hasattr(env, '_max_episode_steps'):
+ max_ep_len = env._max_episode_steps
+
+ obs, reward, cost, terminled, truncated, info = env.step(act)
+
+ ep_ret += reward
+ ep_cost += cost
+
-obs, info = env.reset()
-terminated = False
+if __name__ == '__main__':
-while not terminated:
- act = env.action_space.sample()
- obs, reward, cost, terminated, truncated, info = env.step(act)
- env.render()
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--env', default='SafetyPointGoal2-v0')
+ args = parser.parse_args()
+ run_random(args.env)
diff --git a/omnisafe/envs/Safety_Gymnasium/examples/safety_gym_v2_vision.py b/omnisafe/envs/Safety_Gymnasium/examples/safety_gym_v2_vision.py
new file mode 100644
index 000000000..10b1fb467
--- /dev/null
+++ b/omnisafe/envs/Safety_Gymnasium/examples/safety_gym_v2_vision.py
@@ -0,0 +1,51 @@
+import argparse
+import os
+
+# import gymnasium
+import safety_gymnasium
+from gymnasium.utils.save_video import save_video
+
+
+WORKDIR = os.path.abspath('.')
+DIR = os.path.join(WORKDIR, 'omnisafe/envs/Safety_Gymnasium/examples', 'cached_test_vision_video')
+
+
+def run_random(env_name):
+ env = safety_gymnasium.make(env_name)
+ # env.seed(0)
+ obs, _ = env.reset()
+ terminled = False
+ ep_ret = 0
+ ep_cost = 0
+ render_list = []
+ for i in range(1001):
+ if terminled:
+ print('Episode Return: %.3f \t Episode Cost: %.3f' % (ep_ret, ep_cost))
+ ep_ret, ep_cost = 0, 0
+ obs, _ = env.reset()
+ save_video(
+ frames=render_list,
+ video_folder=DIR,
+ name_prefix=f'test_vision_output',
+ fps=30,
+ )
+ render_list = []
+ assert env.observation_space.contains(obs)
+ act = env.action_space.sample()
+ assert env.action_space.contains(act)
+ # Use the environment's built_in max_episode_steps
+ if hasattr(env, '_max_episode_steps'):
+ max_ep_len = env._max_episode_steps
+ render_list.append(obs['vision'])
+ obs, reward, cost, terminled, truncated, info = env.step(act)
+
+ ep_ret += reward
+ ep_cost += cost
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--env', default='SafetyCarGoal0Vision-v0')
+ args = parser.parse_args()
+ run_random(args.env)
diff --git a/omnisafe/envs/Safety_Gymnasium/examples/test_safety_gym_v2.py b/omnisafe/envs/Safety_Gymnasium/examples/vis_safety_gym_v2.py
similarity index 85%
rename from omnisafe/envs/Safety_Gymnasium/examples/test_safety_gym_v2.py
rename to omnisafe/envs/Safety_Gymnasium/examples/vis_safety_gym_v2.py
index 3d7925da3..b62a45b07 100644
--- a/omnisafe/envs/Safety_Gymnasium/examples/test_safety_gym_v2.py
+++ b/omnisafe/envs/Safety_Gymnasium/examples/vis_safety_gym_v2.py
@@ -1,11 +1,10 @@
import argparse
-# import gymnasium
import safety_gymnasium
def run_random(env_name):
- env = safety_gymnasium.make(env_name)
+ env = safety_gymnasium.make(env_name, render_mode='human')
# env.seed(0)
obs, _ = env.reset()
terminled = False
@@ -27,12 +26,11 @@ def run_random(env_name):
ep_ret += reward
ep_cost += cost
- env.render()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
- parser.add_argument('--env', default='SafetyPointGoal2-v0')
+ parser.add_argument('--env', default='SafetyPointGoal0-v0')
args = parser.parse_args()
run_random(args.env)
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/__init__.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/__init__.py
index c2318369f..e67ef98eb 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/__init__.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/__init__.py
@@ -29,7 +29,7 @@
},
}
-MAKE_VISION_ENVIRONMENTS = False
+MAKE_VISION_ENVIRONMENTS = True
# ========================================#
# Helper Class for Easy Gym Registration #
@@ -81,7 +81,7 @@ def register(self, name='', config={}):
if MAKE_VISION_ENVIRONMENTS:
# Vision: note, these environments are experimental! Correct behavior not guaranteed
- vision_env_name = f'{self.prefix}-{robot_name}{self.name + name}Vision-{VERSION}'
+ vision_env_name = f'{self.prefix}{robot_name}{self.name + name}Vision-{VERSION}'
vision_config = {
'world': {},
'task': {
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/base_task.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/base_task.py
index acc8d9aaa..29c31aefd 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/base_task.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/base_task.py
@@ -44,14 +44,17 @@ def __init__(
# otherwise, raise a python exception.
# TODO: randomize starting joint positions
+ self.observe_vision = False
+ self.observation_flatten = True
+
# # Vision observation parameters
# TODO JJM
self.vision_size = (
60,
40,
) # Size (width, height) of vision observation; gets flipped internally to (rows, cols) format
- # self.vision_render = True # Render vision observation in the viewer
- # self.vision_render_size = (300, 200) # Size to render the vision in the viewer
+ self.vision_render = True # Render vision observation in the viewer
+ self.vision_render_size = (300, 200) # Size to render the vision in the viewer
# Lidar observation parameters
self.lidar_num_bins = 16 # Bins (around a full circle) for lidar sensing
@@ -354,8 +357,8 @@ def obs_vision(self):
# Get a render context so we can
rows, cols = self.vision_size
width, height = cols, rows
- vision = self.sim.render(width, height, camera_name='vision', mode='offscreen')
- return np.array(vision, dtype='float32') / 255
+ vision = self.engine.render(width, height, mode='rgb_array', camera_name='vision', cost={})
+ return vision
def obs_lidar(self, positions, group):
"""
@@ -477,16 +480,16 @@ def build_sensor_observation_space(self):
# if self.observe_sensors:
for sensor in self.sensors_obs: # Explicitly listed sensors
dim = self.robot.sensor_dim[sensor]
- obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float32)
+ obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float64)
# Velocities don't have wraparound effects that rotational positions do
# Wraparounds are not kind to neural networks
# Whereas the angle 2*pi is very close to 0, this isn't true in the network
# In theory the network could learn this, but in practice we simplify it
# when the sensors_angle_components switch is enabled.
for sensor in self.robot.hinge_vel_names:
- obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float32)
+ obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float64)
for sensor in self.robot.ballangvel_names:
- obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float32)
+ obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float64)
# Angular positions have wraparound effects, so output something more friendly
if self.sensors_angle_components:
# Single joints are turned into sin(x), cos(x) pairs
@@ -494,7 +497,7 @@ def build_sensor_observation_space(self):
# Since for angles, small perturbations in angle give small differences in sin/cos
for sensor in self.robot.hinge_pos_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (2,), dtype=np.float32
+ -np.inf, np.inf, (2,), dtype=np.float64
)
# Quaternions are turned into 3x3 rotation matrices
# Quaternions have a wraparound issue in how they are normalized,
@@ -508,18 +511,18 @@ def build_sensor_observation_space(self):
# Instead we use a 3x3 rotation matrix, which if normalized, smoothly varies as well.
for sensor in self.robot.ballquat_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (3, 3), dtype=np.float32
+ -np.inf, np.inf, (3, 3), dtype=np.float64
)
else:
# Otherwise include the sensor without any processing
# TODO: comparative study of the performance with and without this feature.
for sensor in self.robot.hinge_pos_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (1,), dtype=np.float32
+ -np.inf, np.inf, (1,), dtype=np.float64
)
for sensor in self.robot.ballquat_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (4,), dtype=np.float32
+ -np.inf, np.inf, (4,), dtype=np.float64
)
return obs_space_dict
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/builder.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/builder.py
index c1efd9518..fd77dfea7 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/builder.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/builder.py
@@ -37,13 +37,6 @@ class Builder(gymnasium.Env, gymnasium.utils.EzPickle):
'render_lidar_size': 0.025,
'render_lidar_offset_init': 0.5,
'render_lidar_offset_delta': 0.06,
- # Vision observation parameters
- 'vision_size': (
- 60,
- 40,
- ), # Size (width, height) of vision observation; gets flipped internally to (rows, cols) format
- 'vision_render': True, # Render vision observation in the viewer
- 'vision_render_size': (300, 200), # Size to render the vision in the viewer
# Frameskip is the number of physics simulation steps per environment step
# Frameskip is sampled as a binomial distribution
# For deterministic steps, set frameskip_binom_p = 1.0 (always take max frameskip)
@@ -51,7 +44,7 @@ class Builder(gymnasium.Env, gymnasium.utils.EzPickle):
'frameskip_binom_p': 1.0, # Probability of trial return (controls distribution)
}
- def __init__(self, config={}):
+ def __init__(self, config={}, **kwargs):
# First, parse configuration. Important note: LOTS of stuff happens in
# parse, and many attributes of the class get set through setattr. If you
# are trying to track down where an attribute gets initially set, and
@@ -62,12 +55,13 @@ def __init__(self, config={}):
self.get_config(config)
self.task_id = config['task']['task_id']
self.seed()
- print('xxx')
self._setup_simulation()
self.done = True
+ self.render_mode = kwargs.get('render_mode', None)
+
def get_config(self, config):
"""Parse a config dict - see self.DEFAULT for description"""
world_config = config['world']
@@ -134,7 +128,6 @@ def reset(self, seed=None, options=None):
self.first_reset = False # Built our first world successfully
# Return an observation
-
return (self.task.obs(), info)
def world_xy(self, pos):
@@ -227,11 +220,17 @@ def step(self, action):
terminaled = self.done
truncated = False
+ if self.render_mode == 'human':
+ self.render()
return self.task.obs(), reward, cost, terminaled, truncated, info
- def render(self, mode='human', camera_id=None, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT):
+ def render(self, camera_id=None, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT):
+ assert self.render_mode, 'Please specify the render mode when you make env.'
+ assert (
+ not self.task.observe_vision
+ ), 'When you use vision envs, you should not call this function explicitly.'
return self.engine.render(
- mode=mode, camera_id=camera_id, width=width, height=height, cost=self._cost
+ mode=self.render_mode, camera_id=camera_id, width=width, height=height, cost=self._cost
)
@property
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/engine.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/engine.py
index 419b6dda3..f0487bbc3 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/engine.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/engine.py
@@ -27,7 +27,7 @@ def __init__(self, task, world_config={}, task_config={}):
self.task = task
self.robot = Robot(self.robot_base)
- self.action_space = gymnasium.spaces.Box(-1, 1, (self.robot.nu,), dtype=np.float32)
+ self.action_space = gymnasium.spaces.Box(-1, 1, (self.robot.nu,), dtype=np.float64)
self.placements = self.task.build_placements_dict()
# self.world = self.get_world()
self.clear()
@@ -295,29 +295,36 @@ def render_swap_callback(self):
# self.viewer.draw_pixels(self.save_obs_vision, 0, 0)
pass
- def render(self, width, height, mode='human', camera_id=None, camera_name=None, cost={}):
+ def render(self, width, height, mode, camera_id=None, camera_name=None, cost={}):
"""Render the environment to the screen"""
self.model.vis.global_.offwidth = width
self.model.vis.global_.offheight = height
- if camera_id is not None and camera_name is not None:
- raise ValueError('Both camera_id and camera_name cannot be specified at the same time.')
+ if mode in {
+ 'rgb_array',
+ 'depth_array',
+ }:
- if mode == 'rgb_array':
- self._get_viewer(mode)
- self.viewer._hide_overlay = True
- camera_id = 3
- self.viewer.cam.fixedcamid = camera_id # self.model.camera_name2id(mode)
- self.viewer.cam.type = mujoco.mjtCamera.mjCAMERA_FIXED
- elif mode == 'depth_array':
- self._get_viewer(mode)
- self.viewer._hide_overlay = True
- camera_id = 3
- self.viewer.cam.fixedcamid = camera_id # self.model.camera_name2id(mode)
- self.viewer.cam.type = mujoco.mjtCamera.mjCAMERA_FIXED
- elif mode == 'human':
+ if camera_id is not None and camera_name is not None:
+ raise ValueError(
+ 'Both `camera_id` and `camera_name` cannot be' ' specified at the same time.'
+ )
+
+ no_camera_specified = camera_name is None and camera_id is None
+ if no_camera_specified:
+ camera_id = 3
+
+ if camera_id is None:
+ camera_id = mujoco.mj_name2id(
+ self.model,
+ mujoco.mjtObj.mjOBJ_CAMERA,
+ camera_name,
+ )
+
+ self._get_viewer(mode).render(camera_id=camera_id)
+
+ if mode == 'human':
self._get_viewer(mode)
- self.viewer.cam.fixedcamid = -1
self.viewer.cam.type = mujoco.mjtCamera.mjCAMERA_FREE
self.viewer.render_swap_callback = self.render_swap_callback
@@ -325,17 +332,6 @@ def render(self, width, height, mode='human', camera_id=None, camera_name=None,
self.viewer.vopt.geomgroup[:] = 1
# self.viewer.update_sim(self.sim)
- if camera_id is not None:
- # Update camera if desired
- self.viewer.cam.fixedcamid = camera_id
- elif camera_name is not None:
- camera_id = mujoco.mj_name2id(
- self.model,
- mujoco.mjtObj.mjOBJ_CAMERA,
- camera_name,
- )
- self.viewer.cam.fixedcamid = camera_id
-
# Lidar markers
if self.render_lidar_markers:
offset = self.render_lidar_offset_init # Height offset for successive lidar indicators
@@ -428,8 +424,8 @@ def render(self, width, height, mode='human', camera_id=None, camera_name=None,
def _get_viewer(
self, mode
) -> Union[
- 'gym.envs.mujoco.mujoco_rendering.Viewer',
- 'gym.envs.mujoco.mujoco_rendering.RenderContextOffscreen',
+ 'gymnasium.envs.mujoco.mujoco_rendering.Viewer',
+ 'gymnasium.envs.mujoco.mujoco_rendering.RenderContextOffscreen',
]:
self.viewer = self._viewers.get(mode)
if self.viewer is None:
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level0.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level0.py
index 9bde5fe43..3d5e199e7 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level0.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level0.py
@@ -145,7 +145,7 @@ def build_world_config(self, layout):
# if not self.observe_vision:
# world_config['render_context'] = -1 # Hijack this so we don't create context
- # world_config['observe_vision'] = self.observe_vision
+ world_config['observe_vision'] = self.observe_vision
# Extra geoms (immovable objects) to add to the scene
world_config['geoms'] = {}
@@ -166,28 +166,30 @@ def build_observation_space(self):
# if self.observe_goal_lidar:
obs_space_dict['goal_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.buttons_num and self.observe_buttons:
obs_space_dict['buttons_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
- # if self.observe_vision:
- # width, height = self.vision_size
- # rows, cols = height, width
- # self.vision_size = (rows, cols)
- # obs_space_dict['vision'] = gym.spaces.Box(0, 1.0, self.vision_size + (3,), dtype=np.float32)
+ if self.observe_vision:
+ width, height = self.vision_size
+ rows, cols = height, width
+ self.vision_size = (rows, cols)
+ obs_space_dict['vision'] = gymnasium.spaces.Box(
+ 0, 255, self.vision_size + (3,), dtype=np.uint8
+ )
# Flatten it ourselves
self.obs_space_dict = obs_space_dict
- # if self.observation_flatten:
- self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
- self.observation_space = gymnasium.spaces.Box(
- -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
- )
- # else:
- # self.observation_space = gym.spaces.Dict(obs_space_dict)
+ if self.observation_flatten:
+ self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
+ self.observation_space = gymnasium.spaces.Box(
+ -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
+ )
+ else:
+ self.observation_space = gymnasium.spaces.Dict(obs_space_dict)
def obs(self):
"""Return the observation of our agent"""
@@ -206,18 +208,17 @@ def obs(self):
else:
obs['buttons_lidar'] = np.zeros(self.lidar_num_bins)
- # if self.observe_vision:
- # obs['vision'] = self.obs_vision()
-
- # if self.observation_flatten:
- flat_obs = np.zeros(self.obs_flat_size)
- offset = 0
- for k in sorted(self.obs_space_dict.keys()):
- k_size = np.prod(obs[k].shape)
- flat_obs[offset : offset + k_size] = obs[k].flat
- offset += k_size
- obs = flat_obs
- assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
+ if self.observe_vision:
+ obs['vision'] = self.obs_vision()
+ if self.observation_flatten:
+ flat_obs = np.zeros(self.obs_flat_size)
+ offset = 0
+ for k in sorted(self.obs_space_dict.keys()):
+ k_size = np.prod(obs[k].shape)
+ flat_obs[offset : offset + k_size] = obs[k].flat
+ offset += k_size
+ obs = flat_obs
+ assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
return obs
def buttons_timer_tick(self):
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level1.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level1.py
index e41f9ac7e..0dc02500d 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level1.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/button/button_level1.py
@@ -92,7 +92,7 @@ def build_world_config(self, layout):
# if not self.observe_vision:
# world_config['render_context'] = -1 # Hijack this so we don't create context
- # world_config['observe_vision'] = self.observe_vision
+ world_config['observe_vision'] = self.observe_vision
# Extra objects to add to the scene
world_config['objects'] = {}
@@ -139,38 +139,41 @@ def build_observation_space(self):
# if self.observe_goal_lidar:
obs_space_dict['goal_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.observe_hazards:
obs_space_dict['hazards_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.gremlins_num and self.observe_gremlins:
obs_space_dict['gremlins_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.buttons_num and self.observe_buttons:
obs_space_dict['buttons_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
- # if self.observe_vision:
- # width, height = self.vision_size
- # rows, cols = height, width
- # self.vision_size = (rows, cols)
- # obs_space_dict['vision'] = gym.spaces.Box(0, 1.0, self.vision_size + (3,), dtype=np.float32)
+ if self.observe_vision:
+ width, height = self.vision_size
+ rows, cols = height, width
+ self.vision_size = (rows, cols)
+ obs_space_dict['vision'] = gymnasium.spaces.Box(
+ 0, 255, self.vision_size + (3,), dtype=np.uint8
+ )
+
# Flatten it ourselves
self.obs_space_dict = obs_space_dict
- # if self.observation_flatten:
- self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
- self.observation_space = gymnasium.spaces.Box(
- -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
- )
- # else:
- # self.observation_space = gym.spaces.Dict(obs_space_dict)
+ if self.observation_flatten:
+ self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
+ self.observation_space = gymnasium.spaces.Box(
+ -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
+ )
+ else:
+ self.observation_space = gymnasium.spaces.Dict(obs_space_dict)
def obs(self):
"""Return the observation of our agent"""
@@ -195,15 +198,15 @@ def obs(self):
else:
obs['buttons_lidar'] = np.zeros(self.lidar_num_bins)
- # if self.observe_vision:
- # obs['vision'] = self.obs_vision()
- # if self.observation_flatten:
- flat_obs = np.zeros(self.obs_flat_size)
- offset = 0
- for k in sorted(self.obs_space_dict.keys()):
- k_size = np.prod(obs[k].shape)
- flat_obs[offset : offset + k_size] = obs[k].flat
- offset += k_size
- obs = flat_obs
- assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
+ if self.observe_vision:
+ obs['vision'] = self.obs_vision()
+ if self.observation_flatten:
+ flat_obs = np.zeros(self.obs_flat_size)
+ offset = 0
+ for k in sorted(self.obs_space_dict.keys()):
+ k_size = np.prod(obs[k].shape)
+ flat_obs[offset : offset + k_size] = obs[k].flat
+ offset += k_size
+ obs = flat_obs
+ assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
return obs
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level0.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level0.py
index 0f5f5c950..cd39e6fdb 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level0.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level0.py
@@ -144,18 +144,26 @@ def build_observation_space(self):
# if self.observe_goal_lidar:
obs_space_dict['goal_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
+ if self.observe_vision:
+ width, height = self.vision_size
+ rows, cols = height, width
+ self.vision_size = (rows, cols)
+ obs_space_dict['vision'] = gymnasium.spaces.Box(
+ 0, 255, self.vision_size + (3,), dtype=np.uint8
+ )
+
# Flatten it ourselves
self.obs_space_dict = obs_space_dict
- # if self.observation_flatten:
- self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
- self.observation_space = gymnasium.spaces.Box(
- -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
- )
- # else:
- # self.observation_space = gym.spaces.Dict(obs_space_dict)
+ if self.observation_flatten:
+ self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
+ self.observation_space = gymnasium.spaces.Box(
+ -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
+ )
+ else:
+ self.observation_space = gymnasium.spaces.Dict(obs_space_dict)
def obs(self):
"""Return the observation of our agent"""
@@ -187,13 +195,15 @@ def obs(self):
for sensor in self.robot.ballquat_names:
obs[sensor] = self.world.get_sensor(sensor)
- # if self.observation_flatten:
- flat_obs = np.zeros(self.obs_flat_size)
- offset = 0
- for k in sorted(self.obs_space_dict.keys()):
- k_size = np.prod(obs[k].shape)
- flat_obs[offset : offset + k_size] = obs[k].flat
- offset += k_size
- obs = flat_obs
- assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
+ if self.observe_vision:
+ obs['vision'] = self.obs_vision()
+ if self.observation_flatten:
+ flat_obs = np.zeros(self.obs_flat_size)
+ offset = 0
+ for k in sorted(self.obs_space_dict.keys()):
+ k_size = np.prod(obs[k].shape)
+ flat_obs[offset : offset + k_size] = obs[k].flat
+ offset += k_size
+ obs = flat_obs
+ assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
return obs
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level1.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level1.py
index 76fb4eaec..8e5bfff27 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level1.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/goal/goal_level1.py
@@ -103,16 +103,16 @@ def build_observation_space(self):
for sensor in self.sensors_obs: # Explicitly listed sensors
dim = self.robot.sensor_dim[sensor]
- obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float32)
+ obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float64)
# Velocities don't have wraparound effects that rotational positions do
# Wraparounds are not kind to neural networks
# Whereas the angle 2*pi is very close to 0, this isn't true in the network
# In theory the network could learn this, but in practice we simplify it
# when the sensors_angle_components switch is enabled.
for sensor in self.robot.hinge_vel_names:
- obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float32)
+ obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float64)
for sensor in self.robot.ballangvel_names:
- obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float32)
+ obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float64)
# Angular positions have wraparound effects, so output something more friendly
if self.sensors_angle_components:
# Single joints are turned into sin(x), cos(x) pairs
@@ -120,7 +120,7 @@ def build_observation_space(self):
# Since for angles, small perturbations in angle give small differences in sin/cos
for sensor in self.robot.hinge_pos_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (2,), dtype=np.float32
+ -np.inf, np.inf, (2,), dtype=np.float64
)
# Quaternions are turned into 3x3 rotation matrices
# Quaternions have a wraparound issue in how they are normalized,
@@ -134,43 +134,51 @@ def build_observation_space(self):
# Instead we use a 3x3 rotation matrix, which if normalized, smoothly varies as well.
for sensor in self.robot.ballquat_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (3, 3), dtype=np.float32
+ -np.inf, np.inf, (3, 3), dtype=np.float64
)
else:
# Otherwise include the sensor without any processing
# TODO: comparative study of the performance with and without this feature.
for sensor in self.robot.hinge_pos_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (1,), dtype=np.float32
+ -np.inf, np.inf, (1,), dtype=np.float64
)
for sensor in self.robot.ballquat_names:
obs_space_dict[sensor] = gymnasium.spaces.Box(
- -np.inf, np.inf, (4,), dtype=np.float32
+ -np.inf, np.inf, (4,), dtype=np.float64
)
# if self.observe_goal_lidar:
obs_space_dict['goal_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.observe_hazards:
obs_space_dict['hazards_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.observe_vases:
obs_space_dict['vases_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
+ if self.observe_vision:
+ width, height = self.vision_size
+ rows, cols = height, width
+ self.vision_size = (rows, cols)
+ obs_space_dict['vision'] = gymnasium.spaces.Box(
+ 0, 255, self.vision_size + (3,), dtype=np.uint8
+ )
+
# Flatten it ourselves
self.obs_space_dict = obs_space_dict
- # if self.observation_flatten:
- self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
- self.observation_space = gymnasium.spaces.Box(
- -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
- )
- # else:
- # self.observation_space = gym.spaces.Dict(obs_space_dict)
+ if self.observation_flatten:
+ self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
+ self.observation_space = gymnasium.spaces.Box(
+ -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
+ )
+ else:
+ self.observation_space = gymnasium.spaces.Dict(obs_space_dict)
def obs(self):
"""Return the observation of our agent"""
@@ -187,13 +195,15 @@ def obs(self):
# if self.observe_vases:
obs['vases_lidar'] = self.obs_lidar(self.vases_pos, GROUP['vase'])
- # if self.observation_flatten:
- flat_obs = np.zeros(self.obs_flat_size)
- offset = 0
- for k in sorted(self.obs_space_dict.keys()):
- k_size = np.prod(obs[k].shape)
- flat_obs[offset : offset + k_size] = obs[k].flat
- offset += k_size
- obs = flat_obs
- assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
+ if self.observe_vision:
+ obs['vision'] = self.obs_vision()
+ if self.observation_flatten:
+ flat_obs = np.zeros(self.obs_flat_size)
+ offset = 0
+ for k in sorted(self.obs_space_dict.keys()):
+ k_size = np.prod(obs[k].shape)
+ flat_obs[offset : offset + k_size] = obs[k].flat
+ offset += k_size
+ obs = flat_obs
+ assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
return obs
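The `observe_vision` branch added in these hunks declares the camera image as a uint8 Box and reorders `vision_size` from (width, height) to (rows, cols), matching the (H, W, 3) convention for image observations. A small sketch of just that step, with the 64x48 size as an assumed example value:

```python
import numpy as np
import gymnasium

vision_size = (64, 48)        # assumed (width, height), as configured upstream
width, height = vision_size
rows, cols = height, width    # image arrays are (H, W, 3)
vision_space = gymnasium.spaces.Box(0, 255, (rows, cols) + (3,), dtype=np.uint8)

frame = np.zeros((rows, cols, 3), dtype=np.uint8)
assert vision_space.contains(frame)
```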
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level0.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level0.py
index e06ee5e04..ee581aa9f 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level0.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level0.py
@@ -164,29 +164,31 @@ def build_observation_space(self):
# obs_space_dict['box_compass'] = gym.spaces.Box(-1.0, 1.0, (self.compass_shape,), dtype=np.float32)
# if self.observe_box_lidar:
obs_space_dict['box_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.observe_goal_lidar:
obs_space_dict['goal_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
- # if self.observe_vision:
- # width, height = self.vision_size
- # rows, cols = height, width
- # self.vision_size = (rows, cols)
- # obs_space_dict['vision'] = gym.spaces.Box(0, 1.0, self.vision_size + (3,), dtype=np.float32)
+ if self.observe_vision:
+ width, height = self.vision_size
+ rows, cols = height, width
+ self.vision_size = (rows, cols)
+ obs_space_dict['vision'] = gymnasium.spaces.Box(
+ 0, 255, self.vision_size + (3,), dtype=np.uint8
+ )
# Flatten it ourselves
self.obs_space_dict = obs_space_dict
- # if self.observation_flatten:
- self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
- self.observation_space = gymnasium.spaces.Box(
- -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
- )
- # else:
- # self.observation_space = gym.spaces.Dict(obs_space_dict)
+ if self.observation_flatten:
+ self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
+ self.observation_space = gymnasium.spaces.Box(
+ -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
+ )
+ else:
+ self.observation_space = gymnasium.spaces.Dict(obs_space_dict)
def obs(self):
"""Return the observation of our agent"""
@@ -204,18 +206,17 @@ def obs(self):
obs.update(self.get_sensor_obs())
- # if self.observe_vision:
- # obs['vision'] = self.obs_vision()
-
- # if self.observation_flatten:
- flat_obs = np.zeros(self.obs_flat_size)
- offset = 0
- for k in sorted(self.obs_space_dict.keys()):
- k_size = np.prod(obs[k].shape)
- flat_obs[offset : offset + k_size] = obs[k].flat
- offset += k_size
- obs = flat_obs
- assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
+ if self.observe_vision:
+ obs['vision'] = self.obs_vision()
+ if self.observation_flatten:
+ flat_obs = np.zeros(self.obs_flat_size)
+ offset = 0
+ for k in sorted(self.obs_space_dict.keys()):
+ k_size = np.prod(obs[k].shape)
+ flat_obs[offset : offset + k_size] = obs[k].flat
+ offset += k_size
+ obs = flat_obs
+ assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
return obs
def dist_box(self):
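Each task's `obs()` ends by asserting that the assembled vector is contained in the declared space, and that check is exactly what surfaces the float32/float64 mismatches these diffs fix: `flat_obs` is float64 by default, and gymnasium's containment check refuses unsafe dtype casts. A tiny sketch of the failure mode, with a made-up shape:

```python
import numpy as np
import gymnasium

space32 = gymnasium.spaces.Box(-np.inf, np.inf, (4,), dtype=np.float32)
obs = np.zeros(4, dtype=np.float64)  # np.zeros defaults to float64

# float64 cannot be safely cast to float32, so containment fails:
assert not space32.contains(obs)

space64 = gymnasium.spaces.Box(-np.inf, np.inf, (4,), dtype=np.float64)
assert space64.contains(obs)         # matching dtype passes
```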
diff --git a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level1.py b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level1.py
index 76b72a946..83309ad13 100644
--- a/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level1.py
+++ b/omnisafe/envs/Safety_Gymnasium/safety_gymnasium/envs/safety_gym_v2/tasks/push/push_level1.py
@@ -133,38 +133,41 @@ def build_observation_space(self):
# obs_space_dict['box_compass'] = gym.spaces.Box(-1.0, 1.0, (self.compass_shape,), dtype=np.float32)
# if self.observe_box_lidar:
obs_space_dict['box_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.observe_goal_lidar:
obs_space_dict['goal_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.observe_hazards:
obs_space_dict['hazards_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
# if self.pillars_num and self.observe_pillars:
obs_space_dict['pillars_lidar'] = gymnasium.spaces.Box(
- 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32
+ 0.0, 1.0, (self.lidar_num_bins,), dtype=np.float64
)
- # if self.observe_vision:
- # width, height = self.vision_size
- # rows, cols = height, width
- # self.vision_size = (rows, cols)
- # obs_space_dict['vision'] = gym.spaces.Box(0, 1.0, self.vision_size + (3,), dtype=np.float32)
+ if self.observe_vision:
+ width, height = self.vision_size
+ rows, cols = height, width
+ self.vision_size = (rows, cols)
+ obs_space_dict['vision'] = gymnasium.spaces.Box(
+ 0, 255, self.vision_size + (3,), dtype=np.uint8
+ )
+
# Flatten it ourselves
self.obs_space_dict = obs_space_dict
- # if self.observation_flatten:
- self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
- self.observation_space = gymnasium.spaces.Box(
- -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
- )
- # else:
- # self.observation_space = gym.spaces.Dict(obs_space_dict)
+ if self.observation_flatten:
+ self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()])
+ self.observation_space = gymnasium.spaces.Box(
+ -np.inf, np.inf, (self.obs_flat_size,), dtype=np.float64
+ )
+ else:
+ self.observation_space = gymnasium.spaces.Dict(obs_space_dict)
def obs(self):
"""Return the observation of our agent"""
@@ -188,16 +191,15 @@ def obs(self):
# if self.pillars_num and self.observe_pillars:
obs['pillars_lidar'] = self.obs_lidar(self.pillars_pos, GROUP['pillar'])
- # if self.observe_vision:
- # obs['vision'] = self.obs_vision()
-
- # if self.observation_flatten:
- flat_obs = np.zeros(self.obs_flat_size)
- offset = 0
- for k in sorted(self.obs_space_dict.keys()):
- k_size = np.prod(obs[k].shape)
- flat_obs[offset : offset + k_size] = obs[k].flat
- offset += k_size
- obs = flat_obs
- assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
+ if self.observe_vision:
+ obs['vision'] = self.obs_vision()
+ if self.observation_flatten:
+ flat_obs = np.zeros(self.obs_flat_size)
+ offset = 0
+ for k in sorted(self.obs_space_dict.keys()):
+ k_size = np.prod(obs[k].shape)
+ flat_obs[offset : offset + k_size] = obs[k].flat
+ offset += k_size
+ obs = flat_obs
+ assert self.observation_space.contains(obs), f'Bad obs {obs} {self.observation_space}'
return obs
| How to change the render mode?
## Questions
It seems that the environment's render mode cannot be changed. We tested two approaches, and both failed.
The new style from the gymnasium library:
`env = safety_gymnasium.make(env_name, render='rgb_array')`
which raises
`TypeError: __init__() got an unexpected keyword argument 'render'`
And the old style from the gym library:
`env.render(render_mode='rgb_array')`
which raises
`TypeError: env_render_passive_checker() got an unexpected keyword argument 'render_mode'`.
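For context, the current Gymnasium convention is to fix the render mode at construction time by passing `render_mode` to `make()`, not to `render()`; whether `safety_gymnasium.make` forwards this keyword is an assumption here, not something the tracebacks above confirm. A hedged sketch of the standard pattern, using a stock environment as a stand-in:

```python
import gymnasium

# Standard Gymnasium pattern: the render mode is fixed at construction.
env = gymnasium.make("CartPole-v1", render_mode="rgb_array")
env.reset()
frame = env.render()  # returns an RGB array; render() takes no mode kwarg
env.close()
```

If `safety_gymnasium.make` mirrors `gymnasium.make`, then `safety_gymnasium.make(env_name, render_mode='rgb_array')` (note `render_mode`, not `render`) would be the analogous call; treat that as an assumption to verify against the library.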
## Checklist
- [x] I have checked that there is no similar issue in the repo. (**required**)
- [x] I have read the [documentation](https://omnisafe.readthedocs.io). (**required**)
| 2022-11-21T08:00:25 | 0.0 | [] | [] |
|||
sayari-analytics/pyisic | sayari-analytics__pyisic-70 | 105f07c16639d5bb69cc2a21e666361050aeacde | diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index f378e60..a72d33d 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -26,10 +26,10 @@ jobs:
PYTHON: ${{ matrix.python-version }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
@@ -38,7 +38,7 @@ jobs:
run: echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
- uses: actions/cache@v2
+ uses: actions/cache@v3
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('**/setup.py') }}
@@ -55,7 +55,7 @@ jobs:
python scripts/data_artifacts.py
- name: "Upload coverage to Codecov"
- uses: codecov/codecov-action@v2
+ uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./coverage.xml
@@ -63,39 +63,31 @@ jobs:
verbose: true
- name: Upload data artifact
- uses: actions/upload-artifact@v2
+ uses: actions/upload-artifact@v3
with:
name: data
path: standards.json
style:
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os: [ubuntu-latest]
- python-version: ["3.6", "3.7", "3.8", "3.9"]
- env:
- OS: ${{ matrix.os }}
- PYTHON: ${{ matrix.python-version }}
-
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
- python-version: ${{ matrix.python-version }}
+ python-version: '3.8'
- name: Get pip cache dir
id: pip-cache
run: echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
- uses: actions/cache@v2
+ uses: actions/cache@v3
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-precommit-py${{ matrix.python-version }}-${{ hashFiles('**/.pre-commit-config.yaml') }}
- restore-keys: ${{ runner.os }}-pip-precommit-
+ key: ${{ runner.os }}-pip-precommit-py-${{ hashFiles('**/.pre-commit-config.yaml') }}
+ restore-keys: ${{ runner.os }}-pip-precommit-py-${{ hashFiles('**/.pre-commit-config.yaml') }}
- name: Install pre-commit
run: |
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f8e77b1..863c1da 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -34,10 +34,11 @@ repos:
args: [--markdown-linebreak-ext=md]
exclude: \.(html|svg)$
- - repo: https://gitlab.com/pycqa/flake8
+ - repo: https://github.com/pycqa/flake8
rev: 3.8.4
hooks:
- id: flake8
+ exclude: deprecated/
additional_dependencies:
- flake8-2020
- flake8-bugbear
diff --git a/pyisic/__init__.py b/pyisic/__init__.py
index 8e36162..23a6358 100644
--- a/pyisic/__init__.py
+++ b/pyisic/__init__.py
@@ -18,6 +18,7 @@
from ._standards.naics2017 import NAICS2017, NAICS2017_to_ISIC4
from ._standards.sbi2008 import SBI2008, SBI2008_to_NACE2
from ._standards.scian2018 import SCIAN2018, SCIAN2018_to_ISIC4
+from ._standards.sic import SIC, SIC_to_NAICS2017
from ._standards.skd2002 import SKD2002, SKD2002_to_NACE2, SKD2002_to_SKD2008
from ._standards.skd2008 import SKD2008, SKD2008_to_SKD2002
from ._standards.skis2010 import SKIS2010
@@ -49,5 +50,6 @@
CAEM2009_to_ISIC4,
CAEM2005_to_ISIC3,
SBI2008_to_NACE2,
+ SIC_to_NAICS2017,
],
)
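Taken together with the `Standard(...)` literal further down, the registration above implies the pattern for adding a crosswalk: define a `Standard` of `Classification` entries, define a `*_to_*` mapping module, import both in `pyisic/__init__.py`, and append the mapping to the registered list. The sketch below mirrors that pattern with a deliberately fake two-entry standard; the `Standard`/`Classification`/`Category` signatures are inferred from this diff itself, not from separate documentation.

```python
from pyisic.types import Category, Classification, Standard, Standards

# Hypothetical miniature standard, shaped like the SIC literal in this PR.
# Standards.SIC is reused here only to keep the sketch self-contained.
TOY = Standard(
    standard=Standards.SIC,
    classes=[
        Classification("A", "AGRICULTURE, FORESTRY, AND FISHING", Category.SECTION),
        Classification("01", "AGRICULTURAL PRODUCTION-CROPS", Category.DIVISION),
    ],
)
```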
diff --git a/pyisic/_standards/sic/__init__.py b/pyisic/_standards/sic/__init__.py
new file mode 100644
index 0000000..cd883bd
--- /dev/null
+++ b/pyisic/_standards/sic/__init__.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+from .sic import SIC
+from .sic_to_naics2017 import SIC_to_NAICS2017
diff --git a/pyisic/_standards/sic/sic.py b/pyisic/_standards/sic/sic.py
new file mode 100644
index 0000000..2522e09
--- /dev/null
+++ b/pyisic/_standards/sic/sic.py
@@ -0,0 +1,3067 @@
+"""`SIC Standard <https://www.ehso.com/siccodes.php>`_.
+"""
+from ...types import Category, Classification, Standard, Standards
+
+SIC = Standard(
+ standard=Standards.SIC,
+ classes=[
+ Classification("A", "AGRICULTURE, FORESTRY, AND FISHING", Category.SECTION),
+ Classification("01", "AGRICULTURAL PRODUCTION-CROPS", Category.DIVISION),
+ Classification("011", "CASH GRAINS", Category.GROUP),
+ Classification("0111", "WHEAT", Category.CLASS),
+ Classification("0112", "RICE", Category.CLASS),
+ Classification("0115", "CORN", Category.CLASS),
+ Classification("0116", "SOYBEANS", Category.CLASS),
+ Classification("0119", "CASH GRAINS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("013", "FIELD CROPS, EXCEPT CASH GRAINS", Category.GROUP),
+ Classification("0131", "COTTON", Category.CLASS),
+ Classification("0132", "TOBACCO", Category.CLASS),
+ Classification("0133", "SUGARCANE AND SUGAR BEETS", Category.CLASS),
+ Classification("0134", "IRISH POTATOES", Category.CLASS),
+ Classification("0139", "FIELD CROPS, EXCEPT CASH GRAINS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("016", "VEGETABLES AND MELONS", Category.GROUP),
+ Classification("0161", "VEGETABLES AND MELONS", Category.CLASS),
+ Classification("017", "FRUITS AND TREE NUTS", Category.GROUP),
+ Classification("0171", "BERRY CROPS", Category.CLASS),
+ Classification("0172", "GRAPES", Category.CLASS),
+ Classification("0173", "TREE NUTS", Category.CLASS),
+ Classification("0174", "CITRUS FRUITS", Category.CLASS),
+ Classification("0175", "DECIDUOUS TREE FRUITS", Category.CLASS),
+ Classification("0179", "FRUITS AND TREE NUTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("018", "HORTICULTURAL SPECIALTIES", Category.GROUP),
+ Classification("0181", "ORNAMENTAL FLORICULTURE AND NURSERY PRODUCTS", Category.CLASS),
+ Classification("0182", "FOOD CROPS GROWN UNDER COVER", Category.CLASS),
+ Classification("019", "GENERAL FARMS, PRIMARILY CROP", Category.GROUP),
+ Classification("0191", "GENERAL FARMS, PRIMARILY CROP", Category.CLASS),
+ Classification("02", "AGRICULTURAL PRODUCTION-LIVESTOCK AND ANIMAL SPECIALTIES", Category.DIVISION),
+ Classification("021", "LIVESTOCK, EXCEPT DAIRY AND POULTRY", Category.GROUP),
+ Classification("0211", "BEEF CATTLE FEEDLOTS", Category.CLASS),
+ Classification("0212", "BEEF CATTLE, EXCEPT FEEDLOTS", Category.CLASS),
+ Classification("0213", "HOGS", Category.CLASS),
+ Classification("0214", "SHEEP AND GOATS", Category.CLASS),
+ Classification("0219", "GENERAL LIVESTOCK, EXCEPT DAIRY AND POULTRY", Category.CLASS),
+ Classification("024", "DAIRY FARMS", Category.GROUP),
+ Classification("0241", "DAIRY FARMS", Category.CLASS),
+ Classification("025", "POULTRY AND EGGS", Category.GROUP),
+ Classification("0251", "BROILER, FRYER, AND ROASTER CHICKENS", Category.CLASS),
+ Classification("0252", "CHICKEN EGGS", Category.CLASS),
+ Classification("0253", "TURKEYS AND TURKEY EGGS", Category.CLASS),
+ Classification("0254", "POULTRY HATCHERIES", Category.CLASS),
+ Classification("0259", "POULTRY AND EGGS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("027", "ANIMAL SPECIALTIES", Category.GROUP),
+ Classification("0271", "FUR-BEARING ANIMALS AND RABBITS", Category.CLASS),
+ Classification("0272", "HORSES AND OTHER EQUINES", Category.CLASS),
+ Classification("0273", "ANIMAL AQUACULTURE", Category.CLASS),
+ Classification("0279", "ANIMAL SPECIALTIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("029", "GENERAL FARMS, PRIMARILY LIVESTOCK AND ANIMAL SPECIALTIES", Category.GROUP),
+ Classification("0291", "GENERAL FARMS, PRIMARILY LIVESTOCK AND ANIMAL SPECIALTIES", Category.CLASS),
+ Classification("07", "AGRICULTURAL SERVICES", Category.DIVISION),
+ Classification("071", "SOIL PREPARATION SERVICES", Category.GROUP),
+ Classification("0711", "SOIL PREPARATION SERVICES", Category.CLASS),
+ Classification("072", "CROP SERVICES", Category.GROUP),
+ Classification("0721", "CROP PLANTING, CULTIVATING, AND PROTECTING", Category.CLASS),
+ Classification("0722", "CROP HARVESTING, PRIMARILY BY MACHINE", Category.CLASS),
+ Classification("0723", "CROP PREPARATION SERVICES FOR MARKET, EXCEPT COTTON GINNING", Category.CLASS),
+ Classification("0724", "COTTON GINNING", Category.CLASS),
+ Classification("074", "VETERINARY SERVICES", Category.GROUP),
+ Classification("0741", "VETERINARY SERVICES FOR LIVESTOCK", Category.CLASS),
+ Classification("0742", "VETERINARY SERVICES FOR ANIMAL SPECIALTIES", Category.CLASS),
+ Classification("075", "ANIMAL SERVICES, EXCEPT VETERINARY", Category.GROUP),
+ Classification("0751", "LIVESTOCK SERVICES, EXCEPT VETERINARY", Category.CLASS),
+ Classification("0752", "ANIMAL SPECIALTY SERVICES, EXCEPT VETERINARY", Category.CLASS),
+ Classification("076", "FARM LABOR AND MANAGEMENT SERVICES", Category.GROUP),
+ Classification("0761", "FARM LABOR CONTRACTORS AND CREW LEADERS", Category.CLASS),
+ Classification("0762", "FARM MANAGEMENT SERVICES", Category.CLASS),
+ Classification("078", "LANDSCAPE AND HORTICULTURAL SERVICES", Category.GROUP),
+ Classification("0781", "LANDSCAPE COUNSELING AND PLANNING", Category.CLASS),
+ Classification("0782", "LAWN AND GARDEN SERVICES", Category.CLASS),
+ Classification("0783", "ORNAMENTAL SHRUB AND TREE SERVICES", Category.CLASS),
+ Classification("08", "FORESTRY", Category.DIVISION),
+ Classification("081", "TIMBER TRACTS", Category.GROUP),
+ Classification("0811", "TIMBER TRACTS", Category.CLASS),
+ Classification("083", "FOREST NURSERIES AND GATHERING OF FOREST PRODUCTS", Category.GROUP),
+ Classification("0831", "FOREST NURSERIES AND GATHERING OF FOREST PRODUCTS", Category.CLASS),
+ Classification("085", "FORESTRY SERVICES", Category.GROUP),
+ Classification("0851", "FORESTRY SERVICES", Category.CLASS),
+ Classification("09", "FISHING, HUNTING, AND TRAPPING", Category.DIVISION),
+ Classification("091", "COMMERCIAL FISHING", Category.GROUP),
+ Classification("0912", "FINFISH", Category.CLASS),
+ Classification("0913", "SHELLFISH", Category.CLASS),
+ Classification("0919", "MISCELLANEOUS MARINE PRODUCTS", Category.CLASS),
+ Classification("092", "FISH HATCHERIES AND PRESERVES", Category.GROUP),
+ Classification("0921", "FISH HATCHERIES AND PRESERVES", Category.CLASS),
+ Classification("097", "HUNTING AND TRAPPING, AND GAME PROPAGATION", Category.GROUP),
+ Classification("0971", "HUNTING AND TRAPPING, AND GAME PROPAGATION", Category.CLASS),
+ Classification("B", "MINING", Category.SECTION),
+ Classification("10", "METAL MINING", Category.DIVISION),
+ Classification("101", "IRON ORES", Category.GROUP),
+ Classification("1011", "IRON ORES", Category.CLASS),
+ Classification("102", "COPPER ORES", Category.GROUP),
+ Classification("1021", "COPPER ORES", Category.CLASS),
+ Classification("103", "LEAD AND ZINC ORES", Category.GROUP),
+ Classification("1031", "LEAD AND ZINC ORES", Category.CLASS),
+ Classification("104", "GOLD AND SILVER ORES", Category.GROUP),
+ Classification("1041", "GOLD ORES", Category.CLASS),
+ Classification("1044", "SILVER ORES", Category.CLASS),
+ Classification("106", "FERROALLOY ORES, EXCEPT VANADIUM", Category.GROUP),
+ Classification("1061", "FERROALLOY ORES, EXCEPT VANADIUM", Category.CLASS),
+ Classification("108", "METAL MINING SERVICES", Category.GROUP),
+ Classification("1081", "METAL MINING SERVICES", Category.CLASS),
+ Classification("109", "MISCELLANEOUS METAL ORES", Category.GROUP),
+ Classification("1094", "URANIUM-RADIUM-VANADIUM ORES", Category.CLASS),
+ Classification("1099", "MISCELLANEOUS METAL ORES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("12", "COAL MINING", Category.DIVISION),
+ Classification("122", "BITUMINOUS COAL AND LIGNITE MINING", Category.GROUP),
+ Classification("1221", "BITUMINOUS COAL AND LIGNITE SURFACE MINING", Category.CLASS),
+ Classification("1222", "BITUMINOUS COAL UNDERGROUND MINING", Category.CLASS),
+ Classification("123", "ANTHRACITE MINING", Category.GROUP),
+ Classification("1231", "ANTHRACITE MINING", Category.CLASS),
+ Classification("124", "COAL MINING SERVICES", Category.GROUP),
+ Classification("1241", "COAL MINING SERVICES", Category.CLASS),
+ Classification("13", "OIL AND GAS EXTRACTION", Category.DIVISION),
+ Classification("131", "CRUDE PETROLEUM AND NATURAL GAS", Category.GROUP),
+ Classification("1311", "CRUDE PETROLEUM AND NATURAL GAS", Category.CLASS),
+ Classification("132", "NATURAL GAS LIQUIDS", Category.GROUP),
+ Classification("1321", "NATURAL GAS LIQUIDS", Category.CLASS),
+ Classification("138", "OIL AND GAS FIELD SERVICES", Category.GROUP),
+ Classification("1381", "DRILLING OIL AND GAS WELLS", Category.CLASS),
+ Classification("1382", "OIL AND GAS FIELD EXPLORATION SERVICES", Category.CLASS),
+ Classification("1389", "OIL AND GAS FIELD SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("14", "MINING AND QUARRYING OF NONMETALLIC MINERALS, EXCEPT FUELS", Category.DIVISION),
+ Classification("141", "DIMENSION STONE", Category.GROUP),
+ Classification("1411", "DIMENSION STONE", Category.CLASS),
+ Classification("142", "CRUSHED AND BROKEN STONE, INCLUDING RIPRAP", Category.GROUP),
+ Classification("1422", "CRUSHED AND BROKEN LIMESTONE", Category.CLASS),
+ Classification("1423", "CRUSHED AND BROKEN GRANITE", Category.CLASS),
+ Classification("1429", "CRUSHED AND BROKEN STONE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("144", "SAND AND GRAVEL", Category.GROUP),
+ Classification("1442", "CONSTRUCTION SAND AND GRAVEL", Category.CLASS),
+ Classification("1446", "INDUSTRIAL SAND", Category.CLASS),
+ Classification("145", "CLAY, CERAMIC, AND REFRACTORY MINERALS", Category.GROUP),
+ Classification("1455", "KAOLIN AND BALL CLAY", Category.CLASS),
+ Classification("1459", "CLAY, CERAMIC, AND REFRACTORY MINERALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("147", "CHEMICAL AND FERTILIZER MINERAL MINING", Category.GROUP),
+ Classification("1474", "POTASH, SODA, AND BORATE MINERALS", Category.CLASS),
+ Classification("1475", "PHOSPHATE ROCK", Category.CLASS),
+ Classification("1479", "CHEMICAL AND FERTILIZER MINERAL MINING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("148", "NONMETALLIC MINERALS SERVICES, EXCEPT FUELS", Category.GROUP),
+ Classification("1481", "NONMETALLIC MINERALS SERVICES, EXCEPT FUELS", Category.CLASS),
+ Classification("149", "MISCELLANEOUS NONMETALLIC MINERALS, EXCEPT FUELS", Category.GROUP),
+ Classification("1499", "MISCELLANEOUS NONMETALLIC MINERALS, EXCEPT FUELS", Category.CLASS),
+ Classification("C", "CONSTRUCTION", Category.SECTION),
+ Classification("15", "BUILDING CONSTRUCTION-GENERAL CONTRACTORS AND OPERATIVE BUILDERS", Category.DIVISION),
+ Classification("152", "GENERAL BUILDING CONTRACTORS-RESIDENTIAL BUILDINGS", Category.GROUP),
+ Classification("1521", "GENERAL CONTRACTORS-SINGLE-FAMILY HOUSES", Category.CLASS),
+ Classification("1522", "GENERAL CONTRACTORS-RESIDENTIAL BUILDINGS, OTHER THAN SINGLE-FAMI", Category.CLASS),
+ Classification("153", "OPERATIVE BUILDERS", Category.GROUP),
+ Classification("1531", "OPERATIVE BUILDERS", Category.CLASS),
+ Classification("154", "GENERAL BUILDING CONTRACTORS-NONRESIDENTIAL BUILDINGS", Category.GROUP),
+ Classification("1541", "GENERAL CONTRACTORS-INDUSTRIAL BUILDINGS AND WAREHOUSES", Category.CLASS),
+ Classification("1542", "GENERAL CONTRACTORS-NONRESIDENTIAL BUILDINGS, OTHER THAN INDUSTRI", Category.CLASS),
+ Classification("16", "HEAVY CONSTRUCTION OTHER THAN BUILDING CONSTRUCTION-CONTRACTORS", Category.DIVISION),
+ Classification("161", "HIGHWAY AND STREET CONSTRUCTION, EXCEPT ELEVATED HIGHWAYS", Category.GROUP),
+ Classification("1611", "HIGHWAY AND STREET CONSTRUCTION, EXCEPT ELEVATED HIGHWAYS", Category.CLASS),
+ Classification("162", "HEAVY CONSTRUCTION, EXCEPT HIGHWAY AND STREET CONSTRUCTION", Category.GROUP),
+ Classification("1622", "BRIDGE, TUNNEL, AND ELEVATED HIGHWAY CONSTRUCTION", Category.CLASS),
+ Classification("1623", "WATER, SEWER, PIPELINE, AND COMMUNICATIONS AND POWER LINE CONSTRU", Category.CLASS),
+ Classification("1629", "HEAVY CONSTRUCTION, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("17", "CONSTRUCTION-SPECIAL TRADE CONTRACTORS", Category.DIVISION),
+ Classification("171", "PLUMBING, HEATING AND AIR-CONDITIONING", Category.GROUP),
+ Classification("1711", "PLUMBING, HEATING AND AIR-CONDITIONING", Category.CLASS),
+ Classification("172", "PAINTING AND PAPER HANGING", Category.GROUP),
+ Classification("1721", "PAINTING AND PAPER HANGING", Category.CLASS),
+ Classification("173", "ELECTRICAL WORK", Category.GROUP),
+ Classification("1731", "ELECTRICAL WORK", Category.CLASS),
+ Classification("174", "MASONRY, STONEWORK, TILE SETTING, AND PLASTERING", Category.GROUP),
+ Classification("1741", "MASONRY, STONE SETTING, AND OTHER STONE WORK", Category.CLASS),
+ Classification("1742", "PLASTERING, DRYWALL, ACOUSTICAL, AND INSULATION WORK", Category.CLASS),
+ Classification("1743", "TERRAZZO, TILE, MARBLE, AND MOSAIC WORK", Category.CLASS),
+ Classification("175", "CARPENTRY AND FLOOR WORK", Category.GROUP),
+ Classification("1751", "CARPENTRY WORK", Category.CLASS),
+ Classification("1752", "FLOOR LAYING AND OTHER FLOOR WORK, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("176", "ROOFING, SIDING, AND SHEET METAL WORK", Category.GROUP),
+ Classification("1761", "ROOFING, SIDING, AND SHEET METAL WORK", Category.CLASS),
+ Classification("177", "CONCRETE WORK", Category.GROUP),
+ Classification("1771", "CONCRETE WORK", Category.CLASS),
+ Classification("178", "WATER WELL DRILLING", Category.GROUP),
+ Classification("1781", "WATER WELL DRILLING", Category.CLASS),
+ Classification("179", "MISCELLANEOUS SPECIAL TRADE CONTRACTORS", Category.GROUP),
+ Classification("1791", "STRUCTURAL STEEL ERECTION", Category.CLASS),
+ Classification("1793", "GLASS AND GLAZING WORK", Category.CLASS),
+ Classification("1793", "GLASS INSTALLATION, EXCEPT AUTOMOTIVE-CONTRACTORS", Category.CLASS),
+ Classification("1794", "EXCAVATION WORK", Category.CLASS),
+ Classification("1795", "WRECKING AND DEMOLITION WORK", Category.CLASS),
+ Classification("1796", "INSTALLATION OR ERECTION OF BUILDING EQUIPMENT, NOT ELSEWHERE CLA", Category.CLASS),
+ Classification("1799", "SPECIAL TRADE CONTRACTORS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("D", "MANUFACTURING", Category.SECTION),
+ Classification("20", "FOOD AND KINDRED PRODUCTS", Category.DIVISION),
+ Classification("201", "MEAT PRODUCTS", Category.GROUP),
+ Classification("2011", "MEAT PACKING PLANTS", Category.CLASS),
+ Classification("2013", "SAUSAGES AND OTHER PREPARED MEAT PRODUCTS", Category.CLASS),
+ Classification("2015", "POULTRY SLAUGHTERING AND PROCESSING", Category.CLASS),
+ Classification("202", "DAIRY PRODUCTS", Category.GROUP),
+ Classification("2021", "CREAMERY BUTTER", Category.CLASS),
+ Classification("2022", "NATURAL, PROCESSED, AND IMITATION CHEESE", Category.CLASS),
+ Classification("2023", "DRY, CONDENSED, AND EVAPORATED DAIRY PRODUCTS", Category.CLASS),
+ Classification("2024", "ICE CREAM AND FROZEN DESSERTS", Category.CLASS),
+ Classification("2026", "FLUID MILK", Category.CLASS),
+ Classification("203", "CANNED, FROZEN, AND PRESERVED FRUITS, VEGETABLES, AND FOOD SPECIAL", Category.GROUP),
+ Classification("2032", "CANNED SPECIALTIES", Category.CLASS),
+ Classification("2033", "CANNED FRUITS, VEGETABLES, PRESERVES, JAMS, AND JELLIES", Category.CLASS),
+ Classification("2034", "DRIED AND DEHYDRATED FRUITS, VEGETABLES, AND SOUP MIXES", Category.CLASS),
+ Classification("2035", "PICKLED FRUITS AND VEGETABLES, VEGETABLE SAUCES AND SEASONINGS, A", Category.CLASS),
+ Classification("2037", "FROZEN FRUITS, FRUIT JUICES, AND VEGETABLES", Category.CLASS),
+ Classification("2038", "FROZEN SPECIALTIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("204", "GRAIN MILL PRODUCTS", Category.GROUP),
+ Classification("2041", "FLOUR AND OTHER GRAIN MILL PRODUCTS", Category.CLASS),
+ Classification("2043", "CEREAL BREAKFAST FOODS", Category.CLASS),
+ Classification("2044", "RICE MILLING", Category.CLASS),
+ Classification("2045", "PREPARED FLOUR MIXES AND DOUGHS", Category.CLASS),
+ Classification("2046", "WET CORN MILLING", Category.CLASS),
+ Classification("2047", "DOG AND CAT FOOD", Category.CLASS),
+ Classification("2048", "PREPARED FEEDS AND FEED INGREDIENTS FOR ANIMALS AND FOWLS, EXCEPT", Category.CLASS),
+ Classification("205", "BAKERY PRODUCTS", Category.GROUP),
+ Classification("2051", "BREAD AND OTHER BAKERY PRODUCTS, EXCEPT COOKIES AND CRACKERS", Category.CLASS),
+ Classification("2052", "COOKIES AND CRACKERS", Category.CLASS),
+ Classification("2053", "FROZEN BAKERY PRODUCTS, EXCEPT BREAD", Category.CLASS),
+ Classification("206", "SUGAR AND CONFECTIONERY PRODUCTS", Category.GROUP),
+ Classification("2061", "CANE SUGAR, EXCEPT REFINING", Category.CLASS),
+ Classification("2062", "CANE SUGAR REFINING", Category.CLASS),
+ Classification("2063", "BEET SUGAR", Category.CLASS),
+ Classification("2064", "CANDY AND OTHER CONFECTIONERY PRODUCTS", Category.CLASS),
+ Classification("2066", "CHOCOLATE AND COCOA PRODUCTS", Category.CLASS),
+ Classification("2067", "CHEWING GUM", Category.CLASS),
+ Classification("2068", "SALTED AND ROASTED NUTS AND SEEDS", Category.CLASS),
+ Classification("207", "FATS AND OILS", Category.GROUP),
+ Classification("2074", "COTTONSEED OIL MILLS", Category.CLASS),
+ Classification("2075", "SOYBEAN OIL MILLS", Category.CLASS),
+ Classification("2076", "VEGETABLE OIL MILLS, EXCEPT CORN, COTTONSEED, AND SOYBEAN", Category.CLASS),
+ Classification("2077", "ANIMAL AND MARINE FATS AND OILS", Category.CLASS),
+ Classification("2079", "SHORTENING, TABLE OILS, MARGARINE, AND OTHER EDIBLE FATS AND OILS", Category.CLASS),
+ Classification("208", "BEVERAGES", Category.GROUP),
+ Classification("2082", "MALT BEVERAGES", Category.CLASS),
+ Classification("2083", "MALT", Category.CLASS),
+ Classification("2084", "WINES, BRANDY, AND BRANDY SPIRITS", Category.CLASS),
+ Classification("2085", "DISTILLED AND BLENDED LIQUORS", Category.CLASS),
+ Classification("2086", "BOTTLED AND CANNED SOFT DRINKS AND CARBONATED WATERS", Category.CLASS),
+ Classification("2087", "FLAVORING EXTRACTS AND FLAVORING SYRUPS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("209", "MISCELLANEOUS FOOD PREPARATIONS AND KINDRED PRODUCTS", Category.GROUP),
+ Classification("2091", "CANNED AND CURED FISH AND SEAFOODS", Category.CLASS),
+ Classification("2092", "PREPARED FRESH OR FROZEN FISH AND SEAFOODS", Category.CLASS),
+ Classification("2095", "ROASTED COFFEE", Category.CLASS),
+ Classification("2096", "POTATO CHIPS, CORN CHIPS, AND SIMILAR SNACKS", Category.CLASS),
+ Classification("2097", "MANUFACTURED ICE", Category.CLASS),
+ Classification("2098", "MACARONI, SPAGHETTI, VERMICELLI, AND NOODLES", Category.CLASS),
+ Classification("2099", "FOOD PREPARATIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("21", "TOBACCO PRODUCTS", Category.DIVISION),
+ Classification("211", "CIGARETTES", Category.GROUP),
+ Classification("2111", "CIGARETTES", Category.CLASS),
+ Classification("212", "CIGARS", Category.GROUP),
+ Classification("2121", "CIGARS", Category.CLASS),
+ Classification("213", "CHEWING AND SMOKING TOBACCO AND SNUFF", Category.GROUP),
+ Classification("2131", "CHEWING AND SMOKING TOBACCO AND SNUFF", Category.CLASS),
+ Classification("214", "TOBACCO STEMMING AND REDRYING", Category.GROUP),
+ Classification("2141", "TOBACCO STEMMING AND REDRYING", Category.CLASS),
+ Classification("22", "TEXTILE MILL PRODUCTS", Category.DIVISION),
+ Classification("221", "BROADWOVEN FABRIC MILLS, COTTON", Category.GROUP),
+ Classification("2211", "BROADWOVEN FABRIC MILLS, COTTON", Category.CLASS),
+ Classification("222", "BROADWOVEN FABRIC MILLS, MANMADE FIBER AND SILK", Category.GROUP),
+ Classification("2221", "BROADWOVEN FABRIC MILLS, MANMADE FIBER AND SILK", Category.CLASS),
+ Classification("223", "BROADWOVEN FABRIC MILLS, WOOL (INCLUDING DYEING AND FINISHING)", Category.GROUP),
+ Classification("2231", "BROADWOVEN FABRIC MILLS, WOOL (INCLUDING DYEING AND FINISHING)", Category.CLASS),
+ Classification("224", "NARROW FABRIC AND OTHER SMALLWARES MILLS: COTTON, WOOL, SILK, AND", Category.GROUP),
+ Classification("2241", "NARROW FABRIC AND OTHER SMALLWARES MILLS: COTTON, WOOL, SILK, AND", Category.CLASS),
+ Classification("225", "KNITTING MILLS", Category.GROUP),
+ Classification("2251", "WOMEN'S FULL-LENGTH AND KNEE-LENGTH HOSIERY, EXCEPT SOCKS", Category.CLASS),
+ Classification("2252", "HOSIERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("2253", "KNIT OUTERWEAR MILLS", Category.CLASS),
+ Classification("2254", "KNIT UNDERWEAR AND NIGHTWEAR MILLS", Category.CLASS),
+ Classification("2257", "WEFT KNIT FABRIC MILLS", Category.CLASS),
+ Classification("2258", "LACE AND WARP KNIT FABRIC MILLS", Category.CLASS),
+ Classification("2259", "KNITTING MILLS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("226", "DYEING AND FINISHING TEXTILES, EXCEPT WOOL FABRICS AND KNIT GOODS", Category.GROUP),
+ Classification("2261", "FINISHERS OF BROADWOVEN FABRICS OF COTTON", Category.CLASS),
+ Classification("2262", "FINISHERS OF BROADWOVEN FABRICS OF MANMADE FIBER AND SILK", Category.CLASS),
+ Classification("2269", "FINISHERS OF TEXTILES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("227", "CARPETS AND RUGS", Category.GROUP),
+ Classification("2273", "CARPETS AND RUGS", Category.CLASS),
+ Classification("228", "YARN AND THREAD MILLS", Category.GROUP),
+ Classification("2281", "YARN SPINNING MILLS", Category.CLASS),
+ Classification("2282", "YARN TEXTURIZING, THROWING, TWISTING, AND WINDING MILLS", Category.CLASS),
+ Classification("2282", "ACETATE FILAMENT YARN: THROWING, TWISTING, WINDING, OR SPOOLING", Category.CLASS),
+ Classification("2284", "THREAD MILLS", Category.CLASS),
+ Classification("229", "MISCELLANEOUS TEXTILE GOODS", Category.GROUP),
+ Classification("2295", "COATED FABRICS, NOT RUBBERIZED", Category.CLASS),
+ Classification("2296", "TIRE CORD AND FABRICS", Category.CLASS),
+ Classification("2297", "NONWOVEN FABRICS", Category.CLASS),
+ Classification("2298", "CORDAGE AND TWINE", Category.CLASS),
+ Classification("2299", "TEXTILE GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification(
+ "23", "APPAREL AND OTHER FINISHED PRODUCTS MADE FROM FABRICS AND SIMILAR MATERIAL", Category.DIVISION
+ ),
+ Classification("231", "MEN'S AND BOYS' SUITS, COATS, AND OVERCOATS", Category.GROUP),
+ Classification("2311", "MEN'S AND BOYS' SUITS, COATS, AND OVERCOATS", Category.CLASS),
+ Classification("232", "MEN'S AND BOYS' FURNISHINGS, WORK CLOTHING, AND ALLIED GARMENTS", Category.GROUP),
+ Classification("2321", "MEN'S AND BOYS' SHIRTS, EXCEPT WORK SHIRTS", Category.CLASS),
+ Classification("2322", "MEN'S AND BOYS' UNDERWEAR AND NIGHTWEAR", Category.CLASS),
+ Classification("2323", "MEN'S AND BOYS' NECKWEAR", Category.CLASS),
+ Classification("2325", "MEN'S AND BOYS' SEPARATE TROUSERS AND SLACKS", Category.CLASS),
+ Classification("2326", "MEN'S AND BOYS' WORK CLOTHING", Category.CLASS),
+ Classification("2329", "MEN'S AND BOYS' CLOTHING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("233", "WOMEN'S, MISSES', AND JUNIORS' OUTERWEAR", Category.GROUP),
+ Classification("2331", "WOMEN'S, MISSES', AND JUNIORS' BLOUSES AND SHIRTS", Category.CLASS),
+ Classification("2335", "WOMEN'S, MISSES', AND JUNIORS' DRESSES", Category.CLASS),
+ Classification("2337", "WOMEN'S, MISSES', AND JUNIORS' SUITS, SKIRTS, AND COATS", Category.CLASS),
+ Classification("2339", "WOMEN'S, MISSES', AND JUNIORS' OUTERWEAR, NOT ELSEWHERE CLASSIFIE", Category.CLASS),
+ Classification("234", "WOMEN'S, MISSES', CHILDREN'S, AND INFANTS' UNDERGARMENTS", Category.GROUP),
+ Classification("2341", "WOMEN'S, MISSES', CHILDREN'S, AND INFANTS' UNDERWEAR AND NIGHTWEA", Category.CLASS),
+ Classification("2342", "BRASSIERES, GIRDLES, AND ALLIED GARMENTS", Category.CLASS),
+ Classification("235", "HATS, CAPS, AND MILLINERY", Category.GROUP),
+ Classification("2353", "HATS, CAPS, AND MILLINERY", Category.CLASS),
+ Classification("236", "GIRLS', CHILDREN'S, AND INFANTS' OUTERWEAR", Category.GROUP),
+ Classification("2361", "GIRLS', CHILDREN'S, AND INFANTS' DRESSES, BLOUSES, AND SHIRTS", Category.CLASS),
+ Classification("2369", "GIRLS', CHILDREN'S, AND INFANTS' OUTERWEAR, NOT ELSEWHERE CLASSIF", Category.CLASS),
+ Classification("237", "FUR GOODS", Category.GROUP),
+ Classification("2371", "FUR GOODS", Category.CLASS),
+ Classification("238", "MISCELLANEOUS APPAREL AND ACCESSORIES", Category.GROUP),
+ Classification("2381", "DRESS AND WORK GLOVES, EXCEPT KNIT AND ALL-LEATHER", Category.CLASS),
+ Classification("2384", "ROBES AND DRESSING GOWNS", Category.CLASS),
+ Classification("2385", "WATERPROOF OUTERWEAR", Category.CLASS),
+ Classification("2386", "LEATHER AND SHEEP-LINED CLOTHING", Category.CLASS),
+ Classification("2387", "APPAREL BELTS", Category.CLASS),
+ Classification("2389", "APPAREL AND ACCESSORIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("239", "MISCELLANEOUS FABRICATED TEXTILE PRODUCTS", Category.GROUP),
+ Classification("2391", "CURTAINS AND DRAPERIES", Category.CLASS),
+ Classification("2392", "HOUSEFURNISHINGS, EXCEPT CURTAINS AND DRAPERIES", Category.CLASS),
+ Classification("2393", "TEXTILE BAGS", Category.CLASS),
+ Classification("2394", "CANVAS AND RELATED PRODUCTS", Category.CLASS),
+ Classification("2395", "PLEATING, DECORATIVE AND NOVELTY STITCHING, AND TUCKING FOR THE T", Category.CLASS),
+ Classification("2396", "AUTOMOTIVE TRIMMINGS, APPAREL FINDINGS, AND RELATED PRODUCTS", Category.CLASS),
+ Classification("2397", "SCHIFFLI MACHINE EMBROIDERIES", Category.CLASS),
+ Classification("2399", "FABRICATED TEXTILE PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("24", "LUMBER AND WOOD PRODUCTS, EXCEPT FURNITURE", Category.DIVISION),
+ Classification("241", "LOGGING", Category.GROUP),
+ Classification("2411", "LOGGING", Category.CLASS),
+ Classification("242", "SAWMILLS AND PLANING MILLS", Category.GROUP),
+ Classification("2421", "SAWMILLS AND PLANING MILLS, GENERAL", Category.CLASS),
+ Classification("2426", "HARDWOOD DIMENSION AND FLOORING MILLS", Category.CLASS),
+ Classification("2429", "SPECIAL PRODUCT SAWMILLS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("243", "MILLWORK, VENEER, PLYWOOD, AND STRUCTURAL WOOD MEMBERS", Category.GROUP),
+ Classification("2431", "MILLWORK", Category.CLASS),
+ Classification("2434", "WOOD KITCHEN CABINETS", Category.CLASS),
+ Classification("2435", "HARDWOOD VENEER AND PLYWOOD", Category.CLASS),
+ Classification("2436", "SOFTWOOD VENEER AND PLYWOOD", Category.CLASS),
+ Classification("2439", "STRUCTURAL WOOD MEMBERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("244", "WOOD CONTAINERS", Category.GROUP),
+ Classification("2441", "NAILED AND LOCK CORNER WOOD BOXES AND SHOOK", Category.CLASS),
+ Classification("2448", "WOOD PALLETS AND SKIDS", Category.CLASS),
+ Classification("2449", "WOOD CONTAINERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("245", "WOOD BUILDINGS AND MOBILE HOMES", Category.GROUP),
+ Classification("2451", "MOBILE HOMES", Category.CLASS),
+ Classification("2452", "PREFABRICATED WOOD BUILDINGS AND COMPONENTS", Category.CLASS),
+ Classification("249", "MISCELLANEOUS WOOD PRODUCTS", Category.GROUP),
+ Classification("2491", "WOOD PRESERVING", Category.CLASS),
+ Classification("2493", "RECONSTITUTED WOOD PRODUCTS", Category.CLASS),
+ Classification("2499", "WOOD PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("25", "FURNITURE AND FIXTURES", Category.DIVISION),
+ Classification("251", "HOUSEHOLD FURNITURE", Category.GROUP),
+ Classification("2511", "WOOD HOUSEHOLD FURNITURE, EXCEPT UPHOLSTERED", Category.CLASS),
+ Classification("2512", "WOOD HOUSEHOLD FURNITURE, UPHOLSTERED", Category.CLASS),
+ Classification("2514", "METAL HOUSEHOLD FURNITURE", Category.CLASS),
+ Classification("2515", "MATTRESSES, FOUNDATIONS, AND CONVERTIBLE BEDS", Category.CLASS),
+ Classification("2517", "WOOD TELEVISION, RADIO, PHONOGRAPH, AND SEWING MACHINE CABINETS", Category.CLASS),
+ Classification("2519", "HOUSEHOLD FURNITURE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("252", "OFFICE FURNITURE", Category.GROUP),
+ Classification("2521", "WOOD OFFICE FURNITURE", Category.CLASS),
+ Classification("2522", "OFFICE FURNITURE, EXCEPT WOOD", Category.CLASS),
+ Classification("253", "PUBLIC BUILDING AND RELATED FURNITURE", Category.GROUP),
+ Classification("2531", "PUBLIC BUILDING AND RELATED FURNITURE", Category.CLASS),
+ Classification("254", "PARTITIONS, SHELVING, LOCKERS, AND OFFICE AND STORE FIXTURES", Category.GROUP),
+ Classification("2541", "WOOD OFFICE AND STORE FIXTURES, PARTITIONS, SHELVING, AND LOCKERS", Category.CLASS),
+ Classification("2542", "OFFICE AND STORE FIXTURES, PARTITIONS, SHELVING, AND LOCKERS, EXC", Category.CLASS),
+ Classification("259", "MISCELLANEOUS FURNITURE AND FIXTURES", Category.GROUP),
+ Classification("2591", "DRAPERY HARDWARE AND WINDOW BLINDS AND SHADES", Category.CLASS),
+ Classification("2599", "FURNITURE AND FIXTURES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("26", "PAPER AND ALLIED PRODUCTS", Category.DIVISION),
+ Classification("261", "PULP MILLS", Category.GROUP),
+ Classification("2611", "PULP MILLS", Category.CLASS),
+ Classification("262", "PAPER MILLS", Category.GROUP),
+ Classification("2621", "PAPER MILLS", Category.CLASS),
+ Classification("263", "PAPERBOARD MILLS", Category.GROUP),
+ Classification("2631", "PAPERBOARD MILLS", Category.CLASS),
+ Classification("265", "PAPERBOARD CONTAINERS AND BOXES", Category.GROUP),
+ Classification("2652", "SETUP PAPERBOARD BOXES", Category.CLASS),
+ Classification("2653", "CORRUGATED AND SOLID FIBER BOXES", Category.CLASS),
+ Classification("2655", "FIBER CANS, TUBES, DRUMS, AND SIMILAR PRODUCTS", Category.CLASS),
+ Classification("2656", "SANITARY FOOD CONTAINERS, EXCEPT FOLDING", Category.CLASS),
+ Classification("2657", "FOLDING PAPERBOARD BOXES, INCLUDING SANITARY", Category.CLASS),
+ Classification("267", "CONVERTED PAPER AND PAPERBOARD PRODUCTS, EXCEPT CONTAINERS AND BOX", Category.GROUP),
+ Classification("2671", "PACKAGING PAPER AND PLASTICS FILM, COATED AND LAMINATED", Category.CLASS),
+ Classification("2672", "COATED AND LAMINATED PAPER, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("2673", "PLASTICS, FOIL, AND COATED PAPER BAGS", Category.CLASS),
+ Classification("2674", "UNCOATED PAPER AND MULTIWALL BAGS", Category.CLASS),
+ Classification("2675", "DIE-CUT PAPER AND PAPERBOARD AND CARDBOARD", Category.CLASS),
+ Classification("2676", "SANITARY PAPER PRODUCTS", Category.CLASS),
+ Classification("2677", "ENVELOPES", Category.CLASS),
+ Classification("2678", "STATIONERY, TABLETS, AND RELATED PRODUCTS", Category.CLASS),
+ Classification("2679", "CONVERTED PAPER AND PAPERBOARD PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("27", "PRINTING, PUBLISHING, AND ALLIED INDUSTRIES", Category.DIVISION),
+ Classification("271", "NEWSPAPERS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.GROUP),
+ Classification("2711", "NEWSPAPERS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.CLASS),
+ Classification("272", "PERIODICALS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.GROUP),
+ Classification("2721", "PERIODICALS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.CLASS),
+ Classification("273", "BOOKS", Category.GROUP),
+ Classification("2731", "BOOKS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.CLASS),
+ Classification("2732", "BOOK PRINTING", Category.CLASS),
+ Classification("274", "MISCELLANEOUS PUBLISHING", Category.GROUP),
+ Classification("2741", "MISCELLANEOUS PUBLISHING", Category.CLASS),
+ Classification("275", "COMMERCIAL PRINTING", Category.GROUP),
+ Classification("2752", "COMMERCIAL PRINTING, LITHOGRAPHIC", Category.CLASS),
+ Classification("2754", "COMMERCIAL PRINTING, GRAVURE", Category.CLASS),
+ Classification("2759", "COMMERCIAL PRINTING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("276", "MANIFOLD BUSINESS FORMS", Category.GROUP),
+ Classification("2761", "MANIFOLD BUSINESS FORMS", Category.CLASS),
+ Classification("277", "GREETING CARDS", Category.GROUP),
+ Classification("2771", "GREETING CARDS", Category.CLASS),
+ Classification("278", "BLANKBOOKS, LOOSELEAF BINDERS, AND BOOKBINDING AND RELATED WORK", Category.GROUP),
+ Classification("2782", "BLANKBOOKS, LOOSELEAF BINDERS AND DEVICES", Category.CLASS),
+ Classification("2789", "BOOKBINDING AND RELATED WORK", Category.CLASS),
+ Classification("279", "SERVICE INDUSTRIES FOR THE PRINTING TRADE", Category.GROUP),
+ Classification("2791", "TYPESETTING", Category.CLASS),
+ Classification("2796", "PLATEMAKING AND RELATED SERVICES", Category.CLASS),
+ Classification("28", "CHEMICALS AND ALLIED PRODUCTS", Category.DIVISION),
+ Classification("281", "INDUSTRIAL INORGANIC CHEMICALS", Category.GROUP),
+ Classification("2812", "ALKALIES AND CHLORINE", Category.CLASS),
+ Classification("2813", "INDUSTRIAL GASES", Category.CLASS),
+ Classification("2816", "INORGANIC PIGMENTS", Category.CLASS),
+ Classification("2819", "INDUSTRIAL INORGANIC CHEMICALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("282", "PLASTICS MATERIALS AND SYNTHETIC RESINS, SYNTHETIC RUBBER, CELLULO", Category.GROUP),
+ Classification("2821", "PLASTICS MATERIALS, SYNTHETIC RESINS, AND NONVULCANIZABLE ELASTOM", Category.CLASS),
+ Classification("2822", "SYNTHETIC RUBBER (VULCANIZABLE ELASTOMERS)", Category.CLASS),
+ Classification("2823", "CELLULOSIC MANMADE FIBERS", Category.CLASS),
+ Classification("2824", "MANMADE ORGANIC FIBERS, EXCEPT CELLULOSIC", Category.CLASS),
+ Classification("283", "DRUGS", Category.GROUP),
+ Classification("2833", "MEDICINAL CHEMICALS AND BOTANICAL PRODUCTS", Category.CLASS),
+ Classification("2834", "PHARMACEUTICAL PREPARATIONS", Category.CLASS),
+ Classification("2835", "IN VITRO AND IN VIVO DIAGNOSTIC SUBSTANCES", Category.CLASS),
+ Classification("2836", "BIOLOGICAL PRODUCTS, EXCEPT DIAGNOSTIC SUBSTANCES", Category.CLASS),
+ Classification("284", "SOAP, DETERGENTS, AND CLEANING PREPARATIONS; PERFUMES, COSMETICS", Category.GROUP),
+ Classification("2841", "SOAP AND OTHER DETERGENTS, EXCEPT SPECIALTY CLEANERS", Category.CLASS),
+ Classification("2842", "SPECIALTY CLEANING, POLISHING, AND SANITATION PREPARATIONS", Category.CLASS),
+ Classification("2843", "SURFACE ACTIVE AGENTS, FINISHING AGENTS, SULFONATED OILS, AND ASS", Category.CLASS),
+ Classification("2844", "PERFUMES, COSMETICS, AND OTHER TOILET PREPARATIONS", Category.CLASS),
+ Classification("285", "PAINTS, VARNISHES, LACQUERS, ENAMELS, AND ALLIED PRODUCTS", Category.GROUP),
+ Classification("2851", "PAINTS, VARNISHES, LACQUERS, ENAMELS, AND ALLIED PRODUCTS", Category.CLASS),
+ Classification("286", "INDUSTRIAL ORGANIC CHEMICALS", Category.GROUP),
+ Classification("2861", "GUM AND WOOD CHEMICALS", Category.CLASS),
+ Classification("2865", "CYCLIC ORGANIC CRUDES AND INTERMEDIATES, AND ORGANIC DYES AND PIG", Category.CLASS),
+ Classification("2869", "INDUSTRIAL ORGANIC CHEMICALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("287", "AGRICULTURAL CHEMICALS", Category.GROUP),
+ Classification("2873", "NITROGENOUS FERTILIZERS", Category.CLASS),
+ Classification("2874", "PHOSPHATIC FERTILIZERS", Category.CLASS),
+ Classification("2875", "FERTILIZERS, MIXING ONLY", Category.CLASS),
+ Classification("2879", "PESTICIDES AND AGRICULTURAL CHEMICALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("289", "MISCELLANEOUS CHEMICAL PRODUCTS", Category.GROUP),
+ Classification("2891", "ADHESIVES AND SEALANTS", Category.CLASS),
+ Classification("2892", "EXPLOSIVES", Category.CLASS),
+ Classification("2893", "PRINTING INK", Category.CLASS),
+ Classification("2895", "CARBON BLACK", Category.CLASS),
+ Classification("2899", "CHEMICALS AND CHEMICAL PREPARATIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("29", "PETROLEUM REFINING AND RELATED INDUSTRIES", Category.DIVISION),
+ Classification("291", "PETROLEUM REFINING", Category.GROUP),
+ Classification("2911", "PETROLEUM REFINING", Category.CLASS),
+ Classification("295", "ASPHALT PAVING AND ROOFING MATERIALS", Category.GROUP),
+ Classification("2951", "ASPHALT PAVING MIXTURES AND BLOCKS", Category.CLASS),
+ Classification("2952", "ASPHALT FELTS AND COATINGS", Category.CLASS),
+ Classification("299", "MISCELLANEOUS PRODUCTS OF PETROLEUM AND COAL", Category.GROUP),
+ Classification("2992", "LUBRICATING OILS AND GREASES", Category.CLASS),
+ Classification("2999", "PRODUCTS OF PETROLEUM AND COAL, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("30", "RUBBER AND MISCELLANEOUS PLASTICS PRODUCTS", Category.DIVISION),
+ Classification("301", "TIRES AND INNER TUBES", Category.GROUP),
+ Classification("3011", "TIRES AND INNER TUBES", Category.CLASS),
+ Classification("302", "RUBBER AND PLASTICS FOOTWEAR", Category.GROUP),
+ Classification("3021", "RUBBER AND PLASTICS FOOTWEAR", Category.CLASS),
+ Classification("305", "GASKETS, PACKING, AND SEALING DEVICES AND RUBBER AND PLASTICS HOSE", Category.GROUP),
+ Classification("3052", "RUBBER AND PLASTICS HOSE AND BELTING", Category.CLASS),
+ Classification("3053", "GASKETS, PACKING, AND SEALING DEVICES", Category.CLASS),
+ Classification("306", "FABRICATED RUBBER PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("3061", "MOLDED, EXTRUDED, AND LATHE-CUT MECHANICAL RUBBER GOODS", Category.CLASS),
+ Classification("3069", "FABRICATED RUBBER PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("308", "MISCELLANEOUS PLASTICS PRODUCTS", Category.GROUP),
+ Classification("3081", "UNSUPPORTED PLASTICS FILM AND SHEET", Category.CLASS),
+ Classification("3082", "UNSUPPORTED PLASTICS PROFILE SHAPES", Category.CLASS),
+ Classification("3083", "LAMINATED PLASTICS PLATE, SHEET, AND PROFILE SHAPES", Category.CLASS),
+ Classification("3084", "PLASTICS PIPE", Category.CLASS),
+ Classification("3085", "PLASTICS BOTTLES", Category.CLASS),
+ Classification("3086", "PLASTICS FOAM PRODUCTS", Category.CLASS),
+ Classification("3087", "CUSTOM COMPOUNDING OF PURCHASED PLASTICS RESINS", Category.CLASS),
+ Classification("3088", "PLASTICS PLUMBING FIXTURES", Category.CLASS),
+ Classification("3089", "PLASTICS PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("31", "LEATHER AND LEATHER PRODUCTS", Category.DIVISION),
+ Classification("311", "LEATHER TANNING AND FINISHING", Category.GROUP),
+ Classification("3111", "LEATHER TANNING AND FINISHING", Category.CLASS),
+ Classification("313", "BOOT AND SHOE CUT STOCK AND FINDINGS", Category.GROUP),
+ Classification("3131", "BOOT AND SHOE CUT STOCK AND FINDINGS", Category.CLASS),
+ Classification("314", "FOOTWEAR, EXCEPT RUBBER", Category.GROUP),
+ Classification("3142", "HOUSE SLIPPERS", Category.CLASS),
+ Classification("3143", "MEN'S FOOTWEAR, EXCEPT ATHLETIC", Category.CLASS),
+ Classification("3144", "WOMEN'S FOOTWEAR, EXCEPT ATHLETIC", Category.CLASS),
+ Classification("3149", "FOOTWEAR, EXCEPT RUBBER, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("315", "LEATHER GLOVES AND MITTENS", Category.GROUP),
+ Classification("3151", "LEATHER GLOVES AND MITTENS", Category.CLASS),
+ Classification("316", "LUGGAGE", Category.GROUP),
+ Classification("3161", "LUGGAGE", Category.CLASS),
+ Classification("317", "HANDBAGS AND OTHER PERSONAL LEATHER GOODS", Category.GROUP),
+ Classification("3171", "WOMEN'S HANDBAGS AND PURSES", Category.CLASS),
+ Classification("3172", "PERSONAL LEATHER GOODS, EXCEPT WOMEN'S HANDBAGS AND PURSES", Category.CLASS),
+ Classification("319", "LEATHER GOODS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("3199", "LEATHER GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("32", "STONE, CLAY, GLASS, AND CONCRETE PRODUCTS", Category.DIVISION),
+ Classification("321", "FLAT GLASS", Category.GROUP),
+ Classification("3211", "FLAT GLASS", Category.CLASS),
+ Classification("322", "GLASS AND GLASSWARE, PRESSED OR BLOWN", Category.GROUP),
+ Classification("3221", "GLASS CONTAINERS", Category.CLASS),
+ Classification("3229", "PRESSED AND BLOWN GLASS AND GLASSWARE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("323", "GLASS PRODUCTS, MADE OF PURCHASED GLASS", Category.GROUP),
+ Classification("3231", "GLASS PRODUCTS, MADE OF PURCHASED GLASS", Category.CLASS),
+ Classification("324", "CEMENT, HYDRAULIC", Category.GROUP),
+ Classification("3241", "CEMENT, HYDRAULIC", Category.CLASS),
+ Classification("325", "STRUCTURAL CLAY PRODUCTS", Category.GROUP),
+ Classification("3251", "BRICK AND STRUCTURAL CLAY TILE", Category.CLASS),
+ Classification("3253", "CERAMIC WALL AND FLOOR TILE", Category.CLASS),
+ Classification("3255", "CLAY REFRACTORIES", Category.CLASS),
+ Classification("3259", "STRUCTURAL CLAY PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("326", "POTTERY AND RELATED PRODUCTS", Category.GROUP),
+ Classification("3261", "VITREOUS CHINA PLUMBING FIXTURES AND CHINA AND EARTHENWARE FITTIN", Category.CLASS),
+ Classification("3262", "VITREOUS CHINA TABLE AND KITCHEN ARTICLES", Category.CLASS),
+ Classification("3263", "FINE EARTHENWARE (WHITEWARE) TABLE AND KITCHEN ARTICLES", Category.CLASS),
+ Classification("3264", "PORCELAIN ELECTRICAL SUPPLIES", Category.CLASS),
+ Classification("3269", "POTTERY PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("327", "CONCRETE, GYPSUM, AND PLASTER PRODUCTS", Category.GROUP),
+ Classification("3271", "CONCRETE BLOCK AND BRICK", Category.CLASS),
+ Classification("3272", "CONCRETE PRODUCTS, EXCEPT BLOCK AND BRICK", Category.CLASS),
+ Classification("3273", "READY-MIXED CONCRETE", Category.CLASS),
+ Classification("3274", "LIME", Category.CLASS),
+ Classification("3275", "GYPSUM PRODUCTS", Category.CLASS),
+ Classification("328", "CUT STONE AND STONE PRODUCTS", Category.GROUP),
+ Classification("3281", "CUT STONE AND STONE PRODUCTS", Category.CLASS),
+ Classification("329", "ABRASIVE, ASBESTOS, AND MISCELLANEOUS NONMETALLIC MINERAL PRODUCTS", Category.GROUP),
+ Classification("3291", "ABRASIVE PRODUCTS", Category.CLASS),
+ Classification("3292", "ASBESTOS PRODUCTS", Category.CLASS),
+ Classification("3295", "MINERALS AND EARTHS, GROUND OR OTHERWISE TREATED", Category.CLASS),
+ Classification("3296", "MINERAL WOOL", Category.CLASS),
+ Classification("3297", "NONCLAY REFRACTORIES", Category.CLASS),
+ Classification("3299", "NONMETALLIC MINERAL PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("33", "PRIMARY METAL INDUSTRIES", Category.DIVISION),
+ Classification("331", "STEEL WORKS, BLAST FURNACES, AND ROLLING AND FINISHING MILLS", Category.GROUP),
+ Classification("3312", "STEEL WORKS, BLAST FURNACES (INCLUDING COKE OVENS), AND ROLLING M", Category.CLASS),
+ Classification("3313", "ELECTROMETALLURGICAL PRODUCTS, EXCEPT STEEL", Category.CLASS),
+ Classification("3315", "STEEL WIREDRAWING AND STEEL NAILS AND SPIKES", Category.CLASS),
+ Classification("3316", "COLD-ROLLED STEEL SHEET, STRIP, AND BARS", Category.CLASS),
+ Classification("3317", "STEEL PIPE AND TUBES", Category.CLASS),
+ Classification("332", "IRON AND STEEL FOUNDRIES", Category.GROUP),
+ Classification("3321", "GRAY AND DUCTILE IRON FOUNDRIES", Category.CLASS),
+ Classification("3322", "MALLEABLE IRON FOUNDRIES", Category.CLASS),
+ Classification("3324", "STEEL INVESTMENT FOUNDRIES", Category.CLASS),
+ Classification("3325", "STEEL FOUNDRIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("333", "PRIMARY SMELTING AND REFINING OF NONFERROUS METALS", Category.GROUP),
+ Classification("3331", "PRIMARY SMELTING AND REFINING OF COPPER", Category.CLASS),
+ Classification("3334", "PRIMARY PRODUCTION OF ALUMINUM", Category.CLASS),
+ Classification("3339", "PRIMARY SMELTING AND REFINING OF NONFERROUS METALS, EXCEPT COPPER", Category.CLASS),
+ Classification("334", "SECONDARY SMELTING AND REFINING OF NONFERROUS METALS", Category.GROUP),
+ Classification("3341", "SECONDARY SMELTING AND REFINING OF NONFERROUS METALS", Category.CLASS),
+ Classification("335", "ROLLING, DRAWING, AND EXTRUDING OF NONFERROUS METALS", Category.GROUP),
+ Classification("3351", "ROLLING, DRAWING, AND EXTRUDING OF COPPER", Category.CLASS),
+ Classification("3353", "ALUMINUM SHEET, PLATE, AND FOIL", Category.CLASS),
+ Classification("3354", "ALUMINUM EXTRUDED PRODUCTS", Category.CLASS),
+ Classification("3355", "ALUMINUM ROLLING AND DRAWING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3356", "ROLLING, DRAWING, AND EXTRUDING OF NONFERROUS METALS, EXCEPT COPP", Category.CLASS),
+ Classification("3357", "DRAWING AND INSULATING OF NONFERROUS WIRE", Category.CLASS),
+ Classification("336", "NONFERROUS FOUNDRIES (CASTINGS)", Category.GROUP),
+ Classification("3363", "ALUMINUM DIE-CASTINGS", Category.CLASS),
+ Classification("3364", "NONFERROUS DIE-CASTINGS, EXCEPT ALUMINUM", Category.CLASS),
+ Classification("3365", "ALUMINUM FOUNDRIES", Category.CLASS),
+ Classification("3366", "COPPER FOUNDRIES", Category.CLASS),
+ Classification("3369", "NONFERROUS FOUNDRIES, EXCEPT ALUMINUM AND COPPER", Category.CLASS),
+ Classification("339", "MISCELLANEOUS PRIMARY METAL PRODUCTS", Category.GROUP),
+ Classification("3398", "METAL HEAT TREATING", Category.CLASS),
+ Classification("3399", "PRIMARY METAL PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification(
+ "34", "FABRICATED METAL PRODUCTS, EXCEPT MACHINERY AND TRANSPORTATION EQUIPMENT", Category.DIVISION
+ ),
+ Classification("341", "METAL CANS AND SHIPPING CONTAINERS", Category.GROUP),
+ Classification("3411", "METAL CANS", Category.CLASS),
+ Classification("3412", "METAL SHIPPING BARRELS, DRUMS, KEGS, AND PAILS", Category.CLASS),
+ Classification("342", "CUTLERY, HANDTOOLS, AND GENERAL HARDWARE", Category.GROUP),
+ Classification("3421", "CUTLERY", Category.CLASS),
+ Classification("3423", "HAND AND EDGE TOOLS, EXCEPT MACHINE TOOLS AND HANDSAWS", Category.CLASS),
+ Classification("3425", "SAW BLADES AND HANDSAWS", Category.CLASS),
+ Classification("3429", "HARDWARE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("343", "HEATING EQUIPMENT, EXCEPT ELECTRIC AND WARM AIR; AND PLUMBING FIXT", Category.GROUP),
+ Classification("3431", "ENAMELED IRON AND METAL SANITARY WARE", Category.CLASS),
+ Classification("3432", "PLUMBING FIXTURE FITTINGS AND TRIM", Category.CLASS),
+ Classification("3433", "HEATING EQUIPMENT, EXCEPT ELECTRIC AND WARM AIR FURNACES", Category.CLASS),
+ Classification("344", "FABRICATED STRUCTURAL METAL PRODUCTS", Category.GROUP),
+ Classification("3441", "FABRICATED STRUCTURAL METAL", Category.CLASS),
+ Classification("3442", "METAL DOORS, SASH, FRAMES, MOLDING, AND TRIM", Category.CLASS),
+ Classification("3443", "FABRICATED PLATE WORK (BOILER SHOPS)", Category.CLASS),
+ Classification("3444", "SHEET METALWORK", Category.CLASS),
+ Classification("3446", "ARCHITECTURAL AND ORNAMENTAL METALWORK", Category.CLASS),
+ Classification("3448", "PREFABRICATED METAL BUILDINGS AND COMPONENTS", Category.CLASS),
+ Classification("3449", "MISCELLANEOUS STRUCTURAL METALWORK", Category.CLASS),
+ Classification("345", "SCREW MACHINE PRODUCTS, AND BOLTS, NUTS, SCREWS, RIVETS, AND WASHE", Category.GROUP),
+ Classification("3451", "SCREW MACHINE PRODUCTS", Category.CLASS),
+ Classification("3452", "BOLTS, NUTS, SCREWS, RIVETS, AND WASHERS", Category.CLASS),
+ Classification("346", "METAL FORGINGS AND STAMPINGS", Category.GROUP),
+ Classification("3462", "IRON AND STEEL FORGINGS", Category.CLASS),
+ Classification("3463", "NONFERROUS FORGINGS", Category.CLASS),
+ Classification("3465", "AUTOMOTIVE STAMPINGS", Category.CLASS),
+ Classification("3466", "CROWNS AND CLOSURES", Category.CLASS),
+ Classification("3469", "METAL STAMPINGS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("347", "COATING, ENGRAVING, AND ALLIED SERVICES", Category.GROUP),
+ Classification("3471", "ELECTROPLATING, PLATING, POLISHING, ANODIZING, AND COLORING", Category.CLASS),
+ Classification("3479", "COATING, ENGRAVING, AND ALLIED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("348", "ORDNANCE AND ACCESSORIES, EXCEPT VEHICLES AND GUIDED MISSILES", Category.GROUP),
+ Classification("3482", "SMALL ARMS AMMUNITION", Category.CLASS),
+ Classification("3483", "AMMUNITION, EXCEPT FOR SMALL ARMS", Category.CLASS),
+ Classification("3484", "SMALL ARMS", Category.CLASS),
+ Classification("3489", "ORDNANCE AND ACCESSORIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("349", "MISCELLANEOUS FABRICATED METAL PRODUCTS", Category.GROUP),
+ Classification("3491", "INDUSTRIAL VALVES", Category.CLASS),
+ Classification("3492", "FLUID POWER VALVES AND HOSE FITTINGS", Category.CLASS),
+ Classification("3493", "STEEL SPRINGS, EXCEPT WIRE", Category.CLASS),
+ Classification("3494", "VALVES AND PIPE FITTINGS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3495", "WIRE SPRINGS", Category.CLASS),
+ Classification("3496", "MISCELLANEOUS FABRICATED WIRE PRODUCTS", Category.CLASS),
+ Classification("3497", "METAL FOIL AND LEAF", Category.CLASS),
+ Classification("3498", "FABRICATED PIPE AND PIPE FITTINGS", Category.CLASS),
+ Classification("3499", "FABRICATED METAL PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("35", "INDUSTRIAL AND COMMERCIAL MACHINERY AND COMPUTER EQUIPMENT", Category.DIVISION),
+ Classification("351", "ENGINES AND TURBINES", Category.GROUP),
+ Classification("3511", "STEAM, GAS, AND HYDRAULIC TURBINES, AND TURBINE GENERATOR SET UNI", Category.CLASS),
+ Classification("3519", "INTERNAL COMBUSTION ENGINES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("352", "FARM AND GARDEN MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3523", "FARM MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3524", "LAWN AND GARDEN TRACTORS AND HOME LAWN AND GARDEN EQUIPMENT", Category.CLASS),
+ Classification("3524", "BLOWERS, RESIDENTIAL LAWN", Category.CLASS),
+ Classification("353", "CONSTRUCTION, MINING, AND MATERIALS HANDLING MACHINERY AND EQUIPME", Category.GROUP),
+ Classification("3531", "CONSTRUCTION MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3532", "MINING MACHINERY AND EQUIPMENT, EXCEPT OIL AND GAS FIELD MACHINER", Category.CLASS),
+ Classification("3533", "OIL AND GAS FIELD MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3534", "ELEVATORS AND MOVING STAIRWAYS", Category.CLASS),
+ Classification("3535", "CONVEYORS AND CONVEYING EQUIPMENT", Category.CLASS),
+ Classification("3536", "OVERHEAD TRAVELING CRANES, HOISTS, AND MONORAIL SYSTEMS", Category.CLASS),
+ Classification("3537", "INDUSTRIAL TRUCKS, TRACTORS, TRAILERS, AND STACKERS", Category.CLASS),
+ Classification("354", "METALWORKING MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3541", "MACHINE TOOLS, METAL CUTTING TYPES", Category.CLASS),
+ Classification("3542", "MACHINE TOOLS, METAL FORMING TYPES", Category.CLASS),
+ Classification("3543", "INDUSTRIAL PATTERNS", Category.CLASS),
+ Classification("3544", "SPECIAL DIES AND TOOLS, DIE SETS, JIGS AND FIXTURES, AND INDUSTRI", Category.CLASS),
+ Classification("3545", "CUTTING TOOLS, MACHINE TOOL ACCESSORIES, AND MACHINISTS' PRECISIO", Category.CLASS),
+ Classification("3546", "POWER-DRIVEN HANDTOOLS", Category.CLASS),
+ Classification("3547", "ROLLING MILL MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3548", "ELECTRIC AND GAS WELDING AND SOLDERING EQUIPMENT", Category.CLASS),
+ Classification("3549", "METALWORKING MACHINERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("355", "SPECIAL INDUSTRY MACHINERY, EXCEPT METALWORKING MACHINERY", Category.GROUP),
+ Classification("3552", "TEXTILE MACHINERY", Category.CLASS),
+ Classification("3553", "WOODWORKING MACHINERY", Category.CLASS),
+ Classification("3554", "PAPER INDUSTRIES MACHINERY", Category.CLASS),
+ Classification("3555", "PRINTING TRADES MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3556", "FOOD PRODUCTS MACHINERY", Category.CLASS),
+ Classification("3559", "SPECIAL INDUSTRY MACHINERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("356", "GENERAL INDUSTRIAL MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3561", "PUMPS AND PUMPING EQUIPMENT", Category.CLASS),
+ Classification("3562", "BALL AND ROLLER BEARINGS", Category.CLASS),
+ Classification("3563", "AIR AND GAS COMPRESSORS", Category.CLASS),
+ Classification("3564", "INDUSTRIAL AND COMMERCIAL FANS AND BLOWERS AND AIR PURIFICATION E", Category.CLASS),
+ Classification("3565", "PACKAGING MACHINERY", Category.CLASS),
+ Classification("3566", "SPEED CHANGERS, INDUSTRIAL HIGH-SPEED DRIVES, AND GEARS", Category.CLASS),
+ Classification("3567", "INDUSTRIAL PROCESS FURNACES AND OVENS", Category.CLASS),
+ Classification("3568", "MECHANICAL POWER TRANSMISSION EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3569", "GENERAL INDUSTRIAL MACHINERY AND EQUIPMENT, NOT ELSEWHERE CLASSIF", Category.CLASS),
+ Classification("357", "COMPUTER AND OFFICE EQUIPMENT", Category.GROUP),
+ Classification("3571", "ELECTRONIC COMPUTERS", Category.CLASS),
+ Classification("3572", "COMPUTER STORAGE DEVICES", Category.CLASS),
+ Classification("3575", "COMPUTER TERMINALS", Category.CLASS),
+ Classification("3577", "COMPUTER PERIPHERAL EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3578", "CALCULATING AND ACCOUNTING MACHINES, EXCEPT ELECTRONIC COMPUTERS", Category.CLASS),
+ Classification("3579", "OFFICE MACHINES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("358", "REFRIGERATION AND SERVICE INDUSTRY MACHINERY", Category.GROUP),
+ Classification("3581", "AUTOMATIC VENDING MACHINES", Category.CLASS),
+ Classification("3582", "COMMERCIAL LAUNDRY, DRYCLEANING, AND PRESSING MACHINES", Category.CLASS),
+ Classification("3585", "AIR-CONDITIONING AND WARM AIR HEATING EQUIPMENT AND COMMERCIAL AN", Category.CLASS),
+ Classification("3586", "MEASURING AND DISPENSING PUMPS", Category.CLASS),
+ Classification("3589", "SERVICE INDUSTRY MACHINERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("359", "MISCELLANEOUS INDUSTRIAL AND COMMERCIAL MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3592", "CARBURETORS, PISTONS, PISTON RINGS, AND VALVES", Category.CLASS),
+ Classification("3593", "FLUID POWER CYLINDERS AND ACTUATORS", Category.CLASS),
+ Classification("3594", "FLUID POWER PUMPS AND MOTORS", Category.CLASS),
+ Classification("3596", "SCALES AND BALANCES, EXCEPT LABORATORY", Category.CLASS),
+ Classification("3599", "INDUSTRIAL AND COMMERCIAL MACHINERY AND EQUIPMENT, NOT ELSEWHERE", Category.CLASS),
+ Classification(
+ "36", "ELECTRONIC AND OTHER ELECTRICAL EQUIPMENT AND COMPONENTS, EXCEPT COMPUTER", Category.DIVISION
+ ),
+ Classification("361", "ELECTRIC TRANSMISSION AND DISTRIBUTION EQUIPMENT", Category.GROUP),
+ Classification("3612", "POWER, DISTRIBUTION, AND SPECIALTY TRANSFORMERS", Category.CLASS),
+ Classification("3613", "SWITCHGEAR AND SWITCHBOARD APPARATUS", Category.CLASS),
+ Classification("362", "ELECTRICAL INDUSTRIAL APPARATUS", Category.GROUP),
+ Classification("3621", "MOTORS AND GENERATORS", Category.CLASS),
+ Classification("3624", "CARBON AND GRAPHITE PRODUCTS", Category.CLASS),
+ Classification("3625", "RELAYS AND INDUSTRIAL CONTROLS", Category.CLASS),
+ Classification("3629", "ELECTRICAL INDUSTRIAL APPARATUS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("363", "HOUSEHOLD APPLIANCES", Category.GROUP),
+ Classification("3631", "HOUSEHOLD COOKING EQUIPMENT", Category.CLASS),
+ Classification("3632", "HOUSEHOLD REFRIGERATORS AND HOME AND FARM FREEZERS", Category.CLASS),
+ Classification("3633", "HOUSEHOLD LAUNDRY EQUIPMENT", Category.CLASS),
+ Classification("3634", "ELECTRIC HOUSEWARES AND FANS", Category.CLASS),
+ Classification("3635", "HOUSEHOLD VACUUM CLEANERS", Category.CLASS),
+ Classification("3639", "HOUSEHOLD APPLIANCES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("364", "ELECTRIC LIGHTING AND WIRING EQUIPMENT", Category.GROUP),
+ Classification("3641", "ELECTRIC LAMP BULBS AND TUBES", Category.CLASS),
+ Classification("3643", "CURRENT-CARRYING WIRING DEVICES", Category.CLASS),
+ Classification("3644", "NONCURRENT-CARRYING WIRING DEVICES", Category.CLASS),
+ Classification("3645", "RESIDENTIAL ELECTRIC LIGHTING FIXTURES", Category.CLASS),
+ Classification("3646", "COMMERCIAL, INDUSTRIAL, AND INSTITUTIONAL ELECTRIC LIGHTING FIXTU", Category.CLASS),
+ Classification("3647", "VEHICULAR LIGHTING EQUIPMENT", Category.CLASS),
+ Classification("3648", "LIGHTING EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("365", "HOUSEHOLD AUDIO AND VIDEO EQUIPMENT, AND AUDIO RECORDINGS", Category.GROUP),
+ Classification("3651", "HOUSEHOLD AUDIO AND VIDEO EQUIPMENT", Category.CLASS),
+ Classification("3652", "PHONOGRAPH RECORDS AND PRERECORDED AUDIO TAPES AND DISKS", Category.CLASS),
+ Classification("366", "COMMUNICATIONS EQUIPMENT", Category.GROUP),
+ Classification("3661", "TELEPHONE AND TELEGRAPH APPARATUS", Category.CLASS),
+ Classification("3663", "RADIO AND TELEVISION BROADCASTING AND COMMUNICATIONS EQUIPMENT", Category.CLASS),
+ Classification("3669", "COMMUNICATIONS EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("367", "ELECTRONIC COMPONENTS AND ACCESSORIES", Category.GROUP),
+ Classification("3671", "ELECTRON TUBES", Category.CLASS),
+ Classification("3672", "PRINTED CIRCUIT BOARDS", Category.CLASS),
+ Classification("3674", "SEMICONDUCTORS AND RELATED DEVICES", Category.CLASS),
+ Classification("3675", "ELECTRONIC CAPACITORS", Category.CLASS),
+ Classification("3676", "ELECTRONIC RESISTORS", Category.CLASS),
+ Classification("3677", "ELECTRONIC COILS, TRANSFORMERS, AND OTHER INDUCTORS", Category.CLASS),
+ Classification("3678", "ELECTRONIC CONNECTORS", Category.CLASS),
+ Classification("3679", "ELECTRONIC COMPONENTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("369", "MISCELLANEOUS ELECTRICAL MACHINERY, EQUIPMENT, AND SUPPLIES", Category.GROUP),
+ Classification("3691", "STORAGE BATTERIES", Category.CLASS),
+ Classification("3692", "PRIMARY BATTERIES, DRY AND WET", Category.CLASS),
+ Classification("3694", "ELECTRICAL EQUIPMENT FOR INTERNAL COMBUSTION ENGINES", Category.CLASS),
+ Classification("3695", "MAGNETIC AND OPTICAL RECORDING MEDIA", Category.CLASS),
+ Classification("3699", "ELECTRICAL MACHINERY, EQUIPMENT, AND SUPPLIES, NOT ELSEWHERE CLAS", Category.CLASS),
+ Classification("37", "TRANSPORTATION EQUIPMENT", Category.DIVISION),
+ Classification("371", "MOTOR VEHICLES AND MOTOR VEHICLE EQUIPMENT", Category.GROUP),
+ Classification("3711", "MOTOR VEHICLES AND PASSENGER CAR BODIES", Category.CLASS),
+ Classification("3713", "TRUCK AND BUS BODIES", Category.CLASS),
+ Classification("3714", "MOTOR VEHICLE PARTS AND ACCESSORIES", Category.CLASS),
+ Classification("3715", "TRUCK TRAILERS", Category.CLASS),
+ Classification("3716", "MOTOR HOMES", Category.CLASS),
+ Classification("372", "AIRCRAFT AND PARTS", Category.GROUP),
+ Classification("3721", "AIRCRAFT", Category.CLASS),
+ Classification("3724", "AIRCRAFT ENGINES AND ENGINE PARTS", Category.CLASS),
+ Classification("3728", "AIRCRAFT PARTS AND AUXILIARY EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("373", "SHIP AND BOAT BUILDING AND REPAIRING", Category.GROUP),
+ Classification("3731", "SHIP BUILDING AND REPAIRING", Category.CLASS),
+ Classification("3732", "BOAT BUILDING AND REPAIRING", Category.CLASS),
+ Classification("374", "RAILROAD EQUIPMENT", Category.GROUP),
+ Classification("3743", "RAILROAD EQUIPMENT", Category.CLASS),
+ Classification("375", "MOTORCYCLES, BICYCLES, AND PARTS", Category.GROUP),
+ Classification("3751", "MOTORCYCLES, BICYCLES, AND PARTS", Category.CLASS),
+ Classification("376", "GUIDED MISSILES AND SPACE VEHICLES AND PARTS", Category.GROUP),
+ Classification("3761", "GUIDED MISSILES AND SPACE VEHICLES", Category.CLASS),
+ Classification("3764", "GUIDED MISSILE AND SPACE VEHICLE PROPULSION UNITS AND PROPULSION", Category.CLASS),
+ Classification("3769", "GUIDED MISSILE AND SPACE VEHICLE PARTS AND AUXILIARY EQUIPMENT, N", Category.CLASS),
+ Classification("379", "MISCELLANEOUS TRANSPORTATION EQUIPMENT", Category.GROUP),
+ Classification("3792", "TRAVEL TRAILERS AND CAMPERS", Category.CLASS),
+ Classification("3795", "TANKS AND TANK COMPONENTS", Category.CLASS),
+ Classification("3799", "TRANSPORTATION EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification(
+ "38", "MEASURING, ANALYZING AND CONTROLLING INSTRUMENTS; PHOTOGRAPHIC, MEDICAL AN", Category.DIVISION
+ ),
+ Classification("381", "SEARCH, DETECTION, NAVIGATION, GUIDANCE, AERONAUTICAL, AND NAUTICA", Category.GROUP),
+ Classification("3812", "SEARCH, DETECTION, NAVIGATION, GUIDANCE, AERONAUTICAL, AND NAUTIC", Category.CLASS),
+ Classification("382", "LABORATORY APPARATUS AND ANALYTICAL, OPTICAL, MEASURING, AND CONTR", Category.GROUP),
+ Classification("3821", "LABORATORY APPARATUS AND FURNITURE", Category.CLASS),
+ Classification("3822", "AUTOMATIC CONTROLS FOR REGULATING RESIDENTIAL AND COMMERCIAL ENVI", Category.CLASS),
+ Classification("3823", "INDUSTRIAL INSTRUMENTS FOR MEASUREMENT, DISPLAY, AND CONTROL OF P", Category.CLASS),
+ Classification("3824", "TOTALIZING FLUID METERS AND COUNTING DEVICES", Category.CLASS),
+ Classification("3825", "INSTRUMENTS FOR MEASURING AND TESTING OF ELECTRICITY AND ELECTRIC", Category.CLASS),
+ Classification("3826", "LABORATORY ANALYTICAL INSTRUMENTS", Category.CLASS),
+ Classification("3827", "OPTICAL INSTRUMENTS AND LENSES", Category.CLASS),
+ Classification("3829", "MEASURING AND CONTROLLING DEVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("384", "SURGICAL, MEDICAL, AND DENTAL INSTRUMENTS AND SUPPLIES", Category.GROUP),
+ Classification("3841", "SURGICAL AND MEDICAL INSTRUMENTS AND APPARATUS", Category.CLASS),
+ Classification("3842", "ORTHOPEDIC, PROSTHETIC, AND SURGICAL APPLIANCES AND SUPPLIES", Category.CLASS),
+ Classification("3843", "DENTAL EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("3844", "X-RAY APPARATUS AND TUBES AND RELATED IRRADIATION APPARATUS", Category.CLASS),
+ Classification("3845", "ELECTROMEDICAL AND ELECTROTHERAPEUTIC APPARATUS", Category.CLASS),
+ Classification("385", "OPHTHALMIC GOODS", Category.GROUP),
+ Classification("3851", "OPHTHALMIC GOODS", Category.CLASS),
+ Classification("386", "PHOTOGRAPHIC EQUIPMENT AND SUPPLIES", Category.GROUP),
+ Classification("3861", "PHOTOGRAPHIC EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("387", "WATCHES, CLOCKS, CLOCKWORK OPERATED DEVICES, AND PARTS", Category.GROUP),
+ Classification("3873", "WATCHES, CLOCKS, CLOCKWORK OPERATED DEVICES, AND PARTS", Category.CLASS),
+ Classification("39", "MISCELLANEOUS MANUFACTURING INDUSTRIES", Category.DIVISION),
+ Classification("391", "JEWELRY, SILVERWARE, AND PLATED WARE", Category.GROUP),
+ Classification("3911", "JEWELRY, PRECIOUS METAL", Category.CLASS),
+ Classification("3914", "SILVERWARE, PLATED WARE, AND STAINLESS STEEL WARE", Category.CLASS),
+ Classification("3915", "JEWELERS' FINDINGS AND MATERIALS, AND LAPIDARY WORK", Category.CLASS),
+ Classification("393", "MUSICAL INSTRUMENTS", Category.GROUP),
+ Classification("3931", "MUSICAL INSTRUMENTS", Category.CLASS),
+ Classification("394", "DOLLS, TOYS, GAMES AND SPORTING AND ATHLETIC GOODS", Category.GROUP),
+ Classification("3942", "DOLLS AND STUFFED TOYS", Category.CLASS),
+ Classification("3944", "GAMES, TOYS, AND CHILDREN'S VEHICLES, EXCEPT DOLLS AND BICYCLES", Category.CLASS),
+ Classification("3949", "SPORTING AND ATHLETIC GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("395", "PENS, PENCILS, AND OTHER ARTISTS' MATERIALS", Category.GROUP),
+ Classification("3951", "PENS, MECHANICAL PENCILS, AND PARTS", Category.CLASS),
+ Classification("3952", "LEAD PENCILS, CRAYONS, AND ARTISTS' MATERIALS", Category.CLASS),
+ Classification("3953", "MARKING DEVICES", Category.CLASS),
+ Classification("3955", "CARBON PAPER AND INKED RIBBONS", Category.CLASS),
+ Classification("396", "COSTUME JEWELRY, COSTUME NOVELTIES, BUTTONS, AND MISCELLANEOUS NOT", Category.GROUP),
+ Classification("3961", "COSTUME JEWELRY AND COSTUME NOVELTIES, EXCEPT PRECIOUS METAL", Category.CLASS),
+ Classification("3965", "FASTENERS, BUTTONS, NEEDLES, AND PINS", Category.CLASS),
+ Classification("399", "MISCELLANEOUS MANUFACTURING INDUSTRIES", Category.GROUP),
+ Classification("3991", "BROOMS AND BRUSHES", Category.CLASS),
+ Classification("3993", "SIGNS AND ADVERTISING SPECIALTIES", Category.CLASS),
+ Classification("3995", "BURIAL CASKETS", Category.CLASS),
+ Classification("3996", "LINOLEUM, ASPHALTED-FELT-BASE, AND OTHER HARD SURFACE FLOOR COVER", Category.CLASS),
+ Classification("3999", "MANUFACTURING INDUSTRIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("E", "TRANSPORTATION, COMMUNICATIONS, ELECTRIC, GAS, AND SANITARY SERVICE", Category.SECTION),
+ Classification("40", "RAILROAD TRANSPORTATION", Category.DIVISION),
+ Classification("401", "RAILROADS", Category.GROUP),
+ Classification("4011", "RAILROADS, LINE-HAUL OPERATING", Category.CLASS),
+ Classification("4013", "RAILROAD SWITCHING AND TERMINAL ESTABLISHMENTS", Category.CLASS),
+ Classification(
+ "41", "LOCAL AND SUBURBAN TRANSIT AND INTERURBAN HIGHWAY PASSENGER TRANSPORTATION", Category.DIVISION
+ ),
+ Classification("411", "LOCAL AND SUBURBAN PASSENGER TRANSPORTATION", Category.GROUP),
+ Classification("4111", "LOCAL AND SUBURBAN TRANSIT", Category.CLASS),
+ Classification("4119", "LOCAL PASSENGER TRANSPORTATION, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("412", "TAXICABS", Category.GROUP),
+ Classification("4121", "TAXICABS", Category.CLASS),
+ Classification("413", "INTERCITY AND RURAL BUS TRANSPORTATION", Category.GROUP),
+ Classification("4131", "INTERCITY AND RURAL BUS TRANSPORTATION", Category.CLASS),
+ Classification("414", "BUS CHARTER SERVICE", Category.GROUP),
+ Classification("4141", "LOCAL BUS CHARTER SERVICE", Category.CLASS),
+ Classification("4142", "BUS CHARTER SERVICE, EXCEPT LOCAL", Category.CLASS),
+ Classification("415", "SCHOOL BUSES", Category.GROUP),
+ Classification("4151", "SCHOOL BUSES", Category.CLASS),
+ Classification("417", "TERMINAL AND SERVICE FACILITIES FOR MOTOR VEHICLE PASSENGER TRANSP", Category.GROUP),
+ Classification("4173", "TERMINAL AND SERVICE FACILITIES FOR MOTOR VEHICLE PASSENGER TRANS", Category.CLASS),
+ Classification("42", "MOTOR FREIGHT TRANSPORTATION AND WAREHOUSING", Category.DIVISION),
+ Classification("421", "TRUCKING AND COURIER SERVICES, EXCEPT AIR", Category.GROUP),
+ Classification("4212", "LOCAL TRUCKING WITHOUT STORAGE", Category.CLASS),
+ Classification("4213", "TRUCKING, EXCEPT LOCAL", Category.CLASS),
+ Classification("4214", "LOCAL TRUCKING WITH STORAGE", Category.CLASS),
+ Classification("4215", "COURIER SERVICES, EXCEPT BY AIR", Category.CLASS),
+ Classification("422", "PUBLIC WAREHOUSING AND STORAGE", Category.GROUP),
+ Classification("4221", "FARM PRODUCT WAREHOUSING AND STORAGE", Category.CLASS),
+ Classification("4222", "REFRIGERATED WAREHOUSING AND STORAGE", Category.CLASS),
+ Classification("4225", "GENERAL WAREHOUSING AND STORAGE", Category.CLASS),
+ Classification("4226", "SPECIAL WAREHOUSING AND STORAGE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("423", "TERMINAL AND JOINT TERMINAL MAINTENANCE FACILITIES FOR MOTOR FREIG", Category.GROUP),
+ Classification("4231", "TERMINAL AND JOINT TERMINAL MAINTENANCE FACILITIES FOR MOTOR FREI", Category.CLASS),
+ Classification("43", "UNITED STATES POSTAL SERVICE", Category.DIVISION),
+ Classification("431", "UNITED STATES POSTAL SERVICE", Category.GROUP),
+ Classification("4311", "UNITED STATES POSTAL SERVICE", Category.CLASS),
+ Classification("44", "WATER TRANSPORTATION", Category.DIVISION),
+ Classification("441", "DEEP SEA FOREIGN TRANSPORTATION OF FREIGHT", Category.GROUP),
+ Classification("4412", "DEEP SEA FOREIGN TRANSPORTATION OF FREIGHT", Category.CLASS),
+ Classification("442", "DEEP SEA DOMESTIC TRANSPORTATION OF FREIGHT", Category.GROUP),
+ Classification("4424", "DEEP SEA DOMESTIC TRANSPORTATION OF FREIGHT", Category.CLASS),
+ Classification("443", "FREIGHT TRANSPORTATION ON THE GREAT LAKES¨ST. LAWRENCE SEAWAY", Category.GROUP),
+ Classification("4432", "FREIGHT TRANSPORTATION ON THE GREAT LAKES¨ST. LAWRENCE SEAWAY", Category.CLASS),
+ Classification("444", "WATER TRANSPORTATION OF FREIGHT, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("4449", "WATER TRANSPORTATION OF FREIGHT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("448", "WATER TRANSPORTATION OF PASSENGERS", Category.GROUP),
+ Classification("4481", "DEEP SEA TRANSPORTATION OF PASSENGERS, EXCEPT BY FERRY", Category.CLASS),
+ Classification("4482", "FERRIES", Category.CLASS),
+ Classification("4489", "WATER TRANSPORTATION OF PASSENGERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("449", "SERVICES INCIDENTAL TO WATER TRANSPORTATION", Category.GROUP),
+ Classification("4491", "MARINE CARGO HANDLING", Category.CLASS),
+ Classification("4492", "TOWING AND TUGBOAT SERVICES", Category.CLASS),
+ Classification("4493", "MARINAS", Category.CLASS),
+ Classification("4499", "WATER TRANSPORTATION SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("45", "TRANSPORTATION BY AIR", Category.DIVISION),
+ Classification("451", "AIR TRANSPORTATION, SCHEDULED, AND AIR COURIER SERVICES", Category.GROUP),
+ Classification("4512", "AIR TRANSPORTATION, SCHEDULED", Category.CLASS),
+ Classification("4513", "AIR COURIER SERVICES", Category.CLASS),
+ Classification("452", "AIR TRANSPORTATION, NONSCHEDULED", Category.GROUP),
+ Classification("4522", "AIR TRANSPORTATION, NONSCHEDULED", Category.CLASS),
+ Classification("458", "AIRPORTS, FLYING FIELDS, AND AIRPORT TERMINAL SERVICES", Category.GROUP),
+ Classification("4581", "AIRPORTS, FLYING FIELDS, AND AIRPORT TERMINAL SERVICES", Category.CLASS),
+ Classification("46", "PIPELINES, EXCEPT NATURAL GAS", Category.DIVISION),
+ Classification("461", "PIPELINES, EXCEPT NATURAL GAS", Category.GROUP),
+ Classification("4612", "CRUDE PETROLEUM PIPELINES", Category.CLASS),
+ Classification("4613", "REFINED PETROLEUM PIPELINES", Category.CLASS),
+ Classification("4619", "PIPELINES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("47", "TRANSPORTATION SERVICES", Category.DIVISION),
+ Classification("472", "ARRANGEMENT OF PASSENGER TRANSPORTATION", Category.GROUP),
+ Classification("4724", "TRAVEL AGENCIES", Category.CLASS),
+ Classification("4725", "TOUR OPERATORS", Category.CLASS),
+ Classification("4729", "ARRANGEMENT OF PASSENGER TRANSPORTATION, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("473", "ARRANGEMENT OF TRANSPORTATION OF FREIGHT AND CARGO", Category.GROUP),
+ Classification("4731", "ARRANGEMENT OF TRANSPORTATION OF FREIGHT AND CARGO", Category.CLASS),
+ Classification("474", "RENTAL OF RAILROAD CARS", Category.GROUP),
+ Classification("4741", "RENTAL OF RAILROAD CARS", Category.CLASS),
+ Classification("478", "MISCELLANEOUS SERVICES INCIDENTAL TO TRANSPORTATION", Category.GROUP),
+ Classification("4783", "PACKING AND CRATING", Category.CLASS),
+ Classification("4785", "FIXED FACILITIES AND INSPECTION AND WEIGHING SERVICES FOR MOTOR V", Category.CLASS),
+ Classification("4785", "CARGO CHECKERS AND SURVEYORS, MARINE", Category.CLASS),
+ Classification("4789", "TRANSPORTATION SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("48", "COMMUNICATIONS", Category.DIVISION),
+ Classification("481", "TELEPHONE COMMUNICATIONS", Category.GROUP),
+ Classification("4812", "RADIOTELEPHONE COMMUNICATIONS", Category.CLASS),
+ Classification("4813", "TELEPHONE COMMUNICATIONS, EXCEPT RADIOTELEPHONE", Category.CLASS),
+ Classification("482", "TELEGRAPH AND OTHER MESSAGE COMMUNICATIONS", Category.GROUP),
+ Classification("4822", "TELEGRAPH AND OTHER MESSAGE COMMUNICATIONS", Category.CLASS),
+ Classification("483", "RADIO AND TELEVISION BROADCASTING STATIONS", Category.GROUP),
+ Classification("4832", "RADIO BROADCASTING STATIONS", Category.CLASS),
+ Classification("4833", "TELEVISION BROADCASTING STATIONS", Category.CLASS),
+ Classification("484", "CABLE AND OTHER PAY TELEVISION SERVICES", Category.GROUP),
+ Classification("4841", "CABLE AND OTHER PAY TELEVISION SERVICES", Category.CLASS),
+ Classification("489", "COMMUNICATIONS SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("4899", "COMMUNICATIONS SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("49", "ELECTRIC, GAS, AND SANITARY SERVICES", Category.DIVISION),
+ Classification("491", "ELECTRIC SERVICES", Category.GROUP),
+ Classification("4911", "ELECTRIC SERVICES", Category.CLASS),
+ Classification("492", "GAS PRODUCTION AND DISTRIBUTION", Category.GROUP),
+ Classification("4922", "NATURAL GAS TRANSMISSION", Category.CLASS),
+ Classification("4923", "NATURAL GAS TRANSMISSION AND DISTRIBUTION", Category.CLASS),
+ Classification("4924", "NATURAL GAS DISTRIBUTION", Category.CLASS),
+ Classification("4925", "MIXED, MANUFACTURED, OR LIQUEFIED PETROLEUM GAS PRODUCTION AND/OR", Category.CLASS),
+ Classification("493", "COMBINATION ELECTRIC AND GAS, AND OTHER UTILITY SERVICES", Category.GROUP),
+ Classification("4931", "ELECTRIC AND OTHER SERVICES COMBINED", Category.CLASS),
+ Classification("4932", "GAS AND OTHER SERVICES COMBINED", Category.CLASS),
+ Classification("4939", "COMBINATION UTILITIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("494", "WATER SUPPLY", Category.GROUP),
+ Classification("4941", "WATER SUPPLY", Category.CLASS),
+ Classification("495", "SANITARY SERVICES", Category.GROUP),
+ Classification("4952", "SEWERAGE SYSTEMS", Category.CLASS),
+ Classification("4953", "REFUSE SYSTEMS", Category.CLASS),
+ Classification("4959", "SANITARY SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("496", "STEAM AND AIR-CONDITIONING SUPPLY", Category.GROUP),
+ Classification("4961", "STEAM AND AIR-CONDITIONING SUPPLY", Category.CLASS),
+ Classification("497", "IRRIGATION SYSTEMS", Category.GROUP),
+ Classification("4971", "IRRIGATION SYSTEMS", Category.CLASS),
+ Classification("F", "WHOLESALE TRADE", Category.SECTION),
+ Classification("50", "WHOLESALE TRADE¨DURABLE GOODS", Category.DIVISION),
+ Classification("501", "MOTOR VEHICLES AND MOTOR VEHICLE PARTS AND SUPPLIES", Category.GROUP),
+ Classification("5012", "AUTOMOBILES AND OTHER MOTOR VEHICLES", Category.CLASS),
+ Classification("5013", "MOTOR VEHICLE SUPPLIES AND NEW PARTS", Category.CLASS),
+ Classification("5014", "TIRES AND TUBES", Category.CLASS),
+ Classification("5015", "MOTOR VEHICLE PARTS, USED", Category.CLASS),
+ Classification("502", "FURNITURE AND HOMEFURNISHINGS", Category.GROUP),
+ Classification("5021", "FURNITURE", Category.CLASS),
+ Classification("5023", "HOMEFURNISHINGS", Category.CLASS),
+ Classification("503", "LUMBER AND OTHER CONSTRUCTION MATERIALS", Category.GROUP),
+ Classification("5031", "LUMBER, PLYWOOD, MILLWORK, AND WOOD PANELS", Category.CLASS),
+ Classification("5032", "BRICK, STONE, AND RELATED CONSTRUCTION MATERIALS", Category.CLASS),
+ Classification("5033", "ROOFING, SIDING, AND INSULATION MATERIALS", Category.CLASS),
+ Classification("5039", "CONSTRUCTION MATERIALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("504", "PROFESSIONAL AND COMMERCIAL EQUIPMENT AND SUPPLIES", Category.GROUP),
+ Classification("5043", "PHOTOGRAPHIC EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5044", "OFFICE EQUIPMENT", Category.CLASS),
+ Classification("5045", "COMPUTERS AND COMPUTER PERIPHERAL EQUIPMENT AND SOFTWARE", Category.CLASS),
+ Classification("5046", "COMMERCIAL EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("5047", "MEDICAL, DENTAL, AND HOSPITAL EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5048", "OPHTHALMIC GOODS", Category.CLASS),
+ Classification("5049", "PROFESSIONAL EQUIPMENT AND SUPPLIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("505", "METALS AND MINERALS, EXCEPT PETROLEUM", Category.GROUP),
+ Classification("5051", "METALS SERVICE CENTERS AND OFFICES", Category.CLASS),
+ Classification("5052", "COAL AND OTHER MINERALS AND ORES", Category.CLASS),
+ Classification("506", "ELECTRICAL GOODS", Category.GROUP),
+ Classification("5063", "ELECTRICAL APPARATUS AND EQUIPMENT, WIRING SUPPLIES, AND CONSTRUC", Category.CLASS),
+ Classification("5064", "ELECTRICAL APPLIANCES, TELEVISION AND RADIO SETS", Category.CLASS),
+ Classification("5065", "ELECTRONIC PARTS AND EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("507", "HARDWARE, AND PLUMBING AND HEATING EQUIPMENT AND SUPPLIES", Category.GROUP),
+ Classification("5072", "HARDWARE", Category.CLASS),
+ Classification("5074", "PLUMBING AND HEATING EQUIPMENT AND SUPPLIES (HYDRONICS)", Category.CLASS),
+ Classification("5075", "WARM AIR HEATING AND AIR-CONDITIONING EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5078", "REFRIGERATION EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("508", "MACHINERY, EQUIPMENT, AND SUPPLIES", Category.GROUP),
+ Classification("5082", "CONSTRUCTION AND MINING (EXCEPT PETROLEUM) MACHINERY AND EQUIPMEN", Category.CLASS),
+ Classification("5083", "FARM AND GARDEN MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("5084", "INDUSTRIAL MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("5085", "INDUSTRIAL SUPPLIES", Category.CLASS),
+ Classification("5087", "SERVICE ESTABLISHMENT EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5088", "TRANSPORTATION EQUIPMENT AND SUPPLIES, EXCEPT MOTOR VEHICLES", Category.CLASS),
+ Classification("509", "MISCELLANEOUS DURABLE GOODS", Category.GROUP),
+ Classification("5091", "SPORTING AND RECREATIONAL GOODS AND SUPPLIES", Category.CLASS),
+ Classification("5092", "TOYS AND HOBBY GOODS AND SUPPLIES", Category.CLASS),
+ Classification("5093", "SCRAP AND WASTE MATERIALS", Category.CLASS),
+ Classification("5094", "JEWELRY, WATCHES, PRECIOUS STONES, AND PRECIOUS METALS", Category.CLASS),
+ Classification("5099", "DURABLE GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("51", "WHOLESALE TRADE¨NONDURABLE GOODS", Category.DIVISION),
+ Classification("511", "PAPER AND PAPER PRODUCTS", Category.GROUP),
+ Classification("5111", "PRINTING AND WRITING PAPER", Category.CLASS),
+ Classification("5112", "STATIONERY AND OFFICE SUPPLIES", Category.CLASS),
+ Classification("5113", "INDUSTRIAL AND PERSONAL SERVICE PAPER", Category.CLASS),
+ Classification("512", "DRUGS, DRUG PROPRIETARIES, AND DRUGGISTS' SUNDRIES", Category.GROUP),
+ Classification("5122", "DRUGS, DRUG PROPRIETARIES, AND DRUGGISTS' SUNDRIES", Category.CLASS),
+ Classification("513", "APPAREL, PIECE GOODS, AND NOTIONS", Category.GROUP),
+ Classification("5131", "PIECE GOODS, NOTIONS, AND OTHER DRY GOODS", Category.CLASS),
+ Classification("5136", "MEN'S AND BOYS' CLOTHING AND FURNISHINGS", Category.CLASS),
+ Classification("5137", "WOMEN'S, CHILDREN'S, AND INFANTS' CLOTHING AND ACCESSORIES", Category.CLASS),
+ Classification("5139", "FOOTWEAR", Category.CLASS),
+ Classification("514", "GROCERIES AND RELATED PRODUCTS", Category.GROUP),
+ Classification("5141", "GROCERIES, GENERAL LINE", Category.CLASS),
+ Classification("5142", "PACKAGED FROZEN FOODS", Category.CLASS),
+ Classification("5143", "DAIRY PRODUCTS, EXCEPT DRIED OR CANNED", Category.CLASS),
+ Classification("5144", "POULTRY AND POULTRY PRODUCTS", Category.CLASS),
+ Classification("5145", "CONFECTIONERY", Category.CLASS),
+ Classification("5146", "FISH AND SEAFOODS", Category.CLASS),
+ Classification("5147", "MEATS AND MEAT PRODUCTS", Category.CLASS),
+ Classification("5148", "FRESH FRUITS AND VEGETABLES", Category.CLASS),
+ Classification("5149", "GROCERIES AND RELATED PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("515", "FARM-PRODUCT RAW MATERIALS", Category.GROUP),
+ Classification("5153", "GRAIN AND FIELD BEANS", Category.CLASS),
+ Classification("5154", "LIVESTOCK", Category.CLASS),
+ Classification("5159", "FARM-PRODUCT RAW MATERIALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("516", "CHEMICALS AND ALLIED PRODUCTS", Category.GROUP),
+ Classification("5162", "PLASTICS MATERIALS AND BASIC FORMS AND SHAPES", Category.CLASS),
+ Classification("5169", "CHEMICALS AND ALLIED PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("517", "PETROLEUM AND PETROLEUM PRODUCTS", Category.GROUP),
+ Classification("5171", "PETROLEUM BULK STATIONS AND TERMINALS", Category.CLASS),
+ Classification("5172", "PETROLEUM AND PETROLEUM PRODUCTS WHOLESALERS, EXCEPT BULK STATION", Category.CLASS),
+ Classification("518", "BEER, WINE, AND DISTILLED ALCOHOLIC BEVERAGES", Category.GROUP),
+ Classification("5181", "BEER AND ALE", Category.CLASS),
+ Classification("5182", "WINE AND DISTILLED ALCOHOLIC BEVERAGES", Category.CLASS),
+ Classification("519", "MISCELLANEOUS NONDURABLE GOODS", Category.GROUP),
+ Classification("5191", "FARM SUPPLIES", Category.CLASS),
+ Classification("5192", "BOOKS, PERIODICALS, AND NEWSPAPERS", Category.CLASS),
+ Classification("5193", "FLOWERS, NURSERY STOCK, AND FLORISTS' SUPPLIES", Category.CLASS),
+ Classification("5194", "TOBACCO AND TOBACCO PRODUCTS", Category.CLASS),
+ Classification("5198", "PAINTS, VARNISHES, AND SUPPLIES", Category.CLASS),
+ Classification("5199", "NONDURABLE GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("G", "RETAIL TRADE", Category.SECTION),
+ Classification("52", "BUILDING MATERIALS, HARDWARE, GARDEN SUPPLY, AND MOBILE HOME DEALERS", Category.DIVISION),
+ Classification("521", "LUMBER AND OTHER BUILDING MATERIALS DEALERS", Category.GROUP),
+ Classification("5211", "LUMBER AND OTHER BUILDING MATERIALS DEALERS", Category.CLASS),
+ Classification("523", "PAINT, GLASS, AND WALLPAPER STORES", Category.GROUP),
+ Classification("5231", "PAINT, GLASS, AND WALLPAPER STORES", Category.CLASS),
+ Classification("525", "HARDWARE STORES", Category.GROUP),
+ Classification("5251", "HARDWARE STORES", Category.CLASS),
+ Classification("526", "RETAIL NURSERIES, LAWN AND GARDEN SUPPLY STORES", Category.GROUP),
+ Classification("5261", "RETAIL NURSERIES, LAWN AND GARDEN SUPPLY STORES", Category.CLASS),
+ Classification("527", "MOBILE HOME DEALERS", Category.GROUP),
+ Classification("5271", "MOBILE HOME DEALERS", Category.CLASS),
+ Classification("53", "GENERAL MERCHANDISE STORES", Category.DIVISION),
+ Classification("531", "DEPARTMENT STORES", Category.GROUP),
+ Classification("5311", "DEPARTMENT STORES", Category.CLASS),
+ Classification("533", "VARIETY STORES", Category.GROUP),
+ Classification("5331", "VARIETY STORES", Category.CLASS),
+ Classification("539", "MISCELLANEOUS GENERAL MERCHANDISE STORES", Category.GROUP),
+ Classification("5399", "MISCELLANEOUS GENERAL MERCHANDISE STORES", Category.CLASS),
+ Classification("54", "FOOD STORES", Category.DIVISION),
+ Classification("541", "GROCERY STORES", Category.GROUP),
+ Classification("5411", "GROCERY STORES", Category.CLASS),
+ Classification("542", "MEAT AND FISH (SEAFOOD) MARKETS, INCLUDING FREEZER PROVISIONERS", Category.GROUP),
+ Classification("5421", "MEAT AND FISH (SEAFOOD) MARKETS, INCLUDING FREEZER PROVISIONERS", Category.CLASS),
+ Classification("543", "FRUIT AND VEGETABLE MARKETS", Category.GROUP),
+ Classification("5431", "FRUIT AND VEGETABLE MARKETS", Category.CLASS),
+ Classification("544", "CANDY, NUT, AND CONFECTIONERY STORES", Category.GROUP),
+ Classification("5441", "CANDY, NUT, AND CONFECTIONERY STORES", Category.CLASS),
+ Classification("545", "DAIRY PRODUCTS STORES", Category.GROUP),
+ Classification("5451", "DAIRY PRODUCTS STORES", Category.CLASS),
+ Classification("546", "RETAIL BAKERIES", Category.GROUP),
+ Classification("5461", "RETAIL BAKERIES", Category.CLASS),
+ Classification("549", "MISCELLANEOUS FOOD STORES", Category.GROUP),
+ Classification("5499", "MISCELLANEOUS FOOD STORES", Category.CLASS),
+ Classification("55", "AUTOMOTIVE DEALERS AND GASOLINE SERVICE STATIONS", Category.DIVISION),
+ Classification("551", "MOTOR VEHICLE DEALERS (NEW AND USED)", Category.GROUP),
+ Classification("5511", "MOTOR VEHICLE DEALERS (NEW AND USED)", Category.CLASS),
+ Classification("552", "MOTOR VEHICLE DEALERS (USED ONLY)", Category.GROUP),
+ Classification("5521", "MOTOR VEHICLE DEALERS (USED ONLY)", Category.CLASS),
+ Classification("553", "AUTO AND HOME SUPPLY STORES", Category.GROUP),
+ Classification("5531", "AUTO AND HOME SUPPLY STORES", Category.CLASS),
+ Classification("554", "GASOLINE SERVICE STATIONS", Category.GROUP),
+ Classification("5541", "GASOLINE SERVICE STATIONS", Category.CLASS),
+ Classification("555", "BOAT DEALERS", Category.GROUP),
+ Classification("5551", "BOAT DEALERS", Category.CLASS),
+ Classification("556", "RECREATIONAL VEHICLE DEALERS", Category.GROUP),
+ Classification("5561", "RECREATIONAL VEHICLE DEALERS", Category.CLASS),
+ Classification("557", "MOTORCYCLE DEALERS", Category.GROUP),
+ Classification("5571", "MOTORCYCLE DEALERS", Category.CLASS),
+ Classification("559", "AUTOMOTIVE DEALERS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("5599", "AUTOMOTIVE DEALERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("56", "APPAREL AND ACCESSORY STORES", Category.DIVISION),
+ Classification("561", "MEN'S AND BOYS' CLOTHING AND ACCESSORY STORES", Category.GROUP),
+ Classification("5611", "MEN'S AND BOYS' CLOTHING AND ACCESSORY STORES", Category.CLASS),
+ Classification("562", "WOMEN'S CLOTHING STORES", Category.GROUP),
+ Classification("5621", "WOMEN'S CLOTHING STORES", Category.CLASS),
+ Classification("563", "WOMEN'S ACCESSORY AND SPECIALTY STORES", Category.GROUP),
+ Classification("5632", "WOMEN'S ACCESSORY AND SPECIALTY STORES", Category.CLASS),
+ Classification("564", "CHILDREN'S AND INFANTS' WEAR STORES", Category.GROUP),
+ Classification("5641", "CHILDREN'S AND INFANTS' WEAR STORES", Category.CLASS),
+ Classification("565", "FAMILY CLOTHING STORES", Category.GROUP),
+ Classification("5651", "FAMILY CLOTHING STORES", Category.CLASS),
+ Classification("566", "SHOE STORES", Category.GROUP),
+ Classification("5661", "SHOE STORES", Category.CLASS),
+ Classification("569", "MISCELLANEOUS APPAREL AND ACCESSORY STORES", Category.GROUP),
+ Classification("5699", "MISCELLANEOUS APPAREL AND ACCESSORY STORES", Category.CLASS),
+ Classification("57", "HOME FURNITURE, FURNISHINGS, AND EQUIPMENT STORES", Category.DIVISION),
+ Classification("571", "HOME FURNITURE AND FURNISHINGS STORES", Category.GROUP),
+ Classification("5712", "FURNITURE STORES", Category.CLASS),
+ Classification("5713", "FLOOR COVERING STORES", Category.CLASS),
+ Classification("5714", "DRAPERY, CURTAIN, AND UPHOLSTERY STORES", Category.CLASS),
+ Classification("5719", "MISCELLANEOUS HOMEFURNISHINGS STORES", Category.CLASS),
+ Classification("572", "HOUSEHOLD APPLIANCE STORES", Category.GROUP),
+ Classification("5722", "HOUSEHOLD APPLIANCE STORES", Category.CLASS),
+ Classification("573", "RADIO, TELEVISION, CONSUMER ELECTRONICS, AND MUSIC STORES", Category.GROUP),
+ Classification("5731", "RADIO, TELEVISION, AND CONSUMER ELECTRONICS STORES", Category.CLASS),
+ Classification("5734", "COMPUTER AND COMPUTER SOFTWARE STORES", Category.CLASS),
+ Classification("5735", "RECORD AND PRERECORDED TAPE STORES", Category.CLASS),
+ Classification("5736", "MUSICAL INSTRUMENT STORES", Category.CLASS),
+ Classification("58", "EATING AND DRINKING PLACES", Category.DIVISION),
+ Classification("581", "EATING AND DRINKING PLACES", Category.GROUP),
+ Classification("5812", "EATING PLACES", Category.CLASS),
+ Classification("5813", "DRINKING PLACES (ALCOHOLIC BEVERAGES)", Category.CLASS),
+ Classification("59", "MISCELLANEOUS RETAIL", Category.DIVISION),
+ Classification("591", "DRUG STORES AND PROPRIETARY STORES", Category.GROUP),
+ Classification("5912", "DRUG STORES AND PROPRIETARY STORES", Category.CLASS),
+ Classification("592", "LIQUOR STORES", Category.GROUP),
+ Classification("5921", "LIQUOR STORES", Category.CLASS),
+ Classification("593", "USED MERCHANDISE STORES", Category.GROUP),
+ Classification("5932", "USED MERCHANDISE STORES", Category.CLASS),
+ Classification("594", "MISCELLANEOUS SHOPPING GOODS STORES", Category.GROUP),
+ Classification("5941", "SPORTING GOODS STORES AND BICYCLE SHOPS", Category.CLASS),
+ Classification("5942", "BOOK STORES", Category.CLASS),
+ Classification("5943", "STATIONERY STORES", Category.CLASS),
+ Classification("5944", "JEWELRY STORES", Category.CLASS),
+ Classification("5945", "HOBBY, TOY, AND GAME SHOPS", Category.CLASS),
+ Classification("5946", "CAMERA AND PHOTOGRAPHIC SUPPLY STORES", Category.CLASS),
+ Classification("5947", "GIFT, NOVELTY, AND SOUVENIR SHOPS", Category.CLASS),
+ Classification("5948", "LUGGAGE AND LEATHER GOODS STORES", Category.CLASS),
+ Classification("5949", "SEWING, NEEDLEWORK, AND PIECE GOODS STORES", Category.CLASS),
+ Classification("596", "NONSTORE RETAILERS", Category.GROUP),
+ Classification("5961", "CATALOG AND MAIL-ORDER HOUSES", Category.CLASS),
+ Classification("5962", "AUTOMATIC MERCHANDISING MACHINE OPERATORS", Category.CLASS),
+ Classification("5963", "DIRECT SELLING ESTABLISHMENTS", Category.CLASS),
+ Classification("598", "FUEL DEALERS", Category.GROUP),
+ Classification("5983", "FUEL OIL DEALERS", Category.CLASS),
+ Classification("5984", "LIQUEFIED PETROLEUM GAS (BOTTLED GAS) DEALERS", Category.CLASS),
+ Classification("5989", "FUEL DEALERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("599", "RETAIL STORES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("5992", "FLORISTS", Category.CLASS),
+ Classification("5993", "TOBACCO STORES AND STANDS", Category.CLASS),
+ Classification("5994", "NEWS DEALERS AND NEWSSTANDS", Category.CLASS),
+ Classification("5995", "OPTICAL GOODS STORES", Category.CLASS),
+ Classification("5999", "MISCELLANEOUS RETAIL STORES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("H", "FINANCE, INSURANCE, AND REAL ESTATE", Category.SECTION),
+ Classification("60", "DEPOSITORY INSTITUTIONS", Category.DIVISION),
+ Classification("601", "CENTRAL RESERVE DEPOSITORY INSTITUTIONS", Category.GROUP),
+ Classification("6011", "FEDERAL RESERVE BANKS", Category.CLASS),
+ Classification("6019", "CENTRAL RESERVE DEPOSITORY INSTITUTIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("602", "COMMERCIAL BANKS", Category.GROUP),
+ Classification("6021", "NATIONAL COMMERCIAL BANKS", Category.CLASS),
+ Classification("6022", "STATE COMMERCIAL BANKS", Category.CLASS),
+ Classification("6029", "COMMERCIAL BANKS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("603", "SAVINGS INSTITUTIONS", Category.GROUP),
+ Classification("6035", "SAVINGS INSTITUTIONS, FEDERALLY CHARTERED", Category.CLASS),
+ Classification("6036", "SAVINGS INSTITUTIONS, NOT FEDERALLY CHARTERED", Category.CLASS),
+ Classification("606", "CREDIT UNIONS", Category.GROUP),
+ Classification("6061", "CREDIT UNIONS, FEDERALLY CHARTERED", Category.CLASS),
+ Classification("6062", "CREDIT UNIONS, NOT FEDERALLY CHARTERED", Category.CLASS),
+ Classification("608", "FOREIGN BANKING AND BRANCHES AND AGENCIES OF FOREIGN BANKS", Category.GROUP),
+ Classification("6081", "BRANCHES AND AGENCIES OF FOREIGN BANKS", Category.CLASS),
+ Classification("6082", "FOREIGN TRADE AND INTERNATIONAL BANKING INSTITUTIONS", Category.CLASS),
+ Classification("609", "FUNCTIONS RELATED TO DEPOSITORY BANKING", Category.GROUP),
+ Classification("6091", "NONDEPOSIT TRUST FACILITIES", Category.CLASS),
+ Classification("6099", "FUNCTIONS RELATED TO DEPOSITORY BANKING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("61", "NONDEPOSITORY CREDIT INSTITUTIONS", Category.DIVISION),
+ Classification("611", "FEDERAL AND FEDERALLY-SPONSORED CREDIT AGENCIES", Category.GROUP),
+ Classification("6111", "FEDERAL AND FEDERALLY-SPONSORED CREDIT AGENCIES", Category.CLASS),
+ Classification("614", "PERSONAL CREDIT INSTITUTIONS", Category.GROUP),
+ Classification("6141", "PERSONAL CREDIT INSTITUTIONS", Category.CLASS),
+ Classification("615", "BUSINESS CREDIT INSTITUTIONS", Category.GROUP),
+ Classification("6153", "SHORT-TERM BUSINESS CREDIT INSTITUTIONS, EXCEPT AGRICULTURAL", Category.CLASS),
+ Classification("6159", "MISCELLANEOUS BUSINESS CREDIT INSTITUTIONS", Category.CLASS),
+ Classification("616", "MORTGAGE BANKERS AND BROKERS", Category.GROUP),
+ Classification("6162", "MORTGAGE BANKERS AND LOAN CORRESPONDENTS", Category.CLASS),
+ Classification("6163", "LOAN BROKERS", Category.CLASS),
+ Classification("62", "SECURITY AND COMMODITY BROKERS, DEALERS, EXCHANGES, AND SERVICES", Category.DIVISION),
+ Classification("621", "SECURITY BROKERS, DEALERS, AND FLOTATION COMPANIES", Category.GROUP),
+ Classification("6211", "SECURITY BROKERS, DEALERS, AND FLOTATION COMPANIES", Category.CLASS),
+ Classification("622", "COMMODITY CONTRACTS BROKERS AND DEALERS", Category.GROUP),
+ Classification("6221", "COMMODITY CONTRACTS BROKERS AND DEALERS", Category.CLASS),
+ Classification("623", "SECURITY AND COMMODITY EXCHANGES", Category.GROUP),
+ Classification("6231", "SECURITY AND COMMODITY EXCHANGES", Category.CLASS),
+ Classification("628", "SERVICES ALLIED WITH THE EXCHANGE OF SECURITIES OR COMMODITIES", Category.GROUP),
+ Classification("6282", "INVESTMENT ADVICE", Category.CLASS),
+ Classification("6289", "SERVICES ALLIED WITH THE EXCHANGE OF SECURITIES OR COMMODITIES, N", Category.CLASS),
+ Classification("63", "INSURANCE CARRIERS", Category.DIVISION),
+ Classification("631", "LIFE INSURANCE", Category.GROUP),
+ Classification("6311", "LIFE INSURANCE", Category.CLASS),
+ Classification("632", "ACCIDENT AND HEALTH INSURANCE AND MEDICAL SERVICE PLANS", Category.GROUP),
+ Classification("6321", "ACCIDENT AND HEALTH INSURANCE", Category.CLASS),
+ Classification("6324", "HOSPITAL AND MEDICAL SERVICE PLANS", Category.CLASS),
+ Classification("633", "FIRE, MARINE, AND CASUALTY INSURANCE", Category.GROUP),
+ Classification("6331", "FIRE, MARINE, AND CASUALTY INSURANCE", Category.CLASS),
+ Classification("635", "SURETY INSURANCE", Category.GROUP),
+ Classification("6351", "SURETY INSURANCE", Category.CLASS),
+ Classification("636", "TITLE INSURANCE", Category.GROUP),
+ Classification("6361", "TITLE INSURANCE", Category.CLASS),
+ Classification("637", "PENSION, HEALTH, AND WELFARE FUNDS", Category.GROUP),
+ Classification("6371", "PENSION, HEALTH, AND WELFARE FUNDS", Category.CLASS),
+ Classification("639", "INSURANCE CARRIERS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("6399", "INSURANCE CARRIERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("64", "INSURANCE AGENTS, BROKERS, AND SERVICE", Category.DIVISION),
+ Classification("641", "INSURANCE AGENTS, BROKERS, AND SERVICE", Category.GROUP),
+ Classification("6411", "INSURANCE AGENTS, BROKERS, AND SERVICE", Category.CLASS),
+ Classification("65", "REAL ESTATE", Category.DIVISION),
+ Classification("651", "REAL ESTATE OPERATORS (EXCEPT DEVELOPERS) AND LESSORS", Category.GROUP),
+ Classification("6512", "OPERATORS OF NONRESIDENTIAL BUILDINGS", Category.CLASS),
+ Classification("6513", "OPERATORS OF APARTMENT BUILDINGS", Category.CLASS),
+ Classification("6514", "OPERATORS OF DWELLINGS OTHER THAN APARTMENT BUILDINGS", Category.CLASS),
+ Classification("6515", "OPERATORS OF RESIDENTIAL MOBILE HOME SITES", Category.CLASS),
+ Classification("6517", "LESSORS OF RAILROAD PROPERTY", Category.CLASS),
+ Classification("6519", "LESSORS OF REAL PROPERTY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("653", "REAL ESTATE AGENTS AND MANAGERS", Category.GROUP),
+ Classification("6531", "REAL ESTATE AGENTS AND MANAGERS", Category.CLASS),
+ Classification("654", "TITLE ABSTRACT OFFICES", Category.GROUP),
+ Classification("6541", "TITLE ABSTRACT OFFICES", Category.CLASS),
+ Classification("655", "LAND SUBDIVIDERS AND DEVELOPERS", Category.GROUP),
+ Classification("6552", "LAND SUBDIVIDERS AND DEVELOPERS, EXCEPT CEMETERIES", Category.CLASS),
+ Classification("6553", "CEMETERY SUBDIVIDERS AND DEVELOPERS", Category.CLASS),
+ Classification("67", "HOLDING AND OTHER INVESTMENT OFFICES", Category.DIVISION),
+ Classification("671", "HOLDING OFFICES", Category.GROUP),
+ Classification("6712", "OFFICES OF BANK HOLDING COMPANIES", Category.CLASS),
+ Classification("6719", "OFFICES OF HOLDING COMPANIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("672", "INVESTMENT OFFICES", Category.GROUP),
+ Classification("6722", "MANAGEMENT INVESTMENT OFFICES, OPEN-END", Category.CLASS),
+ Classification("6726", "UNIT INVESTMENT TRUSTS, FACE-AMOUNT CERTIFICATE OFFICES, AND CLOS", Category.CLASS),
+ Classification("673", "TRUSTS", Category.GROUP),
+ Classification("6732", "EDUCATIONAL, RELIGIOUS, AND CHARITABLE TRUSTS", Category.CLASS),
+ Classification("6733", "TRUSTS, EXCEPT EDUCATIONAL, RELIGIOUS, AND CHARITABLE", Category.CLASS),
+ Classification("679", "MISCELLANEOUS INVESTING", Category.GROUP),
+ Classification("6792", "OIL ROYALTY TRADERS", Category.CLASS),
+ Classification("6794", "PATENT OWNERS AND LESSORS", Category.CLASS),
+ Classification("6798", "REAL ESTATE INVESTMENT TRUSTS", Category.CLASS),
+ Classification("6799", "INVESTORS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("I", "SERVICES", Category.SECTION),
+ Classification("70", "HOTELS, ROOMING HOUSES, CAMPS, AND OTHER LODGING PLACES", Category.DIVISION),
+ Classification("701", "HOTELS AND MOTELS", Category.GROUP),
+ Classification("7011", "HOTELS AND MOTELS", Category.CLASS),
+ Classification("702", "ROOMING AND BOARDING HOUSES", Category.GROUP),
+ Classification("7021", "ROOMING AND BOARDING HOUSES", Category.CLASS),
+ Classification("703", "CAMPS AND RECREATIONAL VEHICLE PARKS", Category.GROUP),
+ Classification("7032", "SPORTING AND RECREATIONAL CAMPS", Category.CLASS),
+ Classification("7033", "RECREATIONAL VEHICLE PARKS AND CAMPSITES", Category.CLASS),
+ Classification("704", "ORGANIZATION HOTELS AND LODGING HOUSES, ON MEMBERSHIP BASIS", Category.GROUP),
+ Classification("7041", "ORGANIZATION HOTELS AND LODGING HOUSES, ON MEMBERSHIP BASIS", Category.CLASS),
+ Classification("72", "PERSONAL SERVICES", Category.DIVISION),
+ Classification("721", "LAUNDRY, CLEANING, AND GARMENT SERVICES", Category.GROUP),
+ Classification("7211", "POWER LAUNDRIES, FAMILY AND COMMERCIAL", Category.CLASS),
+ Classification("7212", "GARMENT PRESSING, AND AGENTS FOR LAUNDRIES AND DRYCLEANERS", Category.CLASS),
+ Classification("7213", "LINEN SUPPLY", Category.CLASS),
+ Classification("7215", "COIN-OPERATED LAUNDRIES AND DRYCLEANING", Category.CLASS),
+ Classification("7216", "DRYCLEANING PLANTS, EXCEPT RUG CLEANING", Category.CLASS),
+ Classification("7217", "CARPET AND UPHOLSTERY CLEANING", Category.CLASS),
+ Classification("7218", "INDUSTRIAL LAUNDERERS", Category.CLASS),
+ Classification("7219", "LAUNDRY AND GARMENT SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("722", "PHOTOGRAPHIC STUDIOS, PORTRAIT", Category.GROUP),
+ Classification("7221", "PHOTOGRAPHIC STUDIOS, PORTRAIT", Category.CLASS),
+ Classification("723", "BEAUTY SHOPS", Category.GROUP),
+ Classification("7231", "BEAUTY SHOPS", Category.CLASS),
+ Classification("724", "BARBER SHOPS", Category.GROUP),
+ Classification("7241", "BARBER SHOPS", Category.CLASS),
+ Classification("725", "SHOE REPAIR SHOPS AND SHOESHINE PARLORS", Category.GROUP),
+ Classification("7251", "SHOE REPAIR SHOPS AND SHOESHINE PARLORS", Category.CLASS),
+ Classification("726", "FUNERAL SERVICE AND CREMATORIES", Category.GROUP),
+ Classification("7261", "FUNERAL SERVICE AND CREMATORIES", Category.CLASS),
+ Classification("729", "MISCELLANEOUS PERSONAL SERVICES", Category.GROUP),
+ Classification("7291", "TAX RETURN PREPARATION SERVICES", Category.CLASS),
+ Classification("7299", "MISCELLANEOUS PERSONAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("73", "BUSINESS SERVICES", Category.DIVISION),
+ Classification("731", "ADVERTISING", Category.GROUP),
+ Classification("7311", "ADVERTISING AGENCIES", Category.CLASS),
+ Classification("7312", "OUTDOOR ADVERTISING SERVICES", Category.CLASS),
+ Classification("7313", "RADIO, TELEVISION, AND PUBLISHERS' ADVERTISING REPRESENTATIVES", Category.CLASS),
+ Classification("7319", "ADVERTISING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("732", "CONSUMER CREDIT REPORTING AGENCIES, MERCANTILE REPORTING AGENCIES", Category.GROUP),
+ Classification("7322", "ADJUSTMENT AND COLLECTION SERVICES", Category.CLASS),
+ Classification("7323", "CREDIT REPORTING SERVICES", Category.CLASS),
+ Classification("733", "MAILING, REPRODUCTION, COMMERCIAL ART AND PHOTOGRAPHY, AND STENOGR", Category.GROUP),
+ Classification("7331", "DIRECT MAIL ADVERTISING SERVICES", Category.CLASS),
+ Classification("7334", "PHOTOCOPYING AND DUPLICATING SERVICES", Category.CLASS),
+ Classification("7335", "COMMERCIAL PHOTOGRAPHY", Category.CLASS),
+ Classification("7336", "COMMERCIAL ART AND GRAPHIC DESIGN", Category.CLASS),
+ Classification("7338", "SECRETARIAL AND COURT REPORTING SERVICES", Category.CLASS),
+ Classification("734", "SERVICES TO DWELLINGS AND OTHER BUILDINGS", Category.GROUP),
+ Classification("7342", "DISINFECTING AND PEST CONTROL SERVICES", Category.CLASS),
+ Classification("7349", "BUILDING CLEANING AND MAINTENANCE SERVICES, NOT ELSEWHERE CLASSIF", Category.CLASS),
+ Classification("735", "MISCELLANEOUS EQUIPMENT RENTAL AND LEASING", Category.GROUP),
+ Classification("7352", "MEDICAL EQUIPMENT RENTAL AND LEASING", Category.CLASS),
+ Classification("7353", "HEAVY CONSTRUCTION EQUIPMENT RENTAL AND LEASING", Category.CLASS),
+ Classification("7359", "EQUIPMENT RENTAL AND LEASING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("736", "PERSONNEL SUPPLY SERVICES", Category.GROUP),
+ Classification("7361", "EMPLOYMENT AGENCIES", Category.CLASS),
+ Classification("7363", "HELP SUPPLY SERVICES", Category.CLASS),
+ Classification("737", "COMPUTER PROGRAMMING, DATA PROCESSING, AND OTHER COMPUTER RELATED", Category.GROUP),
+ Classification("7371", "COMPUTER PROGRAMMING SERVICES", Category.CLASS),
+ Classification("7372", "PREPACKAGED SOFTWARE", Category.CLASS),
+ Classification("7373", "COMPUTER INTEGRATED SYSTEMS DESIGN", Category.CLASS),
+ Classification("7374", "COMPUTER PROCESSING AND DATA PREPARATION AND PROCESSING SERVICES", Category.CLASS),
+ Classification("7375", "INFORMATION RETRIEVAL SERVICES", Category.CLASS),
+ Classification("7376", "COMPUTER FACILITIES MANAGEMENT SERVICES", Category.CLASS),
+ Classification("7377", "COMPUTER RENTAL AND LEASING", Category.CLASS),
+ Classification("7378", "COMPUTER MAINTENANCE AND REPAIR", Category.CLASS),
+ Classification("7379", "COMPUTER RELATED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("738", "MISCELLANEOUS BUSINESS SERVICES", Category.GROUP),
+ Classification("7381", "DETECTIVE, GUARD, AND ARMORED CAR SERVICES", Category.CLASS),
+ Classification("7382", "SECURITY SYSTEMS SERVICES", Category.CLASS),
+ Classification("7383", "NEWS SYNDICATES", Category.CLASS),
+ Classification("7384", "PHOTOFINISHING LABORATORIES", Category.CLASS),
+ Classification("7389", "BUSINESS SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("75", "AUTOMOTIVE REPAIR, SERVICES, AND PARKING", Category.DIVISION),
+ Classification("751", "AUTOMOTIVE RENTAL AND LEASING, WITHOUT DRIVERS", Category.GROUP),
+ Classification("7513", "TRUCK RENTAL AND LEASING, WITHOUT DRIVERS", Category.CLASS),
+ Classification("7514", "PASSENGER CAR RENTAL", Category.CLASS),
+ Classification("7515", "PASSENGER CAR LEASING", Category.CLASS),
+ Classification("7519", "UTILITY TRAILER AND RECREATIONAL VEHICLE RENTAL", Category.CLASS),
+ Classification("752", "AUTOMOBILE PARKING", Category.GROUP),
+ Classification("7521", "AUTOMOBILE PARKING", Category.CLASS),
+ Classification("753", "AUTOMOTIVE REPAIR SHOPS", Category.GROUP),
+ Classification("7532", "TOP, BODY, AND UPHOLSTERY REPAIR SHOPS AND PAINT SHOPS", Category.CLASS),
+ Classification("7533", "AUTOMOTIVE EXHAUST SYSTEM REPAIR SHOPS", Category.CLASS),
+ Classification("7534", "TIRE RETREADING AND REPAIR SHOPS", Category.CLASS),
+ Classification("7536", "AUTOMOTIVE GLASS REPLACEMENT SHOPS", Category.CLASS),
+ Classification("7537", "AUTOMOTIVE TRANSMISSION REPAIR SHOPS", Category.CLASS),
+ Classification("7538", "GENERAL AUTOMOTIVE REPAIR SHOPS", Category.CLASS),
+ Classification("7539", "AUTOMOTIVE REPAIR SHOPS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("754", "AUTOMOTIVE SERVICES, EXCEPT REPAIR", Category.GROUP),
+ Classification("7542", "CARWASHES", Category.CLASS),
+ Classification("7549", "AUTOMOTIVE SERVICES, EXCEPT REPAIR AND CARWASHES", Category.CLASS),
+ Classification("76", "MISCELLANEOUS REPAIR SERVICES", Category.DIVISION),
+ Classification("762", "ELECTRICAL REPAIR SHOPS", Category.GROUP),
+ Classification("7622", "RADIO AND TELEVISION REPAIR SHOPS", Category.CLASS),
+ Classification("7623", "REFRIGERATION AND AIR-CONDITIONING SERVICE AND REPAIR SHOPS", Category.CLASS),
+ Classification("7629", "ELECTRICAL AND ELECTRONIC REPAIR SHOPS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("763", "WATCH, CLOCK, AND JEWELRY REPAIR", Category.GROUP),
+ Classification("7631", "WATCH, CLOCK, AND JEWELRY REPAIR", Category.CLASS),
+ Classification("764", "REUPHOLSTERY AND FURNITURE REPAIR", Category.GROUP),
+ Classification("7641", "REUPHOLSTERY AND FURNITURE REPAIR", Category.CLASS),
+ Classification("769", "MISCELLANEOUS REPAIR SHOPS AND RELATED SERVICES", Category.GROUP),
+ Classification("7692", "WELDING REPAIR", Category.CLASS),
+ Classification("7694", "ARMATURE REWINDING SHOPS", Category.CLASS),
+ Classification("7699", "REPAIR SHOPS AND RELATED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("78", "MOTION PICTURES", Category.DIVISION),
+ Classification("781", "MOTION PICTURE PRODUCTION AND ALLIED SERVICES", Category.GROUP),
+ Classification("7812", "MOTION PICTURE AND VIDEO TAPE PRODUCTION", Category.CLASS),
+ Classification("7819", "SERVICES ALLIED TO MOTION PICTURE PRODUCTION", Category.CLASS),
+ Classification("782", "MOTION PICTURE DISTRIBUTION AND ALLIED SERVICES", Category.GROUP),
+ Classification("7822", "MOTION PICTURE AND VIDEO TAPE DISTRIBUTION", Category.CLASS),
+ Classification("7829", "SERVICES ALLIED TO MOTION PICTURE DISTRIBUTION", Category.CLASS),
+ Classification("783", "MOTION PICTURE THEATERS", Category.GROUP),
+ Classification("7832", "MOTION PICTURE THEATERS, EXCEPT DRIVE-IN", Category.CLASS),
+ Classification("7833", "DRIVE-IN MOTION PICTURE THEATERS", Category.CLASS),
+ Classification("784", "VIDEO TAPE RENTAL", Category.GROUP),
+ Classification("7841", "VIDEO TAPE RENTAL", Category.CLASS),
+ Classification("79", "AMUSEMENT AND RECREATION SERVICES", Category.DIVISION),
+ Classification("791", "DANCE STUDIOS, SCHOOLS, AND HALLS", Category.GROUP),
+ Classification("7911", "DANCE STUDIOS, SCHOOLS, AND HALLS", Category.CLASS),
+ Classification("792", "THEATRICAL PRODUCERS (EXCEPT MOTION PICTURE), BANDS, ORCHESTRAS, A", Category.GROUP),
+ Classification("7922", "THEATRICAL PRODUCERS (EXCEPT MOTION PICTURE) AND MISCELLANEOUS TH", Category.CLASS),
+ Classification("7929", "BANDS, ORCHESTRAS, ACTORS, AND OTHER ENTERTAINERS AND ENTERTAINME", Category.CLASS),
+ Classification("793", "BOWLING CENTERS", Category.GROUP),
+ Classification("7933", "BOWLING CENTERS", Category.CLASS),
+ Classification("794", "COMMERCIAL SPORTS", Category.GROUP),
+ Classification("7941", "PROFESSIONAL SPORTS CLUBS AND PROMOTERS", Category.CLASS),
+ Classification("7948", "RACING, INCLUDING TRACK OPERATION", Category.CLASS),
+ Classification("799", "MISCELLANEOUS AMUSEMENT AND RECREATION SERVICES", Category.GROUP),
+ Classification("7991", "PHYSICAL FITNESS FACILITIES", Category.CLASS),
+ Classification("7992", "PUBLIC GOLF COURSES", Category.CLASS),
+ Classification("7993", "COIN-OPERATED AMUSEMENT DEVICES", Category.CLASS),
+ Classification("7996", "AMUSEMENT PARKS", Category.CLASS),
+ Classification("7997", "MEMBERSHIP SPORTS AND RECREATION CLUBS", Category.CLASS),
+ Classification("7999", "AMUSEMENT AND RECREATION SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("80", "HEALTH SERVICES", Category.DIVISION),
+ Classification("801", "OFFICES AND CLINICS OF DOCTORS OF MEDICINE", Category.GROUP),
+ Classification("8011", "OFFICES AND CLINICS OF DOCTORS OF MEDICINE", Category.CLASS),
+ Classification("802", "OFFICES AND CLINICS OF DENTISTS", Category.GROUP),
+ Classification("8021", "OFFICES AND CLINICS OF DENTISTS", Category.CLASS),
+ Classification("803", "OFFICES AND CLINICS OF DOCTORS OF OSTEOPATHY", Category.GROUP),
+ Classification("8031", "OFFICES AND CLINICS OF DOCTORS OF OSTEOPATHY", Category.CLASS),
+ Classification("804", "OFFICES AND CLINICS OF OTHER HEALTH PRACTITIONERS", Category.GROUP),
+ Classification("8041", "OFFICES AND CLINICS OF CHIROPRACTORS", Category.CLASS),
+ Classification("8042", "OFFICES AND CLINICS OF OPTOMETRISTS", Category.CLASS),
+ Classification("8043", "OFFICES AND CLINICS OF PODIATRISTS", Category.CLASS),
+ Classification("8049", "OFFICES AND CLINICS OF HEALTH PRACTITIONERS, NOT ELSEWHERE CLASSI", Category.CLASS),
+ Classification("805", "NURSING AND PERSONAL CARE FACILITIES", Category.GROUP),
+ Classification("8051", "SKILLED NURSING CARE FACILITIES", Category.CLASS),
+ Classification("8052", "INTERMEDIATE CARE FACILITIES", Category.CLASS),
+ Classification("8059", "NURSING AND PERSONAL CARE FACILITIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("806", "HOSPITALS", Category.GROUP),
+ Classification("8062", "GENERAL MEDICAL AND SURGICAL HOSPITALS", Category.CLASS),
+ Classification("8063", "PSYCHIATRIC HOSPITALS", Category.CLASS),
+ Classification("8069", "SPECIALTY HOSPITALS, EXCEPT PSYCHIATRIC", Category.CLASS),
+ Classification("807", "MEDICAL AND DENTAL LABORATORIES", Category.GROUP),
+ Classification("8071", "MEDICAL LABORATORIES", Category.CLASS),
+ Classification("8072", "DENTAL LABORATORIES", Category.CLASS),
+ Classification("808", "HOME HEALTH CARE SERVICES", Category.GROUP),
+ Classification("8082", "HOME HEALTH CARE SERVICES", Category.CLASS),
+ Classification("809", "MISCELLANEOUS HEALTH AND ALLIED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8092", "KIDNEY DIALYSIS CENTERS", Category.CLASS),
+ Classification("8093", "SPECIALTY OUTPATIENT FACILITIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("8099", "HEALTH AND ALLIED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("81", "LEGAL SERVICES", Category.DIVISION),
+ Classification("811", "LEGAL SERVICES", Category.GROUP),
+ Classification("8111", "LEGAL SERVICES", Category.CLASS),
+ Classification("82", "EDUCATIONAL SERVICES", Category.DIVISION),
+ Classification("821", "ELEMENTARY AND SECONDARY SCHOOLS", Category.GROUP),
+ Classification("8211", "ELEMENTARY AND SECONDARY SCHOOLS", Category.CLASS),
+ Classification("822", "COLLEGES, UNIVERSITIES, PROFESSIONAL SCHOOLS, AND JUNIOR COLLEGES", Category.GROUP),
+ Classification("8221", "COLLEGES, UNIVERSITIES, AND PROFESSIONAL SCHOOLS", Category.CLASS),
+ Classification("8222", "JUNIOR COLLEGES AND TECHNICAL INSTITUTES", Category.CLASS),
+ Classification("823", "LIBRARIES", Category.GROUP),
+ Classification("8231", "LIBRARIES", Category.CLASS),
+ Classification("824", "VOCATIONAL SCHOOLS", Category.GROUP),
+ Classification("8243", "DATA PROCESSING SCHOOLS", Category.CLASS),
+ Classification("8244", "BUSINESS AND SECRETARIAL SCHOOLS", Category.CLASS),
+ Classification("8249", "VOCATIONAL SCHOOLS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("829", "SCHOOLS AND EDUCATIONAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8299", "SCHOOLS AND EDUCATIONAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("83", "SOCIAL SERVICES", Category.DIVISION),
+ Classification("832", "INDIVIDUAL AND FAMILY SOCIAL SERVICES", Category.GROUP),
+ Classification("8322", "INDIVIDUAL AND FAMILY SOCIAL SERVICES", Category.CLASS),
+ Classification("833", "JOB TRAINING AND VOCATIONAL REHABILITATION SERVICES", Category.GROUP),
+ Classification("8331", "JOB TRAINING AND VOCATIONAL REHABILITATION SERVICES", Category.CLASS),
+ Classification("835", "CHILD DAY CARE SERVICES", Category.GROUP),
+ Classification("8351", "CHILD DAY CARE SERVICES", Category.CLASS),
+ Classification("836", "RESIDENTIAL CARE", Category.GROUP),
+ Classification("8361", "RESIDENTIAL CARE", Category.CLASS),
+ Classification("839", "SOCIAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8399", "SOCIAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("84", "MUSEUMS, ART GALLERIES, AND BOTANICAL AND ZOOLOGICAL GARDENS", Category.DIVISION),
+ Classification("841", "MUSEUMS AND ART GALLERIES", Category.GROUP),
+ Classification("8412", "MUSEUMS AND ART GALLERIES", Category.CLASS),
+ Classification("842", "ARBORETA AND BOTANICAL OR ZOOLOGICAL GARDENS", Category.GROUP),
+ Classification("8422", "ARBORETA AND BOTANICAL OR ZOOLOGICAL GARDENS", Category.CLASS),
+ Classification("86", "MEMBERSHIP ORGANIZATIONS", Category.DIVISION),
+ Classification("861", "BUSINESS ASSOCIATIONS", Category.GROUP),
+ Classification("8611", "BUSINESS ASSOCIATIONS", Category.CLASS),
+ Classification("862", "PROFESSIONAL MEMBERSHIP ORGANIZATIONS", Category.GROUP),
+ Classification("8621", "PROFESSIONAL MEMBERSHIP ORGANIZATIONS", Category.CLASS),
+ Classification("863", "LABOR UNIONS AND SIMILAR LABOR ORGANIZATIONS", Category.GROUP),
+ Classification("8631", "LABOR UNIONS AND SIMILAR LABOR ORGANIZATIONS", Category.CLASS),
+ Classification("864", "CIVIC, SOCIAL, AND FRATERNAL ASSOCIATIONS", Category.GROUP),
+ Classification("8641", "CIVIC, SOCIAL, AND FRATERNAL ASSOCIATIONS", Category.CLASS),
+ Classification("865", "POLITICAL ORGANIZATIONS", Category.GROUP),
+ Classification("8651", "POLITICAL ORGANIZATIONS", Category.CLASS),
+ Classification("866", "RELIGIOUS ORGANIZATIONS", Category.GROUP),
+ Classification("8661", "RELIGIOUS ORGANIZATIONS", Category.CLASS),
+ Classification("869", "MEMBERSHIP ORGANIZATIONS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8699", "MEMBERSHIP ORGANIZATIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("87", "ENGINEERING, ACCOUNTING, RESEARCH, MANAGEMENT, AND RELATED SERVICES", Category.DIVISION),
+ Classification("871", "ENGINEERING, ARCHITECTURAL, AND SURVEYING SERVICES", Category.GROUP),
+ Classification("8711", "ENGINEERING SERVICES", Category.CLASS),
+ Classification("8712", "ARCHITECTURAL SERVICES", Category.CLASS),
+ Classification("8713", "SURVEYING SERVICES", Category.CLASS),
+ Classification("872", "ACCOUNTING, AUDITING, AND BOOKKEEPING SERVICES", Category.GROUP),
+ Classification("8721", "ACCOUNTING, AUDITING, AND BOOKKEEPING SERVICES", Category.CLASS),
+ Classification("873", "RESEARCH, DEVELOPMENT, AND TESTING SERVICES", Category.GROUP),
+ Classification("8731", "COMMERCIAL PHYSICAL AND BIOLOGICAL RESEARCH", Category.CLASS),
+ Classification("8732", "COMMERCIAL ECONOMIC, SOCIOLOGICAL, AND EDUCATIONAL RESEARCH", Category.CLASS),
+ Classification("8733", "NONCOMMERCIAL RESEARCH ORGANIZATIONS", Category.CLASS),
+ Classification("8734", "TESTING LABORATORIES", Category.CLASS),
+ Classification("874", "MANAGEMENT AND PUBLIC RELATIONS SERVICES", Category.GROUP),
+ Classification("8741", "MANAGEMENT SERVICES", Category.CLASS),
+ Classification("8742", "MANAGEMENT CONSULTING SERVICES", Category.CLASS),
+ Classification("8743", "PUBLIC RELATIONS SERVICES", Category.CLASS),
+ Classification("8744", "FACILITIES SUPPORT MANAGEMENT SERVICES", Category.CLASS),
+ Classification("8748", "BUSINESS CONSULTING SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("88", "PRIVATE HOUSEHOLDS", Category.DIVISION),
+ Classification("881", "PRIVATE HOUSEHOLDS", Category.GROUP),
+ Classification("8811", "PRIVATE HOUSEHOLDS", Category.CLASS),
+ Classification("89", "SERVICES, NOT ELSEWHERE CLASSIFIED", Category.DIVISION),
+ Classification("899", "SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8999", "SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("J", "PUBLIC ADMINISTRATION", Category.SECTION),
+ Classification("91", "EXECUTIVE, LEGISLATIVE, AND GENERAL GOVERNMENT, EXCEPT FINANCE", Category.DIVISION),
+ Classification("911", "EXECUTIVE OFFICES", Category.GROUP),
+ Classification("9111", "EXECUTIVE OFFICES", Category.CLASS),
+ Classification("912", "LEGISLATIVE BODIES", Category.GROUP),
+ Classification("9121", "LEGISLATIVE BODIES", Category.CLASS),
+ Classification("913", "EXECUTIVE AND LEGISLATIVE OFFICES COMBINED", Category.GROUP),
+ Classification("9131", "EXECUTIVE AND LEGISLATIVE OFFICES COMBINED", Category.CLASS),
+ Classification("919", "GENERAL GOVERNMENT, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("9199", "GENERAL GOVERNMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("92", "JUSTICE, PUBLIC ORDER, AND SAFETY", Category.DIVISION),
+ Classification("921", "COURTS", Category.GROUP),
+ Classification("9211", "COURTS", Category.CLASS),
+ Classification("922", "PUBLIC ORDER AND SAFETY", Category.GROUP),
+ Classification("9221", "POLICE PROTECTION", Category.CLASS),
+ Classification("9222", "LEGAL COUNSEL AND PROSECUTION", Category.CLASS),
+ Classification("9223", "CORRECTIONAL INSTITUTIONS", Category.CLASS),
+ Classification("9224", "FIRE PROTECTION", Category.CLASS),
+ Classification("9229", "PUBLIC ORDER AND SAFETY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("93", "PUBLIC FINANCE, TAXATION, AND MONETARY POLICY", Category.DIVISION),
+ Classification("931", "PUBLIC FINANCE, TAXATION, AND MONETARY POLICY", Category.GROUP),
+ Classification("9311", "PUBLIC FINANCE, TAXATION, AND MONETARY POLICY", Category.CLASS),
+ Classification("94", "ADMINISTRATION OF HUMAN RESOURCE PROGRAMS", Category.DIVISION),
+ Classification("941", "ADMINISTRATION OF EDUCATIONAL PROGRAMS", Category.GROUP),
+ Classification("9411", "ADMINISTRATION OF EDUCATIONAL PROGRAMS", Category.CLASS),
+ Classification("943", "ADMINISTRATION OF PUBLIC HEALTH PROGRAMS", Category.GROUP),
+ Classification("9431", "ADMINISTRATION OF PUBLIC HEALTH PROGRAMS", Category.CLASS),
+ Classification("944", "ADMINISTRATION OF SOCIAL, HUMAN RESOURCE AND INCOME MAINTENANCE PR", Category.GROUP),
+ Classification("9441", "ADMINISTRATION OF SOCIAL, HUMAN RESOURCE AND INCOME MAINTENANCE P", Category.CLASS),
+ Classification("945", "ADMINISTRATION OF VETERANS' AFFAIRS, EXCEPT HEALTH AND INSURANCE", Category.GROUP),
+ Classification("9451", "ADMINISTRATION OF VETERANS' AFFAIRS, EXCEPT HEALTH AND INSURANCE", Category.CLASS),
+ Classification("95", "ADMINISTRATION OF ENVIRONMENTAL QUALITY AND HOUSING PROGRAMS", Category.DIVISION),
+ Classification("951", "ADMINISTRATION OF ENVIRONMENTAL QUALITY PROGRAMS", Category.GROUP),
+ Classification("9511", "AIR AND WATER RESOURCE AND SOLID WASTE MANAGEMENT", Category.CLASS),
+ Classification("9512", "LAND, MINERAL, WILDLIFE, AND FOREST CONSERVATION", Category.CLASS),
+ Classification("953", "ADMINISTRATION OF HOUSING AND URBAN DEVELOPMENT PROGRAMS", Category.GROUP),
+ Classification("9531", "ADMINISTRATION OF HOUSING PROGRAMS", Category.CLASS),
+ Classification("9532", "ADMINISTRATION OF URBAN PLANNING AND COMMUNITY AND RURAL DEVELOPM", Category.CLASS),
+ Classification("96", "ADMINISTRATION OF ECONOMIC PROGRAMS", Category.DIVISION),
+ Classification("961", "ADMINISTRATION OF GENERAL ECONOMIC PROGRAMS", Category.GROUP),
+ Classification("9611", "ADMINISTRATION OF GENERAL ECONOMIC PROGRAMS", Category.CLASS),
+ Classification("962", "REGULATION AND ADMINISTRATION OF TRANSPORTATION PROGRAMS", Category.GROUP),
+ Classification("9621", "REGULATION AND ADMINISTRATION OF TRANSPORTATION PROGRAMS", Category.CLASS),
+ Classification("963", "REGULATION AND ADMINISTRATION OF COMMUNICATIONS, ELECTRIC, GAS, AN", Category.GROUP),
+ Classification("9631", "REGULATION AND ADMINISTRATION OF COMMUNICATIONS, ELECTRIC, GAS, A", Category.CLASS),
+ Classification("964", "REGULATION OF AGRICULTURAL MARKETING AND COMMODITIES", Category.GROUP),
+ Classification("9641", "REGULATION OF AGRICULTURAL MARKETING AND COMMODITIES", Category.CLASS),
+ Classification("965", "REGULATION, LICENSING, AND INSPECTION OF MISCELLANEOUS COMMERCIAL", Category.GROUP),
+ Classification("9651", "REGULATION, LICENSING, AND INSPECTION OF MISCELLANEOUS COMMERCIAL", Category.CLASS),
+ Classification("966", "SPACE RESEARCH AND TECHNOLOGY", Category.GROUP),
+ Classification("9661", "SPACE RESEARCH AND TECHNOLOGY", Category.CLASS),
+ Classification("97", "NATIONAL SECURITY AND INTERNATIONAL AFFAIRS", Category.DIVISION),
+ Classification("971", "NATIONAL SECURITY", Category.GROUP),
+ Classification("9711", "NATIONAL SECURITY", Category.CLASS),
+ Classification("972", "INTERNATIONAL AFFAIRS", Category.GROUP),
+ Classification("9721", "INTERNATIONAL AFFAIRS", Category.CLASS),
+ Classification("K", "NONCLASSIFIABLE ESTABLISHMENTS", Category.SECTION),
+ Classification("99", "NONCLASSIFIABLE ESTABLISHMENTS", Category.DIVISION),
+ Classification("999", "NONCLASSIFIABLE ESTABLISHMENTS", Category.GROUP),
+ Classification("9999", "NONCLASSIFIABLE ESTABLISHMENTS", Category.CLASS),
+ Classification("A", "AGRICULTURE, FORESTRY, AND FISHING", Category.SECTION),
+ Classification("01", "AGRICULTURAL PRODUCTION-CROPS", Category.DIVISION),
+ Classification("011", "CASH GRAINS", Category.GROUP),
+ Classification("0111", "WHEAT", Category.CLASS),
+ Classification("0112", "RICE", Category.CLASS),
+ Classification("0115", "CORN", Category.CLASS),
+ Classification("0116", "SOYBEANS", Category.CLASS),
+ Classification("0119", "CASH GRAINS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("013", "FIELD CROPS, EXCEPT CASH GRAINS", Category.GROUP),
+ Classification("0131", "COTTON", Category.CLASS),
+ Classification("0132", "TOBACCO", Category.CLASS),
+ Classification("0133", "SUGARCANE AND SUGAR BEETS", Category.CLASS),
+ Classification("0134", "IRISH POTATOES", Category.CLASS),
+ Classification("0139", "FIELD CROPS, EXCEPT CASH GRAINS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("016", "VEGETABLES AND MELONS", Category.GROUP),
+ Classification("0161", "VEGETABLES AND MELONS", Category.CLASS),
+ Classification("017", "FRUITS AND TREE NUTS", Category.GROUP),
+ Classification("0171", "BERRY CROPS", Category.CLASS),
+ Classification("0172", "GRAPES", Category.CLASS),
+ Classification("0173", "TREE NUTS", Category.CLASS),
+ Classification("0174", "CITRUS FRUITS", Category.CLASS),
+ Classification("0175", "DECIDUOUS TREE FRUITS", Category.CLASS),
+ Classification("0179", "FRUITS AND TREE NUTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("018", "HORTICULTURAL SPECIALTIES", Category.GROUP),
+ Classification("0181", "ORNAMENTAL FLORICULTURE AND NURSERY PRODUCTS", Category.CLASS),
+ Classification("0182", "FOOD CROPS GROWN UNDER COVER", Category.CLASS),
+ Classification("019", "GENERAL FARMS, PRIMARILY CROP", Category.GROUP),
+ Classification("0191", "GENERAL FARMS, PRIMARILY CROP", Category.CLASS),
+ Classification("02", "AGRICULTURAL PRODUCTION-LIVESTOCK AND ANIMAL SPECIALTIES", Category.DIVISION),
+ Classification("021", "LIVESTOCK, EXCEPT DAIRY AND POULTRY", Category.GROUP),
+ Classification("0211", "BEEF CATTLE FEEDLOTS", Category.CLASS),
+ Classification("0212", "BEEF CATTLE, EXCEPT FEEDLOTS", Category.CLASS),
+ Classification("0213", "HOGS", Category.CLASS),
+ Classification("0214", "SHEEP AND GOATS", Category.CLASS),
+ Classification("0219", "GENERAL LIVESTOCK, EXCEPT DAIRY AND POULTRY", Category.CLASS),
+ Classification("024", "DAIRY FARMS", Category.GROUP),
+ Classification("0241", "DAIRY FARMS", Category.CLASS),
+ Classification("025", "POULTRY AND EGGS", Category.GROUP),
+ Classification("0251", "BROILER, FRYER, AND ROASTER CHICKENS", Category.CLASS),
+ Classification("0252", "CHICKEN EGGS", Category.CLASS),
+ Classification("0253", "TURKEYS AND TURKEY EGGS", Category.CLASS),
+ Classification("0254", "POULTRY HATCHERIES", Category.CLASS),
+ Classification("0259", "POULTRY AND EGGS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("027", "ANIMAL SPECIALTIES", Category.GROUP),
+ Classification("0271", "FUR-BEARING ANIMALS AND RABBITS", Category.CLASS),
+ Classification("0272", "HORSES AND OTHER EQUINES", Category.CLASS),
+ Classification("0273", "ANIMAL AQUACULTURE", Category.CLASS),
+ Classification("0279", "ANIMAL SPECIALTIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("029", "GENERAL FARMS, PRIMARILY LIVESTOCK AND ANIMAL SPECIALTIES", Category.GROUP),
+ Classification("0291", "GENERAL FARMS, PRIMARILY LIVESTOCK AND ANIMAL SPECIALTIES", Category.CLASS),
+ Classification("07", "AGRICULTURAL SERVICES", Category.DIVISION),
+ Classification("071", "SOIL PREPARATION SERVICES", Category.GROUP),
+ Classification("0711", "SOIL PREPARATION SERVICES", Category.CLASS),
+ Classification("072", "CROP SERVICES", Category.GROUP),
+ Classification("0721", "CROP PLANTING, CULTIVATING, AND PROTECTING", Category.CLASS),
+ Classification("0722", "CROP HARVESTING, PRIMARILY BY MACHINE", Category.CLASS),
+ Classification("0723", "CROP PREPARATION SERVICES FOR MARKET, EXCEPT COTTON GINNING", Category.CLASS),
+ Classification("0724", "COTTON GINNING", Category.CLASS),
+ Classification("074", "VETERINARY SERVICES", Category.GROUP),
+ Classification("0741", "VETERINARY SERVICES FOR LIVESTOCK", Category.CLASS),
+ Classification("0742", "VETERINARY SERVICES FOR ANIMAL SPECIALTIES", Category.CLASS),
+ Classification("075", "ANIMAL SERVICES, EXCEPT VETERINARY", Category.GROUP),
+ Classification("0751", "LIVESTOCK SERVICES, EXCEPT VETERINARY", Category.CLASS),
+ Classification("0752", "ANIMAL SPECIALTY SERVICES, EXCEPT VETERINARY", Category.CLASS),
+ Classification("076", "FARM LABOR AND MANAGEMENT SERVICES", Category.GROUP),
+ Classification("0761", "FARM LABOR CONTRACTORS AND CREW LEADERS", Category.CLASS),
+ Classification("0762", "FARM MANAGEMENT SERVICES", Category.CLASS),
+ Classification("078", "LANDSCAPE AND HORTICULTURAL SERVICES", Category.GROUP),
+ Classification("0781", "LANDSCAPE COUNSELING AND PLANNING", Category.CLASS),
+ Classification("0782", "LAWN AND GARDEN SERVICES", Category.CLASS),
+ Classification("0783", "ORNAMENTAL SHRUB AND TREE SERVICES", Category.CLASS),
+ Classification("08", "FORESTRY", Category.DIVISION),
+ Classification("081", "TIMBER TRACTS", Category.GROUP),
+ Classification("0811", "TIMBER TRACTS", Category.CLASS),
+ Classification("083", "FOREST NURSERIES AND GATHERING OF FOREST PRODUCTS", Category.GROUP),
+ Classification("0831", "FOREST NURSERIES AND GATHERING OF FOREST PRODUCTS", Category.CLASS),
+ Classification("085", "FORESTRY SERVICES", Category.GROUP),
+ Classification("0851", "FORESTRY SERVICES", Category.CLASS),
+ Classification("09", "FISHING, HUNTING, AND TRAPPING", Category.DIVISION),
+ Classification("091", "COMMERCIAL FISHING", Category.GROUP),
+ Classification("0912", "FINFISH", Category.CLASS),
+ Classification("0913", "SHELLFISH", Category.CLASS),
+ Classification("0919", "MISCELLANEOUS MARINE PRODUCTS", Category.CLASS),
+ Classification("092", "FISH HATCHERIES AND PRESERVES", Category.GROUP),
+ Classification("0921", "FISH HATCHERIES AND PRESERVES", Category.CLASS),
+ Classification("097", "HUNTING AND TRAPPING, AND GAME PROPAGATION", Category.GROUP),
+ Classification("0971", "HUNTING AND TRAPPING, AND GAME PROPAGATION", Category.CLASS),
+ Classification("B", "MINING", Category.SECTION),
+ Classification("10", "METAL MINING", Category.DIVISION),
+ Classification("101", "IRON ORES", Category.GROUP),
+ Classification("1011", "IRON ORES", Category.CLASS),
+ Classification("102", "COPPER ORES", Category.GROUP),
+ Classification("1021", "COPPER ORES", Category.CLASS),
+ Classification("103", "LEAD AND ZINC ORES", Category.GROUP),
+ Classification("1031", "LEAD AND ZINC ORES", Category.CLASS),
+ Classification("104", "GOLD AND SILVER ORES", Category.GROUP),
+ Classification("1041", "GOLD ORES", Category.CLASS),
+ Classification("1044", "SILVER ORES", Category.CLASS),
+ Classification("106", "FERROALLOY ORES, EXCEPT VANADIUM", Category.GROUP),
+ Classification("1061", "FERROALLOY ORES, EXCEPT VANADIUM", Category.CLASS),
+ Classification("108", "METAL MINING SERVICES", Category.GROUP),
+ Classification("1081", "METAL MINING SERVICES", Category.CLASS),
+ Classification("109", "MISCELLANEOUS METAL ORES", Category.GROUP),
+ Classification("1094", "URANIUM-RADIUM-VANADIUM ORES", Category.CLASS),
+ Classification("1099", "MISCELLANEOUS METAL ORES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("12", "COAL MINING", Category.DIVISION),
+ Classification("122", "BITUMINOUS COAL AND LIGNITE MINING", Category.GROUP),
+ Classification("1221", "BITUMINOUS COAL AND LIGNITE SURFACE MINING", Category.CLASS),
+ Classification("1222", "BITUMINOUS COAL UNDERGROUND MINING", Category.CLASS),
+ Classification("123", "ANTHRACITE MINING", Category.GROUP),
+ Classification("1231", "ANTHRACITE MINING", Category.CLASS),
+ Classification("124", "COAL MINING SERVICES", Category.GROUP),
+ Classification("1241", "COAL MINING SERVICES", Category.CLASS),
+ Classification("13", "OIL AND GAS EXTRACTION", Category.DIVISION),
+ Classification("131", "CRUDE PETROLEUM AND NATURAL GAS", Category.GROUP),
+ Classification("1311", "CRUDE PETROLEUM AND NATURAL GAS", Category.CLASS),
+ Classification("132", "NATURAL GAS LIQUIDS", Category.GROUP),
+ Classification("1321", "NATURAL GAS LIQUIDS", Category.CLASS),
+ Classification("138", "OIL AND GAS FIELD SERVICES", Category.GROUP),
+ Classification("1381", "DRILLING OIL AND GAS WELLS", Category.CLASS),
+ Classification("1382", "OIL AND GAS FIELD EXPLORATION SERVICES", Category.CLASS),
+ Classification("1389", "OIL AND GAS FIELD SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("14", "MINING AND QUARRYING OF NONMETALLIC MINERALS, EXCEPT FUELS", Category.DIVISION),
+ Classification("141", "DIMENSION STONE", Category.GROUP),
+ Classification("1411", "DIMENSION STONE", Category.CLASS),
+ Classification("142", "CRUSHED AND BROKEN STONE, INCLUDING RIPRAP", Category.GROUP),
+ Classification("1422", "CRUSHED AND BROKEN LIMESTONE", Category.CLASS),
+ Classification("1423", "CRUSHED AND BROKEN GRANITE", Category.CLASS),
+ Classification("1429", "CRUSHED AND BROKEN STONE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("144", "SAND AND GRAVEL", Category.GROUP),
+ Classification("1442", "CONSTRUCTION SAND AND GRAVEL", Category.CLASS),
+ Classification("1446", "INDUSTRIAL SAND", Category.CLASS),
+ Classification("145", "CLAY, CERAMIC, AND REFRACTORY MINERALS", Category.GROUP),
+ Classification("1455", "KAOLIN AND BALL CLAY", Category.CLASS),
+ Classification("1459", "CLAY, CERAMIC, AND REFRACTORY MINERALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("147", "CHEMICAL AND FERTILIZER MINERAL MINING", Category.GROUP),
+ Classification("1474", "POTASH, SODA, AND BORATE MINERALS", Category.CLASS),
+ Classification("1475", "PHOSPHATE ROCK", Category.CLASS),
+ Classification("1479", "CHEMICAL AND FERTILIZER MINERAL MINING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("148", "NONMETALLIC MINERALS SERVICES, EXCEPT FUELS", Category.GROUP),
+ Classification("1481", "NONMETALLIC MINERALS SERVICES, EXCEPT FUELS", Category.CLASS),
+ Classification("149", "MISCELLANEOUS NONMETALLIC MINERALS, EXCEPT FUELS", Category.GROUP),
+ Classification("1499", "MISCELLANEOUS NONMETALLIC MINERALS, EXCEPT FUELS", Category.CLASS),
+ Classification("C", "CONSTRUCTION", Category.SECTION),
+ Classification("15", "BUILDING CONSTRUCTION-GENERAL CONTRACTORS AND OPERATIVE BUILDERS", Category.DIVISION),
+ Classification("152", "GENERAL BUILDING CONTRACTORS-RESIDENTIAL BUILDINGS", Category.GROUP),
+ Classification("1521", "GENERAL CONTRACTORS-SINGLE-FAMILY HOUSES", Category.CLASS),
+ Classification("1522", "GENERAL CONTRACTORS-RESIDENTIAL BUILDINGS, OTHER THAN SINGLE-FAMI", Category.CLASS),
+ Classification("153", "OPERATIVE BUILDERS", Category.GROUP),
+ Classification("1531", "OPERATIVE BUILDERS", Category.CLASS),
+ Classification("154", "GENERAL BUILDING CONTRACTORS-NONRESIDENTIAL BUILDINGS", Category.GROUP),
+ Classification("1541", "GENERAL CONTRACTORS-INDUSTRIAL BUILDINGS AND WAREHOUSES", Category.CLASS),
+ Classification("1542", "GENERAL CONTRACTORS-NONRESIDENTIAL BUILDINGS, OTHER THAN INDUSTRI", Category.CLASS),
+ Classification("16", "HEAVY CONSTRUCTION OTHER THAN BUILDING CONSTRUCTION-CONTRACTORS", Category.DIVISION),
+ Classification("161", "HIGHWAY AND STREET CONSTRUCTION, EXCEPT ELEVATED HIGHWAYS", Category.GROUP),
+ Classification("1611", "HIGHWAY AND STREET CONSTRUCTION, EXCEPT ELEVATED HIGHWAYS", Category.CLASS),
+ Classification("162", "HEAVY CONSTRUCTION, EXCEPT HIGHWAY AND STREET CONSTRUCTION", Category.GROUP),
+ Classification("1622", "BRIDGE, TUNNEL, AND ELEVATED HIGHWAY CONSTRUCTION", Category.CLASS),
+ Classification("1623", "WATER, SEWER, PIPELINE, AND COMMUNICATIONS AND POWER LINE CONSTRU", Category.CLASS),
+ Classification("1629", "HEAVY CONSTRUCTION, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("17", "CONSTRUCTION-SPECIAL TRADE CONTRACTORS", Category.DIVISION),
+ Classification("171", "PLUMBING, HEATING AND AIR-CONDITIONING", Category.GROUP),
+ Classification("1711", "PLUMBING, HEATING AND AIR-CONDITIONING", Category.CLASS),
+ Classification("172", "PAINTING AND PAPER HANGING", Category.GROUP),
+ Classification("1721", "PAINTING AND PAPER HANGING", Category.CLASS),
+ Classification("173", "ELECTRICAL WORK", Category.GROUP),
+ Classification("1731", "ELECTRICAL WORK", Category.CLASS),
+ Classification("174", "MASONRY, STONEWORK, TILE SETTING, AND PLASTERING", Category.GROUP),
+ Classification("1741", "MASONRY, STONE SETTING, AND OTHER STONE WORK", Category.CLASS),
+ Classification("1742", "PLASTERING, DRYWALL, ACOUSTICAL, AND INSULATION WORK", Category.CLASS),
+ Classification("1743", "TERRAZZO, TILE, MARBLE, AND MOSAIC WORK", Category.CLASS),
+ Classification("175", "CARPENTRY AND FLOOR WORK", Category.GROUP),
+ Classification("1751", "CARPENTRY WORK", Category.CLASS),
+ Classification("1752", "FLOOR LAYING AND OTHER FLOOR WORK, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("176", "ROOFING, SIDING, AND SHEET METAL WORK", Category.GROUP),
+ Classification("1761", "ROOFING, SIDING, AND SHEET METAL WORK", Category.CLASS),
+ Classification("177", "CONCRETE WORK", Category.GROUP),
+ Classification("1771", "CONCRETE WORK", Category.CLASS),
+ Classification("178", "WATER WELL DRILLING", Category.GROUP),
+ Classification("1781", "WATER WELL DRILLING", Category.CLASS),
+ Classification("179", "MISCELLANEOUS SPECIAL TRADE CONTRACTORS", Category.GROUP),
+ Classification("1791", "STRUCTURAL STEEL ERECTION", Category.CLASS),
+ Classification("1793", "GLASS AND GLAZING WORK", Category.CLASS),
+ Classification("1793", "GLASS INSTALLATION, EXCEPT AUTOMOTIVE-CONTRACTORS", Category.CLASS),
+ Classification("1794", "EXCAVATION WORK", Category.CLASS),
+ Classification("1795", "WRECKING AND DEMOLITION WORK", Category.CLASS),
+ Classification("1796", "INSTALLATION OR ERECTION OF BUILDING EQUIPMENT, NOT ELSEWHERE CLA", Category.CLASS),
+ Classification("1799", "SPECIAL TRADE CONTRACTORS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("D", "MANUFACTURING", Category.SECTION),
+ Classification("20", "FOOD AND KINDRED PRODUCTS", Category.DIVISION),
+ Classification("201", "MEAT PRODUCTS", Category.GROUP),
+ Classification("2011", "MEAT PACKING PLANTS", Category.CLASS),
+ Classification("2013", "SAUSAGES AND OTHER PREPARED MEAT PRODUCTS", Category.CLASS),
+ Classification("2015", "POULTRY SLAUGHTERING AND PROCESSING", Category.CLASS),
+ Classification("202", "DAIRY PRODUCTS", Category.GROUP),
+ Classification("2021", "CREAMERY BUTTER", Category.CLASS),
+ Classification("2022", "NATURAL, PROCESSED, AND IMITATION CHEESE", Category.CLASS),
+ Classification("2023", "DRY, CONDENSED, AND EVAPORATED DAIRY PRODUCTS", Category.CLASS),
+ Classification("2024", "ICE CREAM AND FROZEN DESSERTS", Category.CLASS),
+ Classification("2026", "FLUID MILK", Category.CLASS),
+ Classification("203", "CANNED, FROZEN, AND PRESERVED FRUITS, VEGETABLES, AND FOOD SPECIAL", Category.GROUP),
+ Classification("2032", "CANNED SPECIALTIES", Category.CLASS),
+ Classification("2033", "CANNED FRUITS, VEGETABLES, PRESERVES, JAMS, AND JELLIES", Category.CLASS),
+ Classification("2034", "DRIED AND DEHYDRATED FRUITS, VEGETABLES, AND SOUP MIXES", Category.CLASS),
+ Classification("2035", "PICKLED FRUITS AND VEGETABLES, VEGETABLE SAUCES AND SEASONINGS, A", Category.CLASS),
+ Classification("2037", "FROZEN FRUITS, FRUIT JUICES, AND VEGETABLES", Category.CLASS),
+ Classification("2038", "FROZEN SPECIALTIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("204", "GRAIN MILL PRODUCTS", Category.GROUP),
+ Classification("2041", "FLOUR AND OTHER GRAIN MILL PRODUCTS", Category.CLASS),
+ Classification("2043", "CEREAL BREAKFAST FOODS", Category.CLASS),
+ Classification("2044", "RICE MILLING", Category.CLASS),
+ Classification("2045", "PREPARED FLOUR MIXES AND DOUGHS", Category.CLASS),
+ Classification("2046", "WET CORN MILLING", Category.CLASS),
+ Classification("2047", "DOG AND CAT FOOD", Category.CLASS),
+ Classification("2048", "PREPARED FEEDS AND FEED INGREDIENTS FOR ANIMALS AND FOWLS, EXCEPT", Category.CLASS),
+ Classification("205", "BAKERY PRODUCTS", Category.GROUP),
+ Classification("2051", "BREAD AND OTHER BAKERY PRODUCTS, EXCEPT COOKIES AND CRACKERS", Category.CLASS),
+ Classification("2052", "COOKIES AND CRACKERS", Category.CLASS),
+ Classification("2053", "FROZEN BAKERY PRODUCTS, EXCEPT BREAD", Category.CLASS),
+ Classification("206", "SUGAR AND CONFECTIONERY PRODUCTS", Category.GROUP),
+ Classification("2061", "CANE SUGAR, EXCEPT REFINING", Category.CLASS),
+ Classification("2062", "CANE SUGAR REFINING", Category.CLASS),
+ Classification("2063", "BEET SUGAR", Category.CLASS),
+ Classification("2064", "CANDY AND OTHER CONFECTIONERY PRODUCTS", Category.CLASS),
+ Classification("2066", "CHOCOLATE AND COCOA PRODUCTS", Category.CLASS),
+ Classification("2067", "CHEWING GUM", Category.CLASS),
+ Classification("2068", "SALTED AND ROASTED NUTS AND SEEDS", Category.CLASS),
+ Classification("207", "FATS AND OILS", Category.GROUP),
+ Classification("2074", "COTTONSEED OIL MILLS", Category.CLASS),
+ Classification("2075", "SOYBEAN OIL MILLS", Category.CLASS),
+ Classification("2076", "VEGETABLE OIL MILLS, EXCEPT CORN, COTTONSEED, AND SOYBEAN", Category.CLASS),
+ Classification("2077", "ANIMAL AND MARINE FATS AND OILS", Category.CLASS),
+ Classification("2079", "SHORTENING, TABLE OILS, MARGARINE, AND OTHER EDIBLE FATS AND OILS", Category.CLASS),
+ Classification("208", "BEVERAGES", Category.GROUP),
+ Classification("2082", "MALT BEVERAGES", Category.CLASS),
+ Classification("2083", "MALT", Category.CLASS),
+ Classification("2084", "WINES, BRANDY, AND BRANDY SPIRITS", Category.CLASS),
+ Classification("2085", "DISTILLED AND BLENDED LIQUORS", Category.CLASS),
+ Classification("2086", "BOTTLED AND CANNED SOFT DRINKS AND CARBONATED WATERS", Category.CLASS),
+ Classification("2087", "FLAVORING EXTRACTS AND FLAVORING SYRUPS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("209", "MISCELLANEOUS FOOD PREPARATIONS AND KINDRED PRODUCTS", Category.GROUP),
+ Classification("2091", "CANNED AND CURED FISH AND SEAFOODS", Category.CLASS),
+ Classification("2092", "PREPARED FRESH OR FROZEN FISH AND SEAFOODS", Category.CLASS),
+ Classification("2095", "ROASTED COFFEE", Category.CLASS),
+ Classification("2096", "POTATO CHIPS, CORN CHIPS, AND SIMILAR SNACKS", Category.CLASS),
+ Classification("2097", "MANUFACTURED ICE", Category.CLASS),
+ Classification("2098", "MACARONI, SPAGHETTI, VERMICELLI, AND NOODLES", Category.CLASS),
+ Classification("2099", "FOOD PREPARATIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("21", "TOBACCO PRODUCTS", Category.DIVISION),
+ Classification("211", "CIGARETTES", Category.GROUP),
+ Classification("2111", "CIGARETTES", Category.CLASS),
+ Classification("212", "CIGARS", Category.GROUP),
+ Classification("2121", "CIGARS", Category.CLASS),
+ Classification("213", "CHEWING AND SMOKING TOBACCO AND SNUFF", Category.GROUP),
+ Classification("2131", "CHEWING AND SMOKING TOBACCO AND SNUFF", Category.CLASS),
+ Classification("214", "TOBACCO STEMMING AND REDRYING", Category.GROUP),
+ Classification("2141", "TOBACCO STEMMING AND REDRYING", Category.CLASS),
+ Classification("22", "TEXTILE MILL PRODUCTS", Category.DIVISION),
+ Classification("221", "BROADWOVEN FABRIC MILLS, COTTON", Category.GROUP),
+ Classification("2211", "BROADWOVEN FABRIC MILLS, COTTON", Category.CLASS),
+ Classification("222", "BROADWOVEN FABRIC MILLS, MANMADE FIBER AND SILK", Category.GROUP),
+ Classification("2221", "BROADWOVEN FABRIC MILLS, MANMADE FIBER AND SILK", Category.CLASS),
+ Classification("223", "BROADWOVEN FABRIC MILLS, WOOL (INCLUDING DYEING AND FINISHING)", Category.GROUP),
+ Classification("2231", "BROADWOVEN FABRIC MILLS, WOOL (INCLUDING DYEING AND FINISHING)", Category.CLASS),
+ Classification("224", "NARROW FABRIC AND OTHER SMALLWARES MILLS: COTTON, WOOL, SILK, AND", Category.GROUP),
+ Classification("2241", "NARROW FABRIC AND OTHER SMALLWARES MILLS: COTTON, WOOL, SILK, AND", Category.CLASS),
+ Classification("225", "KNITTING MILLS", Category.GROUP),
+ Classification("2251", "WOMEN'S FULL-LENGTH AND KNEE-LENGTH HOSIERY, EXCEPT SOCKS", Category.CLASS),
+ Classification("2252", "HOSIERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("2253", "KNIT OUTERWEAR MILLS", Category.CLASS),
+ Classification("2254", "KNIT UNDERWEAR AND NIGHTWEAR MILLS", Category.CLASS),
+ Classification("2257", "WEFT KNIT FABRIC MILLS", Category.CLASS),
+ Classification("2258", "LACE AND WARP KNIT FABRIC MILLS", Category.CLASS),
+ Classification("2259", "KNITTING MILLS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("226", "DYEING AND FINISHING TEXTILES, EXCEPT WOOL FABRICS AND KNIT GOODS", Category.GROUP),
+ Classification("2261", "FINISHERS OF BROADWOVEN FABRICS OF COTTON", Category.CLASS),
+ Classification("2262", "FINISHERS OF BROADWOVEN FABRICS OF MANMADE FIBER AND SILK", Category.CLASS),
+ Classification("2269", "FINISHERS OF TEXTILES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("227", "CARPETS AND RUGS", Category.GROUP),
+ Classification("2273", "CARPETS AND RUGS", Category.CLASS),
+ Classification("228", "YARN AND THREAD MILLS", Category.GROUP),
+ Classification("2281", "YARN SPINNING MILLS", Category.CLASS),
+ Classification("2282", "YARN TEXTURIZING, THROWING, TWISTING, AND WINDING MILLS", Category.CLASS),
+ Classification("2282", "ACETATE FILAMENT YARN: THROWING, TWISTING, WINDING, OR SPOOLING", Category.CLASS),
+ Classification("2284", "THREAD MILLS", Category.CLASS),
+ Classification("229", "MISCELLANEOUS TEXTILE GOODS", Category.GROUP),
+ Classification("2295", "COATED FABRICS, NOT RUBBERIZED", Category.CLASS),
+ Classification("2296", "TIRE CORD AND FABRICS", Category.CLASS),
+ Classification("2297", "NONWOVEN FABRICS", Category.CLASS),
+ Classification("2298", "CORDAGE AND TWINE", Category.CLASS),
+ Classification("2299", "TEXTILE GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification(
+ "23", "APPAREL AND OTHER FINISHED PRODUCTS MADE FROM FABRICS AND SIMILAR MATERIAL", Category.DIVISION
+ ),
+ Classification("231", "MEN'S AND BOYS' SUITS, COATS, AND OVERCOATS", Category.GROUP),
+ Classification("2311", "MEN'S AND BOYS' SUITS, COATS, AND OVERCOATS", Category.CLASS),
+ Classification("232", "MEN'S AND BOYS' FURNISHINGS, WORK CLOTHING, AND ALLIED GARMENTS", Category.GROUP),
+ Classification("2321", "MEN'S AND BOYS' SHIRTS, EXCEPT WORK SHIRTS", Category.CLASS),
+ Classification("2322", "MEN'S AND BOYS' UNDERWEAR AND NIGHTWEAR", Category.CLASS),
+ Classification("2323", "MEN'S AND BOYS' NECKWEAR", Category.CLASS),
+ Classification("2325", "MEN'S AND BOYS' SEPARATE TROUSERS AND SLACKS", Category.CLASS),
+ Classification("2326", "MEN'S AND BOYS' WORK CLOTHING", Category.CLASS),
+ Classification("2329", "MEN'S AND BOYS' CLOTHING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("233", "WOMEN'S, MISSES', AND JUNIORS' OUTERWEAR", Category.GROUP),
+ Classification("2331", "WOMEN'S, MISSES', AND JUNIORS' BLOUSES AND SHIRTS", Category.CLASS),
+ Classification("2335", "WOMEN'S, MISSES', AND JUNIORS' DRESSES", Category.CLASS),
+ Classification("2337", "WOMEN'S, MISSES', AND JUNIORS' SUITS, SKIRTS, AND COATS", Category.CLASS),
+ Classification("2339", "WOMEN'S, MISSES', AND JUNIORS' OUTERWEAR, NOT ELSEWHERE CLASSIFIE", Category.CLASS),
+ Classification("234", "WOMEN'S, MISSES', CHILDREN'S, AND INFANTS' UNDERGARMENTS", Category.GROUP),
+ Classification("2341", "WOMEN'S, MISSES', CHILDREN'S, AND INFANTS' UNDERWEAR AND NIGHTWEA", Category.CLASS),
+ Classification("2342", "BRASSIERES, GIRDLES, AND ALLIED GARMENTS", Category.CLASS),
+ Classification("235", "HATS, CAPS, AND MILLINERY", Category.GROUP),
+ Classification("2353", "HATS, CAPS, AND MILLINERY", Category.CLASS),
+ Classification("236", "GIRLS', CHILDREN'S, AND INFANTS' OUTERWEAR", Category.GROUP),
+ Classification("2361", "GIRLS', CHILDREN'S, AND INFANTS' DRESSES, BLOUSES, AND SHIRTS", Category.CLASS),
+ Classification("2369", "GIRLS', CHILDREN'S, AND INFANTS' OUTERWEAR, NOT ELSEWHERE CLASSIF", Category.CLASS),
+ Classification("237", "FUR GOODS", Category.GROUP),
+ Classification("2371", "FUR GOODS", Category.CLASS),
+ Classification("238", "MISCELLANEOUS APPAREL AND ACCESSORIES", Category.GROUP),
+ Classification("2381", "DRESS AND WORK GLOVES, EXCEPT KNIT AND ALL-LEATHER", Category.CLASS),
+ Classification("2384", "ROBES AND DRESSING GOWNS", Category.CLASS),
+ Classification("2385", "WATERPROOF OUTERWEAR", Category.CLASS),
+ Classification("2386", "LEATHER AND SHEEP-LINED CLOTHING", Category.CLASS),
+ Classification("2387", "APPAREL BELTS", Category.CLASS),
+ Classification("2389", "APPAREL AND ACCESSORIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("239", "MISCELLANEOUS FABRICATED TEXTILE PRODUCTS", Category.GROUP),
+ Classification("2391", "CURTAINS AND DRAPERIES", Category.CLASS),
+ Classification("2392", "HOUSEFURNISHINGS, EXCEPT CURTAINS AND DRAPERIES", Category.CLASS),
+ Classification("2393", "TEXTILE BAGS", Category.CLASS),
+ Classification("2394", "CANVAS AND RELATED PRODUCTS", Category.CLASS),
+ Classification("2395", "PLEATING, DECORATIVE AND NOVELTY STITCHING, AND TUCKING FOR THE T", Category.CLASS),
+ Classification("2396", "AUTOMOTIVE TRIMMINGS, APPAREL FINDINGS, AND RELATED PRODUCTS", Category.CLASS),
+ Classification("2397", "SCHIFFLI MACHINE EMBROIDERIES", Category.CLASS),
+ Classification("2399", "FABRICATED TEXTILE PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("24", "LUMBER AND WOOD PRODUCTS, EXCEPT FURNITURE", Category.DIVISION),
+ Classification("241", "LOGGING", Category.GROUP),
+ Classification("2411", "LOGGING", Category.CLASS),
+ Classification("242", "SAWMILLS AND PLANING MILLS", Category.GROUP),
+ Classification("2421", "SAWMILLS AND PLANING MILLS, GENERAL", Category.CLASS),
+ Classification("2426", "HARDWOOD DIMENSION AND FLOORING MILLS", Category.CLASS),
+ Classification("2429", "SPECIAL PRODUCT SAWMILLS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("243", "MILLWORK, VENEER, PLYWOOD, AND STRUCTURAL WOOD MEMBERS", Category.GROUP),
+ Classification("2431", "MILLWORK", Category.CLASS),
+ Classification("2434", "WOOD KITCHEN CABINETS", Category.CLASS),
+ Classification("2435", "HARDWOOD VENEER AND PLYWOOD", Category.CLASS),
+ Classification("2436", "SOFTWOOD VENEER AND PLYWOOD", Category.CLASS),
+ Classification("2439", "STRUCTURAL WOOD MEMBERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("244", "WOOD CONTAINERS", Category.GROUP),
+ Classification("2441", "NAILED AND LOCK CORNER WOOD BOXES AND SHOOK", Category.CLASS),
+ Classification("2448", "WOOD PALLETS AND SKIDS", Category.CLASS),
+ Classification("2449", "WOOD CONTAINERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("245", "WOOD BUILDINGS AND MOBILE HOMES", Category.GROUP),
+ Classification("2451", "MOBILE HOMES", Category.CLASS),
+ Classification("2452", "PREFABRICATED WOOD BUILDINGS AND COMPONENTS", Category.CLASS),
+ Classification("249", "MISCELLANEOUS WOOD PRODUCTS", Category.GROUP),
+ Classification("2491", "WOOD PRESERVING", Category.CLASS),
+ Classification("2493", "RECONSTITUTED WOOD PRODUCTS", Category.CLASS),
+ Classification("2499", "WOOD PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("25", "FURNITURE AND FIXTURES", Category.DIVISION),
+ Classification("251", "HOUSEHOLD FURNITURE", Category.GROUP),
+ Classification("2511", "WOOD HOUSEHOLD FURNITURE, EXCEPT UPHOLSTERED", Category.CLASS),
+ Classification("2512", "WOOD HOUSEHOLD FURNITURE, UPHOLSTERED", Category.CLASS),
+ Classification("2514", "METAL HOUSEHOLD FURNITURE", Category.CLASS),
+ Classification("2515", "MATTRESSES, FOUNDATIONS, AND CONVERTIBLE BEDS", Category.CLASS),
+ Classification("2517", "WOOD TELEVISION, RADIO, PHONOGRAPH, AND SEWING MACHINE CABINETS", Category.CLASS),
+ Classification("2519", "HOUSEHOLD FURNITURE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("252", "OFFICE FURNITURE", Category.GROUP),
+ Classification("2521", "WOOD OFFICE FURNITURE", Category.CLASS),
+ Classification("2522", "OFFICE FURNITURE, EXCEPT WOOD", Category.CLASS),
+ Classification("253", "PUBLIC BUILDING AND RELATED FURNITURE", Category.GROUP),
+ Classification("2531", "PUBLIC BUILDING AND RELATED FURNITURE", Category.CLASS),
+ Classification("254", "PARTITIONS, SHELVING, LOCKERS, AND OFFICE AND STORE FIXTURES", Category.GROUP),
+ Classification("2541", "WOOD OFFICE AND STORE FIXTURES, PARTITIONS, SHELVING, AND LOCKERS", Category.CLASS),
+ Classification("2542", "OFFICE AND STORE FIXTURES, PARTITIONS, SHELVING, AND LOCKERS, EXC", Category.CLASS),
+ Classification("259", "MISCELLANEOUS FURNITURE AND FIXTURES", Category.GROUP),
+ Classification("2591", "DRAPERY HARDWARE AND WINDOW BLINDS AND SHADES", Category.CLASS),
+ Classification("2599", "FURNITURE AND FIXTURES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("26", "PAPER AND ALLIED PRODUCTS", Category.DIVISION),
+ Classification("261", "PULP MILLS", Category.GROUP),
+ Classification("2611", "PULP MILLS", Category.CLASS),
+ Classification("262", "PAPER MILLS", Category.GROUP),
+ Classification("2621", "PAPER MILLS", Category.CLASS),
+ Classification("263", "PAPERBOARD MILLS", Category.GROUP),
+ Classification("2631", "PAPERBOARD MILLS", Category.CLASS),
+ Classification("265", "PAPERBOARD CONTAINERS AND BOXES", Category.GROUP),
+ Classification("2652", "SETUP PAPERBOARD BOXES", Category.CLASS),
+ Classification("2653", "CORRUGATED AND SOLID FIBER BOXES", Category.CLASS),
+ Classification("2655", "FIBER CANS, TUBES, DRUMS, AND SIMILAR PRODUCTS", Category.CLASS),
+ Classification("2656", "SANITARY FOOD CONTAINERS, EXCEPT FOLDING", Category.CLASS),
+ Classification("2657", "FOLDING PAPERBOARD BOXES, INCLUDING SANITARY", Category.CLASS),
+ Classification("267", "CONVERTED PAPER AND PAPERBOARD PRODUCTS, EXCEPT CONTAINERS AND BOX", Category.GROUP),
+ Classification("2671", "PACKAGING PAPER AND PLASTICS FILM, COATED AND LAMINATED", Category.CLASS),
+ Classification("2672", "COATED AND LAMINATED PAPER, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("2673", "PLASTICS, FOIL, AND COATED PAPER BAGS", Category.CLASS),
+ Classification("2674", "UNCOATED PAPER AND MULTIWALL BAGS", Category.CLASS),
+ Classification("2675", "DIE-CUT PAPER AND PAPERBOARD AND CARDBOARD", Category.CLASS),
+ Classification("2676", "SANITARY PAPER PRODUCTS", Category.CLASS),
+ Classification("2677", "ENVELOPES", Category.CLASS),
+ Classification("2678", "STATIONERY, TABLETS, AND RELATED PRODUCTS", Category.CLASS),
+ Classification("2679", "CONVERTED PAPER AND PAPERBOARD PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("27", "PRINTING, PUBLISHING, AND ALLIED INDUSTRIES", Category.DIVISION),
+ Classification("271", "NEWSPAPERS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.GROUP),
+ Classification("2711", "NEWSPAPERS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.CLASS),
+ Classification("272", "PERIODICALS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.GROUP),
+ Classification("2721", "PERIODICALS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.CLASS),
+ Classification("273", "BOOKS", Category.GROUP),
+ Classification("2731", "BOOKS: PUBLISHING, OR PUBLISHING AND PRINTING", Category.CLASS),
+ Classification("2732", "BOOK PRINTING", Category.CLASS),
+ Classification("274", "MISCELLANEOUS PUBLISHING", Category.GROUP),
+ Classification("2741", "MISCELLANEOUS PUBLISHING", Category.CLASS),
+ Classification("275", "COMMERCIAL PRINTING", Category.GROUP),
+ Classification("2752", "COMMERCIAL PRINTING, LITHOGRAPHIC", Category.CLASS),
+ Classification("2754", "COMMERCIAL PRINTING, GRAVURE", Category.CLASS),
+ Classification("2759", "COMMERCIAL PRINTING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("276", "MANIFOLD BUSINESS FORMS", Category.GROUP),
+ Classification("2761", "MANIFOLD BUSINESS FORMS", Category.CLASS),
+ Classification("277", "GREETING CARDS", Category.GROUP),
+ Classification("2771", "GREETING CARDS", Category.CLASS),
+ Classification("278", "BLANKBOOKS, LOOSELEAF BINDERS, AND BOOKBINDING AND RELATED WORK", Category.GROUP),
+ Classification("2782", "BLANKBOOKS, LOOSELEAF BINDERS AND DEVICES", Category.CLASS),
+ Classification("2789", "BOOKBINDING AND RELATED WORK", Category.CLASS),
+ Classification("279", "SERVICE INDUSTRIES FOR THE PRINTING TRADE", Category.GROUP),
+ Classification("2791", "TYPESETTING", Category.CLASS),
+ Classification("2796", "PLATEMAKING AND RELATED SERVICES", Category.CLASS),
+ Classification("28", "CHEMICALS AND ALLIED PRODUCTS", Category.DIVISION),
+ Classification("281", "INDUSTRIAL INORGANIC CHEMICALS", Category.GROUP),
+ Classification("2812", "ALKALIES AND CHLORINE", Category.CLASS),
+ Classification("2813", "INDUSTRIAL GASES", Category.CLASS),
+ Classification("2816", "INORGANIC PIGMENTS", Category.CLASS),
+ Classification("2819", "INDUSTRIAL INORGANIC CHEMICALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("282", "PLASTICS MATERIALS AND SYNTHETIC RESINS, SYNTHETIC RUBBER, CELLULO", Category.GROUP),
+ Classification("2821", "PLASTICS MATERIALS, SYNTHETIC RESINS, AND NONVULCANIZABLE ELASTOM", Category.CLASS),
+ Classification("2822", "SYNTHETIC RUBBER (VULCANIZABLE ELASTOMERS)", Category.CLASS),
+ Classification("2823", "CELLULOSIC MANMADE FIBERS", Category.CLASS),
+ Classification("2824", "MANMADE ORGANIC FIBERS, EXCEPT CELLULOSIC", Category.CLASS),
+ Classification("283", "DRUGS", Category.GROUP),
+ Classification("2833", "MEDICINAL CHEMICALS AND BOTANICAL PRODUCTS", Category.CLASS),
+ Classification("2834", "PHARMACEUTICAL PREPARATIONS", Category.CLASS),
+ Classification("2835", "IN VITRO AND IN VIVO DIAGNOSTIC SUBSTANCES", Category.CLASS),
+ Classification("2836", "BIOLOGICAL PRODUCTS, EXCEPT DIAGNOSTIC SUBSTANCES", Category.CLASS),
+ Classification("284", "SOAP, DETERGENTS, AND CLEANING PREPARATIONS; PERFUMES, COSMETICS", Category.GROUP),
+ Classification("2841", "SOAP AND OTHER DETERGENTS, EXCEPT SPECIALTY CLEANERS", Category.CLASS),
+ Classification("2842", "SPECIALTY CLEANING, POLISHING, AND SANITATION PREPARATIONS", Category.CLASS),
+ Classification("2843", "SURFACE ACTIVE AGENTS, FINISHING AGENTS, SULFONATED OILS, AND ASS", Category.CLASS),
+ Classification("2844", "PERFUMES, COSMETICS, AND OTHER TOILET PREPARATIONS", Category.CLASS),
+ Classification("285", "PAINTS, VARNISHES, LACQUERS, ENAMELS, AND ALLIED PRODUCTS", Category.GROUP),
+ Classification("2851", "PAINTS, VARNISHES, LACQUERS, ENAMELS, AND ALLIED PRODUCTS", Category.CLASS),
+ Classification("286", "INDUSTRIAL ORGANIC CHEMICALS", Category.GROUP),
+ Classification("2861", "GUM AND WOOD CHEMICALS", Category.CLASS),
+ Classification("2865", "CYCLIC ORGANIC CRUDES AND INTERMEDIATES, AND ORGANIC DYES AND PIG", Category.CLASS),
+ Classification("2869", "INDUSTRIAL ORGANIC CHEMICALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("287", "AGRICULTURAL CHEMICALS", Category.GROUP),
+ Classification("2873", "NITROGENOUS FERTILIZERS", Category.CLASS),
+ Classification("2874", "PHOSPHATIC FERTILIZERS", Category.CLASS),
+ Classification("2875", "FERTILIZERS, MIXING ONLY", Category.CLASS),
+ Classification("2879", "PESTICIDES AND AGRICULTURAL CHEMICALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("289", "MISCELLANEOUS CHEMICAL PRODUCTS", Category.GROUP),
+ Classification("2891", "ADHESIVES AND SEALANTS", Category.CLASS),
+ Classification("2892", "EXPLOSIVES", Category.CLASS),
+ Classification("2893", "PRINTING INK", Category.CLASS),
+ Classification("2895", "CARBON BLACK", Category.CLASS),
+ Classification("2899", "CHEMICALS AND CHEMICAL PREPARATIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("29", "PETROLEUM REFINING AND RELATED INDUSTRIES", Category.DIVISION),
+ Classification("291", "PETROLEUM REFINING", Category.GROUP),
+ Classification("2911", "PETROLEUM REFINING", Category.CLASS),
+ Classification("295", "ASPHALT PAVING AND ROOFING MATERIALS", Category.GROUP),
+ Classification("2951", "ASPHALT PAVING MIXTURES AND BLOCKS", Category.CLASS),
+ Classification("2952", "ASPHALT FELTS AND COATINGS", Category.CLASS),
+ Classification("299", "MISCELLANEOUS PRODUCTS OF PETROLEUM AND COAL", Category.GROUP),
+ Classification("2992", "LUBRICATING OILS AND GREASES", Category.CLASS),
+ Classification("2999", "PRODUCTS OF PETROLEUM AND COAL, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("30", "RUBBER AND MISCELLANEOUS PLASTICS PRODUCTS", Category.DIVISION),
+ Classification("301", "TIRES AND INNER TUBES", Category.GROUP),
+ Classification("3011", "TIRES AND INNER TUBES", Category.CLASS),
+ Classification("302", "RUBBER AND PLASTICS FOOTWEAR", Category.GROUP),
+ Classification("3021", "RUBBER AND PLASTICS FOOTWEAR", Category.CLASS),
+ Classification("305", "GASKETS, PACKING, AND SEALING DEVICES AND RUBBER AND PLASTICS HOSE", Category.GROUP),
+ Classification("3052", "RUBBER AND PLASTICS HOSE AND BELTING", Category.CLASS),
+ Classification("3053", "GASKETS, PACKING, AND SEALING DEVICES", Category.CLASS),
+ Classification("306", "FABRICATED RUBBER PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("3061", "MOLDED, EXTRUDED, AND LATHE-CUT MECHANICAL RUBBER GOODS", Category.CLASS),
+ Classification("3069", "FABRICATED RUBBER PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("308", "MISCELLANEOUS PLASTICS PRODUCTS", Category.GROUP),
+ Classification("3081", "UNSUPPORTED PLASTICS FILM AND SHEET", Category.CLASS),
+ Classification("3082", "UNSUPPORTED PLASTICS PROFILE SHAPES", Category.CLASS),
+ Classification("3083", "LAMINATED PLASTICS PLATE, SHEET, AND PROFILE SHAPES", Category.CLASS),
+ Classification("3084", "PLASTICS PIPE", Category.CLASS),
+ Classification("3085", "PLASTICS BOTTLES", Category.CLASS),
+ Classification("3086", "PLASTICS FOAM PRODUCTS", Category.CLASS),
+ Classification("3087", "CUSTOM COMPOUNDING OF PURCHASED PLASTICS RESINS", Category.CLASS),
+ Classification("3088", "PLASTICS PLUMBING FIXTURES", Category.CLASS),
+ Classification("3089", "PLASTICS PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("31", "LEATHER AND LEATHER PRODUCTS", Category.DIVISION),
+ Classification("311", "LEATHER TANNING AND FINISHING", Category.GROUP),
+ Classification("3111", "LEATHER TANNING AND FINISHING", Category.CLASS),
+ Classification("313", "BOOT AND SHOE CUT STOCK AND FINDINGS", Category.GROUP),
+ Classification("3131", "BOOT AND SHOE CUT STOCK AND FINDINGS", Category.CLASS),
+ Classification("314", "FOOTWEAR, EXCEPT RUBBER", Category.GROUP),
+ Classification("3142", "HOUSE SLIPPERS", Category.CLASS),
+ Classification("3143", "MEN'S FOOTWEAR, EXCEPT ATHLETIC", Category.CLASS),
+ Classification("3144", "WOMEN'S FOOTWEAR, EXCEPT ATHLETIC", Category.CLASS),
+ Classification("3149", "FOOTWEAR, EXCEPT RUBBER, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("315", "LEATHER GLOVES AND MITTENS", Category.GROUP),
+ Classification("3151", "LEATHER GLOVES AND MITTENS", Category.CLASS),
+ Classification("316", "LUGGAGE", Category.GROUP),
+ Classification("3161", "LUGGAGE", Category.CLASS),
+ Classification("317", "HANDBAGS AND OTHER PERSONAL LEATHER GOODS", Category.GROUP),
+ Classification("3171", "WOMEN'S HANDBAGS AND PURSES", Category.CLASS),
+ Classification("3172", "PERSONAL LEATHER GOODS, EXCEPT WOMEN'S HANDBAGS AND PURSES", Category.CLASS),
+ Classification("319", "LEATHER GOODS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("3199", "LEATHER GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("32", "STONE, CLAY, GLASS, AND CONCRETE PRODUCTS", Category.DIVISION),
+ Classification("321", "FLAT GLASS", Category.GROUP),
+ Classification("3211", "FLAT GLASS", Category.CLASS),
+ Classification("322", "GLASS AND GLASSWARE, PRESSED OR BLOWN", Category.GROUP),
+ Classification("3221", "GLASS CONTAINERS", Category.CLASS),
+ Classification("3229", "PRESSED AND BLOWN GLASS AND GLASSWARE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("323", "GLASS PRODUCTS, MADE OF PURCHASED GLASS", Category.GROUP),
+ Classification("3231", "GLASS PRODUCTS, MADE OF PURCHASED GLASS", Category.CLASS),
+ Classification("324", "CEMENT, HYDRAULIC", Category.GROUP),
+ Classification("3241", "CEMENT, HYDRAULIC", Category.CLASS),
+ Classification("325", "STRUCTURAL CLAY PRODUCTS", Category.GROUP),
+ Classification("3251", "BRICK AND STRUCTURAL CLAY TILE", Category.CLASS),
+ Classification("3253", "CERAMIC WALL AND FLOOR TILE", Category.CLASS),
+ Classification("3255", "CLAY REFRACTORIES", Category.CLASS),
+ Classification("3259", "STRUCTURAL CLAY PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("326", "POTTERY AND RELATED PRODUCTS", Category.GROUP),
+ Classification("3261", "VITREOUS CHINA PLUMBING FIXTURES AND CHINA AND EARTHENWARE FITTIN", Category.CLASS),
+ Classification("3262", "VITREOUS CHINA TABLE AND KITCHEN ARTICLES", Category.CLASS),
+ Classification("3263", "FINE EARTHENWARE (WHITEWARE) TABLE AND KITCHEN ARTICLES", Category.CLASS),
+ Classification("3264", "PORCELAIN ELECTRICAL SUPPLIES", Category.CLASS),
+ Classification("3269", "POTTERY PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("327", "CONCRETE, GYPSUM, AND PLASTER PRODUCTS", Category.GROUP),
+ Classification("3271", "CONCRETE BLOCK AND BRICK", Category.CLASS),
+ Classification("3272", "CONCRETE PRODUCTS, EXCEPT BLOCK AND BRICK", Category.CLASS),
+ Classification("3273", "READY-MIXED CONCRETE", Category.CLASS),
+ Classification("3274", "LIME", Category.CLASS),
+ Classification("3275", "GYPSUM PRODUCTS", Category.CLASS),
+ Classification("328", "CUT STONE AND STONE PRODUCTS", Category.GROUP),
+ Classification("3281", "CUT STONE AND STONE PRODUCTS", Category.CLASS),
+ Classification("329", "ABRASIVE, ASBESTOS, AND MISCELLANEOUS NONMETALLIC MINERAL PRODUCTS", Category.GROUP),
+ Classification("3291", "ABRASIVE PRODUCTS", Category.CLASS),
+ Classification("3292", "ASBESTOS PRODUCTS", Category.CLASS),
+ Classification("3295", "MINERALS AND EARTHS, GROUND OR OTHERWISE TREATED", Category.CLASS),
+ Classification("3296", "MINERAL WOOL", Category.CLASS),
+ Classification("3297", "NONCLAY REFRACTORIES", Category.CLASS),
+ Classification("3299", "NONMETALLIC MINERAL PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("33", "PRIMARY METAL INDUSTRIES", Category.DIVISION),
+ Classification("331", "STEEL WORKS, BLAST FURNACES, AND ROLLING AND FINISHING MILLS", Category.GROUP),
+ Classification("3312", "STEEL WORKS, BLAST FURNACES (INCLUDING COKE OVENS), AND ROLLING M", Category.CLASS),
+ Classification("3313", "ELECTROMETALLURGICAL PRODUCTS, EXCEPT STEEL", Category.CLASS),
+ Classification("3315", "STEEL WIREDRAWING AND STEEL NAILS AND SPIKES", Category.CLASS),
+ Classification("3316", "COLD-ROLLED STEEL SHEET, STRIP, AND BARS", Category.CLASS),
+ Classification("3317", "STEEL PIPE AND TUBES", Category.CLASS),
+ Classification("332", "IRON AND STEEL FOUNDRIES", Category.GROUP),
+ Classification("3321", "GRAY AND DUCTILE IRON FOUNDRIES", Category.CLASS),
+ Classification("3322", "MALLEABLE IRON FOUNDRIES", Category.CLASS),
+ Classification("3324", "STEEL INVESTMENT FOUNDRIES", Category.CLASS),
+ Classification("3325", "STEEL FOUNDRIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("333", "PRIMARY SMELTING AND REFINING OF NONFERROUS METALS", Category.GROUP),
+ Classification("3331", "PRIMARY SMELTING AND REFINING OF COPPER", Category.CLASS),
+ Classification("3334", "PRIMARY PRODUCTION OF ALUMINUM", Category.CLASS),
+ Classification("3339", "PRIMARY SMELTING AND REFINING OF NONFERROUS METALS, EXCEPT COPPER", Category.CLASS),
+ Classification("334", "SECONDARY SMELTING AND REFINING OF NONFERROUS METALS", Category.GROUP),
+ Classification("3341", "SECONDARY SMELTING AND REFINING OF NONFERROUS METALS", Category.CLASS),
+ Classification("335", "ROLLING, DRAWING, AND EXTRUDING OF NONFERROUS METALS", Category.GROUP),
+ Classification("3351", "ROLLING, DRAWING, AND EXTRUDING OF COPPER", Category.CLASS),
+ Classification("3353", "ALUMINUM SHEET, PLATE, AND FOIL", Category.CLASS),
+ Classification("3354", "ALUMINUM EXTRUDED PRODUCTS", Category.CLASS),
+ Classification("3355", "ALUMINUM ROLLING AND DRAWING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3356", "ROLLING, DRAWING, AND EXTRUDING OF NONFERROUS METALS, EXCEPT COPP", Category.CLASS),
+ Classification("3357", "DRAWING AND INSULATING OF NONFERROUS WIRE", Category.CLASS),
+ Classification("336", "NONFERROUS FOUNDRIES (CASTINGS)", Category.GROUP),
+ Classification("3363", "ALUMINUM DIE-CASTINGS", Category.CLASS),
+ Classification("3364", "NONFERROUS DIE-CASTINGS, EXCEPT ALUMINUM", Category.CLASS),
+ Classification("3365", "ALUMINUM FOUNDRIES", Category.CLASS),
+ Classification("3366", "COPPER FOUNDRIES", Category.CLASS),
+ Classification("3369", "NONFERROUS FOUNDRIES, EXCEPT ALUMINUM AND COPPER", Category.CLASS),
+ Classification("339", "MISCELLANEOUS PRIMARY METAL PRODUCTS", Category.GROUP),
+ Classification("3398", "METAL HEAT TREATING", Category.CLASS),
+ Classification("3399", "PRIMARY METAL PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification(
+ "34", "FABRICATED METAL PRODUCTS, EXCEPT MACHINERY AND TRANSPORTATION EQUIPMENT", Category.DIVISION
+ ),
+ Classification("341", "METAL CANS AND SHIPPING CONTAINERS", Category.GROUP),
+ Classification("3411", "METAL CANS", Category.CLASS),
+ Classification("3412", "METAL SHIPPING BARRELS, DRUMS, KEGS, AND PAILS", Category.CLASS),
+ Classification("342", "CUTLERY, HANDTOOLS, AND GENERAL HARDWARE", Category.GROUP),
+ Classification("3421", "CUTLERY", Category.CLASS),
+ Classification("3423", "HAND AND EDGE TOOLS, EXCEPT MACHINE TOOLS AND HANDSAWS", Category.CLASS),
+ Classification("3425", "SAW BLADES AND HANDSAWS", Category.CLASS),
+ Classification("3429", "HARDWARE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("343", "HEATING EQUIPMENT, EXCEPT ELECTRIC AND WARM AIR; AND PLUMBING FIXT", Category.GROUP),
+ Classification("3431", "ENAMELED IRON AND METAL SANITARY WARE", Category.CLASS),
+ Classification("3432", "PLUMBING FIXTURE FITTINGS AND TRIM", Category.CLASS),
+ Classification("3433", "HEATING EQUIPMENT, EXCEPT ELECTRIC AND WARM AIR FURNACES", Category.CLASS),
+ Classification("344", "FABRICATED STRUCTURAL METAL PRODUCTS", Category.GROUP),
+ Classification("3441", "FABRICATED STRUCTURAL METAL", Category.CLASS),
+ Classification("3442", "METAL DOORS, SASH, FRAMES, MOLDING, AND TRIM", Category.CLASS),
+ Classification("3443", "FABRICATED PLATE WORK (BOILER SHOPS)", Category.CLASS),
+ Classification("3444", "SHEET METALWORK", Category.CLASS),
+ Classification("3446", "ARCHITECTURAL AND ORNAMENTAL METALWORK", Category.CLASS),
+ Classification("3448", "PREFABRICATED METAL BUILDINGS AND COMPONENTS", Category.CLASS),
+ Classification("3449", "MISCELLANEOUS STRUCTURAL METALWORK", Category.CLASS),
+ Classification("345", "SCREW MACHINE PRODUCTS, AND BOLTS, NUTS, SCREWS, RIVETS, AND WASHE", Category.GROUP),
+ Classification("3451", "SCREW MACHINE PRODUCTS", Category.CLASS),
+ Classification("3452", "BOLTS, NUTS, SCREWS, RIVETS, AND WASHERS", Category.CLASS),
+ Classification("346", "METAL FORGINGS AND STAMPINGS", Category.GROUP),
+ Classification("3462", "IRON AND STEEL FORGINGS", Category.CLASS),
+ Classification("3463", "NONFERROUS FORGINGS", Category.CLASS),
+ Classification("3465", "AUTOMOTIVE STAMPINGS", Category.CLASS),
+ Classification("3466", "CROWNS AND CLOSURES", Category.CLASS),
+ Classification("3469", "METAL STAMPINGS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("347", "COATING, ENGRAVING, AND ALLIED SERVICES", Category.GROUP),
+ Classification("3471", "ELECTROPLATING, PLATING, POLISHING, ANODIZING, AND COLORING", Category.CLASS),
+ Classification("3479", "COATING, ENGRAVING, AND ALLIED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("348", "ORDNANCE AND ACCESSORIES, EXCEPT VEHICLES AND GUIDED MISSILES", Category.GROUP),
+ Classification("3482", "SMALL ARMS AMMUNITION", Category.CLASS),
+ Classification("3483", "AMMUNITION, EXCEPT FOR SMALL ARMS", Category.CLASS),
+ Classification("3484", "SMALL ARMS", Category.CLASS),
+ Classification("3489", "ORDNANCE AND ACCESSORIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("349", "MISCELLANEOUS FABRICATED METAL PRODUCTS", Category.GROUP),
+ Classification("3491", "INDUSTRIAL VALVES", Category.CLASS),
+ Classification("3492", "FLUID POWER VALVES AND HOSE FITTINGS", Category.CLASS),
+ Classification("3493", "STEEL SPRINGS, EXCEPT WIRE", Category.CLASS),
+ Classification("3494", "VALVES AND PIPE FITTINGS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3495", "WIRE SPRINGS", Category.CLASS),
+ Classification("3496", "MISCELLANEOUS FABRICATED WIRE PRODUCTS", Category.CLASS),
+ Classification("3497", "METAL FOIL AND LEAF", Category.CLASS),
+ Classification("3498", "FABRICATED PIPE AND PIPE FITTINGS", Category.CLASS),
+ Classification("3499", "FABRICATED METAL PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("35", "INDUSTRIAL AND COMMERCIAL MACHINERY AND COMPUTER EQUIPMENT", Category.DIVISION),
+ Classification("351", "ENGINES AND TURBINES", Category.GROUP),
+ Classification("3511", "STEAM, GAS, AND HYDRAULIC TURBINES, AND TURBINE GENERATOR SET UNI", Category.CLASS),
+ Classification("3519", "INTERNAL COMBUSTION ENGINES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("352", "FARM AND GARDEN MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3523", "FARM MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3524", "LAWN AND GARDEN TRACTORS AND HOME LAWN AND GARDEN EQUIPMENT", Category.CLASS),
+ Classification("3524", "BLOWERS, RESIDENTIAL LAWN", Category.CLASS),
+ Classification("353", "CONSTRUCTION, MINING, AND MATERIALS HANDLING MACHINERY AND EQUIPME", Category.GROUP),
+ Classification("3531", "CONSTRUCTION MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3532", "MINING MACHINERY AND EQUIPMENT, EXCEPT OIL AND GAS FIELD MACHINER", Category.CLASS),
+ Classification("3533", "OIL AND GAS FIELD MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3534", "ELEVATORS AND MOVING STAIRWAYS", Category.CLASS),
+ Classification("3535", "CONVEYORS AND CONVEYING EQUIPMENT", Category.CLASS),
+ Classification("3536", "OVERHEAD TRAVELING CRANES, HOISTS, AND MONORAIL SYSTEMS", Category.CLASS),
+ Classification("3537", "INDUSTRIAL TRUCKS, TRACTORS, TRAILERS, AND STACKERS", Category.CLASS),
+ Classification("354", "METALWORKING MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3541", "MACHINE TOOLS, METAL CUTTING TYPES", Category.CLASS),
+ Classification("3542", "MACHINE TOOLS, METAL FORMING TYPES", Category.CLASS),
+ Classification("3543", "INDUSTRIAL PATTERNS", Category.CLASS),
+ Classification("3544", "SPECIAL DIES AND TOOLS, DIE SETS, JIGS AND FIXTURES, AND INDUSTRI", Category.CLASS),
+ Classification("3545", "CUTTING TOOLS, MACHINE TOOL ACCESSORIES, AND MACHINISTS' PRECISIO", Category.CLASS),
+ Classification("3546", "POWER-DRIVEN HANDTOOLS", Category.CLASS),
+ Classification("3547", "ROLLING MILL MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3548", "ELECTRIC AND GAS WELDING AND SOLDERING EQUIPMENT", Category.CLASS),
+ Classification("3549", "METALWORKING MACHINERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("355", "SPECIAL INDUSTRY MACHINERY, EXCEPT METALWORKING MACHINERY", Category.GROUP),
+ Classification("3552", "TEXTILE MACHINERY", Category.CLASS),
+ Classification("3553", "WOODWORKING MACHINERY", Category.CLASS),
+ Classification("3554", "PAPER INDUSTRIES MACHINERY", Category.CLASS),
+ Classification("3555", "PRINTING TRADES MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("3556", "FOOD PRODUCTS MACHINERY", Category.CLASS),
+ Classification("3559", "SPECIAL INDUSTRY MACHINERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("356", "GENERAL INDUSTRIAL MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3561", "PUMPS AND PUMPING EQUIPMENT", Category.CLASS),
+ Classification("3562", "BALL AND ROLLER BEARINGS", Category.CLASS),
+ Classification("3563", "AIR AND GAS COMPRESSORS", Category.CLASS),
+ Classification("3564", "INDUSTRIAL AND COMMERCIAL FANS AND BLOWERS AND AIR PURIFICATION E", Category.CLASS),
+ Classification("3565", "PACKAGING MACHINERY", Category.CLASS),
+ Classification("3566", "SPEED CHANGERS, INDUSTRIAL HIGH-SPEED DRIVES, AND GEARS", Category.CLASS),
+ Classification("3567", "INDUSTRIAL PROCESS FURNACES AND OVENS", Category.CLASS),
+ Classification("3568", "MECHANICAL POWER TRANSMISSION EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3569", "GENERAL INDUSTRIAL MACHINERY AND EQUIPMENT, NOT ELSEWHERE CLASSIF", Category.CLASS),
+ Classification("357", "COMPUTER AND OFFICE EQUIPMENT", Category.GROUP),
+ Classification("3571", "ELECTRONIC COMPUTERS", Category.CLASS),
+ Classification("3572", "COMPUTER STORAGE DEVICES", Category.CLASS),
+ Classification("3575", "COMPUTER TERMINALS", Category.CLASS),
+ Classification("3577", "COMPUTER PERIPHERAL EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("3578", "CALCULATING AND ACCOUNTING MACHINES, EXCEPT ELECTRONIC COMPUTERS", Category.CLASS),
+ Classification("3579", "OFFICE MACHINES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("358", "REFRIGERATION AND SERVICE INDUSTRY MACHINERY", Category.GROUP),
+ Classification("3581", "AUTOMATIC VENDING MACHINES", Category.CLASS),
+ Classification("3582", "COMMERCIAL LAUNDRY, DRYCLEANING, AND PRESSING MACHINES", Category.CLASS),
+ Classification("3585", "AIR-CONDITIONING AND WARM AIR HEATING EQUIPMENT AND COMMERCIAL AN", Category.CLASS),
+ Classification("3586", "MEASURING AND DISPENSING PUMPS", Category.CLASS),
+ Classification("3589", "SERVICE INDUSTRY MACHINERY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("359", "MISCELLANEOUS INDUSTRIAL AND COMMERCIAL MACHINERY AND EQUIPMENT", Category.GROUP),
+ Classification("3592", "CARBURETORS, PISTONS, PISTON RINGS, AND VALVES", Category.CLASS),
+ Classification("3593", "FLUID POWER CYLINDERS AND ACTUATORS", Category.CLASS),
+ Classification("3594", "FLUID POWER PUMPS AND MOTORS", Category.CLASS),
+ Classification("3596", "SCALES AND BALANCES, EXCEPT LABORATORY", Category.CLASS),
+ Classification("3599", "INDUSTRIAL AND COMMERCIAL MACHINERY AND EQUIPMENT, NOT ELSEWHERE", Category.CLASS),
+ Classification(
+ "36", "ELECTRONIC AND OTHER ELECTRICAL EQUIPMENT AND COMPONENTS, EXCEPT COMPUTER", Category.DIVISION
+ ),
+ Classification("361", "ELECTRIC TRANSMISSION AND DISTRIBUTION EQUIPMENT", Category.GROUP),
+ Classification("3612", "POWER, DISTRIBUTION, AND SPECIALTY TRANSFORMERS", Category.CLASS),
+ Classification("3613", "SWITCHGEAR AND SWITCHBOARD APPARATUS", Category.CLASS),
+ Classification("362", "ELECTRICAL INDUSTRIAL APPARATUS", Category.GROUP),
+ Classification("3621", "MOTORS AND GENERATORS", Category.CLASS),
+ Classification("3624", "CARBON AND GRAPHITE PRODUCTS", Category.CLASS),
+ Classification("3625", "RELAYS AND INDUSTRIAL CONTROLS", Category.CLASS),
+ Classification("3629", "ELECTRICAL INDUSTRIAL APPARATUS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("363", "HOUSEHOLD APPLIANCES", Category.GROUP),
+ Classification("3631", "HOUSEHOLD COOKING EQUIPMENT", Category.CLASS),
+ Classification("3632", "HOUSEHOLD REFRIGERATORS AND HOME AND FARM FREEZERS", Category.CLASS),
+ Classification("3633", "HOUSEHOLD LAUNDRY EQUIPMENT", Category.CLASS),
+ Classification("3634", "ELECTRIC HOUSEWARES AND FANS", Category.CLASS),
+ Classification("3635", "HOUSEHOLD VACUUM CLEANERS", Category.CLASS),
+ Classification("3639", "HOUSEHOLD APPLIANCES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("364", "ELECTRIC LIGHTING AND WIRING EQUIPMENT", Category.GROUP),
+ Classification("3641", "ELECTRIC LAMP BULBS AND TUBES", Category.CLASS),
+ Classification("3643", "CURRENT-CARRYING WIRING DEVICES", Category.CLASS),
+ Classification("3644", "NONCURRENT-CARRYING WIRING DEVICES", Category.CLASS),
+ Classification("3645", "RESIDENTIAL ELECTRIC LIGHTING FIXTURES", Category.CLASS),
+ Classification("3646", "COMMERCIAL, INDUSTRIAL, AND INSTITUTIONAL ELECTRIC LIGHTING FIXTU", Category.CLASS),
+ Classification("3647", "VEHICULAR LIGHTING EQUIPMENT", Category.CLASS),
+ Classification("3648", "LIGHTING EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("365", "HOUSEHOLD AUDIO AND VIDEO EQUIPMENT, AND AUDIO RECORDINGS", Category.GROUP),
+ Classification("3651", "HOUSEHOLD AUDIO AND VIDEO EQUIPMENT", Category.CLASS),
+ Classification("3652", "PHONOGRAPH RECORDS AND PRERECORDED AUDIO TAPES AND DISKS", Category.CLASS),
+ Classification("366", "COMMUNICATIONS EQUIPMENT", Category.GROUP),
+ Classification("3661", "TELEPHONE AND TELEGRAPH APPARATUS", Category.CLASS),
+ Classification("3663", "RADIO AND TELEVISION BROADCASTING AND COMMUNICATIONS EQUIPMENT", Category.CLASS),
+ Classification("3669", "COMMUNICATIONS EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("367", "ELECTRONIC COMPONENTS AND ACCESSORIES", Category.GROUP),
+ Classification("3671", "ELECTRON TUBES", Category.CLASS),
+ Classification("3672", "PRINTED CIRCUIT BOARDS", Category.CLASS),
+ Classification("3674", "SEMICONDUCTORS AND RELATED DEVICES", Category.CLASS),
+ Classification("3675", "ELECTRONIC CAPACITORS", Category.CLASS),
+ Classification("3676", "ELECTRONIC RESISTORS", Category.CLASS),
+ Classification("3677", "ELECTRONIC COILS, TRANSFORMERS, AND OTHER INDUCTORS", Category.CLASS),
+ Classification("3678", "ELECTRONIC CONNECTORS", Category.CLASS),
+ Classification("3679", "ELECTRONIC COMPONENTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("369", "MISCELLANEOUS ELECTRICAL MACHINERY, EQUIPMENT, AND SUPPLIES", Category.GROUP),
+ Classification("3691", "STORAGE BATTERIES", Category.CLASS),
+ Classification("3692", "PRIMARY BATTERIES, DRY AND WET", Category.CLASS),
+ Classification("3694", "ELECTRICAL EQUIPMENT FOR INTERNAL COMBUSTION ENGINES", Category.CLASS),
+ Classification("3695", "MAGNETIC AND OPTICAL RECORDING MEDIA", Category.CLASS),
+ Classification("3699", "ELECTRICAL MACHINERY, EQUIPMENT, AND SUPPLIES, NOT ELSEWHERE CLAS", Category.CLASS),
+ Classification("37", "TRANSPORTATION EQUIPMENT", Category.DIVISION),
+ Classification("371", "MOTOR VEHICLES AND MOTOR VEHICLE EQUIPMENT", Category.GROUP),
+ Classification("3711", "MOTOR VEHICLES AND PASSENGER CAR BODIES", Category.CLASS),
+ Classification("3713", "TRUCK AND BUS BODIES", Category.CLASS),
+ Classification("3714", "MOTOR VEHICLE PARTS AND ACCESSORIES", Category.CLASS),
+ Classification("3715", "TRUCK TRAILERS", Category.CLASS),
+ Classification("3716", "MOTOR HOMES", Category.CLASS),
+ Classification("372", "AIRCRAFT AND PARTS", Category.GROUP),
+ Classification("3721", "AIRCRAFT", Category.CLASS),
+ Classification("3724", "AIRCRAFT ENGINES AND ENGINE PARTS", Category.CLASS),
+ Classification("3728", "AIRCRAFT PARTS AND AUXILIARY EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("373", "SHIP AND BOAT BUILDING AND REPAIRING", Category.GROUP),
+ Classification("3731", "SHIP BUILDING AND REPAIRING", Category.CLASS),
+ Classification("3732", "BOAT BUILDING AND REPAIRING", Category.CLASS),
+ Classification("374", "RAILROAD EQUIPMENT", Category.GROUP),
+ Classification("3743", "RAILROAD EQUIPMENT", Category.CLASS),
+ Classification("375", "MOTORCYCLES, BICYCLES, AND PARTS", Category.GROUP),
+ Classification("3751", "MOTORCYCLES, BICYCLES, AND PARTS", Category.CLASS),
+ Classification("376", "GUIDED MISSILES AND SPACE VEHICLES AND PARTS", Category.GROUP),
+ Classification("3761", "GUIDED MISSILES AND SPACE VEHICLES", Category.CLASS),
+ Classification("3764", "GUIDED MISSILE AND SPACE VEHICLE PROPULSION UNITS AND PROPULSION", Category.CLASS),
+ Classification("3769", "GUIDED MISSILE AND SPACE VEHICLE PARTS AND AUXILIARY EQUIPMENT, N", Category.CLASS),
+ Classification("379", "MISCELLANEOUS TRANSPORTATION EQUIPMENT", Category.GROUP),
+ Classification("3792", "TRAVEL TRAILERS AND CAMPERS", Category.CLASS),
+ Classification("3795", "TANKS AND TANK COMPONENTS", Category.CLASS),
+ Classification("3799", "TRANSPORTATION EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification(
+ "38", "MEASURING, ANALYZING AND CONTROLLING INSTRUMENTS; PHOTOGRAPHIC, MEDICAL AN", Category.DIVISION
+ ),
+ Classification("381", "SEARCH, DETECTION, NAVIGATION, GUIDANCE, AERONAUTICAL, AND NAUTICA", Category.GROUP),
+ Classification("3812", "SEARCH, DETECTION, NAVIGATION, GUIDANCE, AERONAUTICAL, AND NAUTIC", Category.CLASS),
+ Classification("382", "LABORATORY APPARATUS AND ANALYTICAL, OPTICAL, MEASURING, AND CONTR", Category.GROUP),
+ Classification("3821", "LABORATORY APPARATUS AND FURNITURE", Category.CLASS),
+ Classification("3822", "AUTOMATIC CONTROLS FOR REGULATING RESIDENTIAL AND COMMERCIAL ENVI", Category.CLASS),
+ Classification("3823", "INDUSTRIAL INSTRUMENTS FOR MEASUREMENT, DISPLAY, AND CONTROL OF P", Category.CLASS),
+ Classification("3824", "TOTALIZING FLUID METERS AND COUNTING DEVICES", Category.CLASS),
+ Classification("3825", "INSTRUMENTS FOR MEASURING AND TESTING OF ELECTRICITY AND ELECTRIC", Category.CLASS),
+ Classification("3826", "LABORATORY ANALYTICAL INSTRUMENTS", Category.CLASS),
+ Classification("3827", "OPTICAL INSTRUMENTS AND LENSES", Category.CLASS),
+ Classification("3829", "MEASURING AND CONTROLLING DEVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("384", "SURGICAL, MEDICAL, AND DENTAL INSTRUMENTS AND SUPPLIES", Category.GROUP),
+ Classification("3841", "SURGICAL AND MEDICAL INSTRUMENTS AND APPARATUS", Category.CLASS),
+ Classification("3842", "ORTHOPEDIC, PROSTHETIC, AND SURGICAL APPLIANCES AND SUPPLIES", Category.CLASS),
+ Classification("3843", "DENTAL EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("3844", "X-RAY APPARATUS AND TUBES AND RELATED IRRADIATION APPARATUS", Category.CLASS),
+ Classification("3845", "ELECTROMEDICAL AND ELECTROTHERAPEUTIC APPARATUS", Category.CLASS),
+ Classification("385", "OPHTHALMIC GOODS", Category.GROUP),
+ Classification("3851", "OPHTHALMIC GOODS", Category.CLASS),
+ Classification("386", "PHOTOGRAPHIC EQUIPMENT AND SUPPLIES", Category.GROUP),
+ Classification("3861", "PHOTOGRAPHIC EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("387", "WATCHES, CLOCKS, CLOCKWORK OPERATED DEVICES, AND PARTS", Category.GROUP),
+ Classification("3873", "WATCHES, CLOCKS, CLOCKWORK OPERATED DEVICES, AND PARTS", Category.CLASS),
+ Classification("39", "MISCELLANEOUS MANUFACTURING INDUSTRIES", Category.DIVISION),
+ Classification("391", "JEWELRY, SILVERWARE, AND PLATED WARE", Category.GROUP),
+ Classification("3911", "JEWELRY, PRECIOUS METAL", Category.CLASS),
+ Classification("3914", "SILVERWARE, PLATED WARE, AND STAINLESS STEEL WARE", Category.CLASS),
+ Classification("3915", "JEWELERS' FINDINGS AND MATERIALS, AND LAPIDARY WORK", Category.CLASS),
+ Classification("393", "MUSICAL INSTRUMENTS", Category.GROUP),
+ Classification("3931", "MUSICAL INSTRUMENTS", Category.CLASS),
+ Classification("394", "DOLLS, TOYS, GAMES AND SPORTING AND ATHLETIC GOODS", Category.GROUP),
+ Classification("3942", "DOLLS AND STUFFED TOYS", Category.CLASS),
+ Classification("3944", "GAMES, TOYS, AND CHILDREN'S VEHICLES, EXCEPT DOLLS AND BICYCLES", Category.CLASS),
+ Classification("3949", "SPORTING AND ATHLETIC GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("395", "PENS, PENCILS, AND OTHER ARTISTS' MATERIALS", Category.GROUP),
+ Classification("3951", "PENS, MECHANICAL PENCILS, AND PARTS", Category.CLASS),
+ Classification("3952", "LEAD PENCILS, CRAYONS, AND ARTISTS' MATERIALS", Category.CLASS),
+ Classification("3953", "MARKING DEVICES", Category.CLASS),
+ Classification("3955", "CARBON PAPER AND INKED RIBBONS", Category.CLASS),
+ Classification("396", "COSTUME JEWELRY, COSTUME NOVELTIES, BUTTONS, AND MISCELLANEOUS NOT", Category.GROUP),
+ Classification("3961", "COSTUME JEWELRY AND COSTUME NOVELTIES, EXCEPT PRECIOUS METAL", Category.CLASS),
+ Classification("3965", "FASTENERS, BUTTONS, NEEDLES, AND PINS", Category.CLASS),
+ Classification("399", "MISCELLANEOUS MANUFACTURING INDUSTRIES", Category.GROUP),
+ Classification("3991", "BROOMS AND BRUSHES", Category.CLASS),
+ Classification("3993", "SIGNS AND ADVERTISING SPECIALTIES", Category.CLASS),
+ Classification("3995", "BURIAL CASKETS", Category.CLASS),
+ Classification("3996", "LINOLEUM, ASPHALTED-FELT-BASE, AND OTHER HARD SURFACE FLOOR COVER", Category.CLASS),
+ Classification("3999", "MANUFACTURING INDUSTRIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("E", "TRANSPORTATION, COMMUNICATIONS, ELECTRIC, GAS, AND SANITARY SERVICE", Category.SECTION),
+ Classification("40", "RAILROAD TRANSPORTATION", Category.DIVISION),
+ Classification("401", "RAILROADS", Category.GROUP),
+ Classification("4011", "RAILROADS, LINE-HAUL OPERATING", Category.CLASS),
+ Classification("4013", "RAILROAD SWITCHING AND TERMINAL ESTABLISHMENTS", Category.CLASS),
+ Classification(
+ "41", "LOCAL AND SUBURBAN TRANSIT AND INTERURBAN HIGHWAY PASSENGER TRANSPORTATION", Category.DIVISION
+ ),
+ Classification("411", "LOCAL AND SUBURBAN PASSENGER TRANSPORTATION", Category.GROUP),
+ Classification("4111", "LOCAL AND SUBURBAN TRANSIT", Category.CLASS),
+ Classification("4119", "LOCAL PASSENGER TRANSPORTATION, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("412", "TAXICABS", Category.GROUP),
+ Classification("4121", "TAXICABS", Category.CLASS),
+ Classification("413", "INTERCITY AND RURAL BUS TRANSPORTATION", Category.GROUP),
+ Classification("4131", "INTERCITY AND RURAL BUS TRANSPORTATION", Category.CLASS),
+ Classification("414", "BUS CHARTER SERVICE", Category.GROUP),
+ Classification("4141", "LOCAL BUS CHARTER SERVICE", Category.CLASS),
+ Classification("4142", "BUS CHARTER SERVICE, EXCEPT LOCAL", Category.CLASS),
+ Classification("415", "SCHOOL BUSES", Category.GROUP),
+ Classification("4151", "SCHOOL BUSES", Category.CLASS),
+ Classification("417", "TERMINAL AND SERVICE FACILITIES FOR MOTOR VEHICLE PASSENGER TRANSP", Category.GROUP),
+ Classification("4173", "TERMINAL AND SERVICE FACILITIES FOR MOTOR VEHICLE PASSENGER TRANS", Category.CLASS),
+ Classification("42", "MOTOR FREIGHT TRANSPORTATION AND WAREHOUSING", Category.DIVISION),
+ Classification("421", "TRUCKING AND COURIER SERVICES, EXCEPT AIR", Category.GROUP),
+ Classification("4212", "LOCAL TRUCKING WITHOUT STORAGE", Category.CLASS),
+ Classification("4213", "TRUCKING, EXCEPT LOCAL", Category.CLASS),
+ Classification("4214", "LOCAL TRUCKING WITH STORAGE", Category.CLASS),
+ Classification("4215", "COURIER SERVICES, EXCEPT BY AIR", Category.CLASS),
+ Classification("422", "PUBLIC WAREHOUSING AND STORAGE", Category.GROUP),
+ Classification("4221", "FARM PRODUCT WAREHOUSING AND STORAGE", Category.CLASS),
+ Classification("4222", "REFRIGERATED WAREHOUSING AND STORAGE", Category.CLASS),
+ Classification("4225", "GENERAL WAREHOUSING AND STORAGE", Category.CLASS),
+ Classification("4226", "SPECIAL WAREHOUSING AND STORAGE, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("423", "TERMINAL AND JOINT TERMINAL MAINTENANCE FACILITIES FOR MOTOR FREIG", Category.GROUP),
+ Classification("4231", "TERMINAL AND JOINT TERMINAL MAINTENANCE FACILITIES FOR MOTOR FREI", Category.CLASS),
+ Classification("43", "UNITED STATES POSTAL SERVICE", Category.DIVISION),
+ Classification("431", "UNITED STATES POSTAL SERVICE", Category.GROUP),
+ Classification("4311", "UNITED STATES POSTAL SERVICE", Category.CLASS),
+ Classification("44", "WATER TRANSPORTATION", Category.DIVISION),
+ Classification("441", "DEEP SEA FOREIGN TRANSPORTATION OF FREIGHT", Category.GROUP),
+ Classification("4412", "DEEP SEA FOREIGN TRANSPORTATION OF FREIGHT", Category.CLASS),
+ Classification("442", "DEEP SEA DOMESTIC TRANSPORTATION OF FREIGHT", Category.GROUP),
+ Classification("4424", "DEEP SEA DOMESTIC TRANSPORTATION OF FREIGHT", Category.CLASS),
+ Classification("443", "FREIGHT TRANSPORTATION ON THE GREAT LAKES¨ST. LAWRENCE SEAWAY", Category.GROUP),
+ Classification("4432", "FREIGHT TRANSPORTATION ON THE GREAT LAKES¨ST. LAWRENCE SEAWAY", Category.CLASS),
+ Classification("444", "WATER TRANSPORTATION OF FREIGHT, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("4449", "WATER TRANSPORTATION OF FREIGHT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("448", "WATER TRANSPORTATION OF PASSENGERS", Category.GROUP),
+ Classification("4481", "DEEP SEA TRANSPORTATION OF PASSENGERS, EXCEPT BY FERRY", Category.CLASS),
+ Classification("4482", "FERRIES", Category.CLASS),
+ Classification("4489", "WATER TRANSPORTATION OF PASSENGERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("449", "SERVICES INCIDENTAL TO WATER TRANSPORTATION", Category.GROUP),
+ Classification("4491", "MARINE CARGO HANDLING", Category.CLASS),
+ Classification("4492", "TOWING AND TUGBOAT SERVICES", Category.CLASS),
+ Classification("4493", "MARINAS", Category.CLASS),
+ Classification("4499", "WATER TRANSPORTATION SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("45", "TRANSPORTATION BY AIR", Category.DIVISION),
+ Classification("451", "AIR TRANSPORTATION, SCHEDULED, AND AIR COURIER SERVICES", Category.GROUP),
+ Classification("4512", "AIR TRANSPORTATION, SCHEDULED", Category.CLASS),
+ Classification("4513", "AIR COURIER SERVICES", Category.CLASS),
+ Classification("452", "AIR TRANSPORTATION, NONSCHEDULED", Category.GROUP),
+ Classification("4522", "AIR TRANSPORTATION, NONSCHEDULED", Category.CLASS),
+ Classification("458", "AIRPORTS, FLYING FIELDS, AND AIRPORT TERMINAL SERVICES", Category.GROUP),
+ Classification("4581", "AIRPORTS, FLYING FIELDS, AND AIRPORT TERMINAL SERVICES", Category.CLASS),
+ Classification("46", "PIPELINES, EXCEPT NATURAL GAS", Category.DIVISION),
+ Classification("461", "PIPELINES, EXCEPT NATURAL GAS", Category.GROUP),
+ Classification("4612", "CRUDE PETROLEUM PIPELINES", Category.CLASS),
+ Classification("4613", "REFINED PETROLEUM PIPELINES", Category.CLASS),
+ Classification("4619", "PIPELINES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("47", "TRANSPORTATION SERVICES", Category.DIVISION),
+ Classification("472", "ARRANGEMENT OF PASSENGER TRANSPORTATION", Category.GROUP),
+ Classification("4724", "TRAVEL AGENCIES", Category.CLASS),
+ Classification("4725", "TOUR OPERATORS", Category.CLASS),
+ Classification("4729", "ARRANGEMENT OF PASSENGER TRANSPORTATION, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("473", "ARRANGEMENT OF TRANSPORTATION OF FREIGHT AND CARGO", Category.GROUP),
+ Classification("4731", "ARRANGEMENT OF TRANSPORTATION OF FREIGHT AND CARGO", Category.CLASS),
+ Classification("474", "RENTAL OF RAILROAD CARS", Category.GROUP),
+ Classification("4741", "RENTAL OF RAILROAD CARS", Category.CLASS),
+ Classification("478", "MISCELLANEOUS SERVICES INCIDENTAL TO TRANSPORTATION", Category.GROUP),
+ Classification("4783", "PACKING AND CRATING", Category.CLASS),
+ Classification("4785", "FIXED FACILITIES AND INSPECTION AND WEIGHING SERVICES FOR MOTOR V", Category.CLASS),
+ Classification("4785", "CARGO CHECKERS AND SURVEYORS, MARINE", Category.CLASS),
+ Classification("4789", "TRANSPORTATION SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("48", "COMMUNICATIONS", Category.DIVISION),
+ Classification("481", "TELEPHONE COMMUNICATIONS", Category.GROUP),
+ Classification("4812", "RADIOTELEPHONE COMMUNICATIONS", Category.CLASS),
+ Classification("4813", "TELEPHONE COMMUNICATIONS, EXCEPT RADIOTELEPHONE", Category.CLASS),
+ Classification("482", "TELEGRAPH AND OTHER MESSAGE COMMUNICATIONS", Category.GROUP),
+ Classification("4822", "TELEGRAPH AND OTHER MESSAGE COMMUNICATIONS", Category.CLASS),
+ Classification("483", "RADIO AND TELEVISION BROADCASTING STATIONS", Category.GROUP),
+ Classification("4832", "RADIO BROADCASTING STATIONS", Category.CLASS),
+ Classification("4833", "TELEVISION BROADCASTING STATIONS", Category.CLASS),
+ Classification("484", "CABLE AND OTHER PAY TELEVISION SERVICES", Category.GROUP),
+ Classification("4841", "CABLE AND OTHER PAY TELEVISION SERVICES", Category.CLASS),
+ Classification("489", "COMMUNICATIONS SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("4899", "COMMUNICATIONS SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("49", "ELECTRIC, GAS, AND SANITARY SERVICES", Category.DIVISION),
+ Classification("491", "ELECTRIC SERVICES", Category.GROUP),
+ Classification("4911", "ELECTRIC SERVICES", Category.CLASS),
+ Classification("492", "GAS PRODUCTION AND DISTRIBUTION", Category.GROUP),
+ Classification("4922", "NATURAL GAS TRANSMISSION", Category.CLASS),
+ Classification("4923", "NATURAL GAS TRANSMISSION AND DISTRIBUTION", Category.CLASS),
+ Classification("4924", "NATURAL GAS DISTRIBUTION", Category.CLASS),
+ Classification("4925", "MIXED, MANUFACTURED, OR LIQUEFIED PETROLEUM GAS PRODUCTION AND/OR", Category.CLASS),
+ Classification("493", "COMBINATION ELECTRIC AND GAS, AND OTHER UTILITY SERVICES", Category.GROUP),
+ Classification("4931", "ELECTRIC AND OTHER SERVICES COMBINED", Category.CLASS),
+ Classification("4932", "GAS AND OTHER SERVICES COMBINED", Category.CLASS),
+ Classification("4939", "COMBINATION UTILITIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("494", "WATER SUPPLY", Category.GROUP),
+ Classification("4941", "WATER SUPPLY", Category.CLASS),
+ Classification("495", "SANITARY SERVICES", Category.GROUP),
+ Classification("4952", "SEWERAGE SYSTEMS", Category.CLASS),
+ Classification("4953", "REFUSE SYSTEMS", Category.CLASS),
+ Classification("4959", "SANITARY SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("496", "STEAM AND AIR-CONDITIONING SUPPLY", Category.GROUP),
+ Classification("4961", "STEAM AND AIR-CONDITIONING SUPPLY", Category.CLASS),
+ Classification("497", "IRRIGATION SYSTEMS", Category.GROUP),
+ Classification("4971", "IRRIGATION SYSTEMS", Category.CLASS),
+ Classification("F", "WHOLESALE TRADE", Category.SECTION),
+ Classification("50", "WHOLESALE TRADE¨DURABLE GOODS", Category.DIVISION),
+ Classification("501", "MOTOR VEHICLES AND MOTOR VEHICLE PARTS AND SUPPLIES", Category.GROUP),
+ Classification("5012", "AUTOMOBILES AND OTHER MOTOR VEHICLES", Category.CLASS),
+ Classification("5013", "MOTOR VEHICLE SUPPLIES AND NEW PARTS", Category.CLASS),
+ Classification("5014", "TIRES AND TUBES", Category.CLASS),
+ Classification("5015", "MOTOR VEHICLE PARTS, USED", Category.CLASS),
+ Classification("502", "FURNITURE AND HOMEFURNISHINGS", Category.GROUP),
+ Classification("5021", "FURNITURE", Category.CLASS),
+ Classification("5023", "HOMEFURNISHINGS", Category.CLASS),
+ Classification("503", "LUMBER AND OTHER CONSTRUCTION MATERIALS", Category.GROUP),
+ Classification("5031", "LUMBER, PLYWOOD, MILLWORK, AND WOOD PANELS", Category.CLASS),
+ Classification("5032", "BRICK, STONE, AND RELATED CONSTRUCTION MATERIALS", Category.CLASS),
+ Classification("5033", "ROOFING, SIDING, AND INSULATION MATERIALS", Category.CLASS),
+ Classification("5039", "CONSTRUCTION MATERIALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("504", "PROFESSIONAL AND COMMERCIAL EQUIPMENT AND SUPPLIES", Category.GROUP),
+ Classification("5043", "PHOTOGRAPHIC EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5044", "OFFICE EQUIPMENT", Category.CLASS),
+ Classification("5045", "COMPUTERS AND COMPUTER PERIPHERAL EQUIPMENT AND SOFTWARE", Category.CLASS),
+ Classification("5046", "COMMERCIAL EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("5047", "MEDICAL, DENTAL, AND HOSPITAL EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5048", "OPHTHALMIC GOODS", Category.CLASS),
+ Classification("5049", "PROFESSIONAL EQUIPMENT AND SUPPLIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("505", "METALS AND MINERALS, EXCEPT PETROLEUM", Category.GROUP),
+ Classification("5051", "METALS SERVICE CENTERS AND OFFICES", Category.CLASS),
+ Classification("5052", "COAL AND OTHER MINERALS AND ORES", Category.CLASS),
+ Classification("506", "ELECTRICAL GOODS", Category.GROUP),
+ Classification("5063", "ELECTRICAL APPARATUS AND EQUIPMENT, WIRING SUPPLIES, AND CONSTRUC", Category.CLASS),
+ Classification("5064", "ELECTRICAL APPLIANCES, TELEVISION AND RADIO SETS", Category.CLASS),
+ Classification("5065", "ELECTRONIC PARTS AND EQUIPMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("507", "HARDWARE, AND PLUMBING AND HEATING EQUIPMENT AND SUPPLIES", Category.GROUP),
+ Classification("5072", "HARDWARE", Category.CLASS),
+ Classification("5074", "PLUMBING AND HEATING EQUIPMENT AND SUPPLIES (HYDRONICS)", Category.CLASS),
+ Classification("5075", "WARM AIR HEATING AND AIR-CONDITIONING EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5078", "REFRIGERATION EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("508", "MACHINERY, EQUIPMENT, AND SUPPLIES", Category.GROUP),
+ Classification("5082", "CONSTRUCTION AND MINING (EXCEPT PETROLEUM) MACHINERY AND EQUIPMEN", Category.CLASS),
+ Classification("5083", "FARM AND GARDEN MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("5084", "INDUSTRIAL MACHINERY AND EQUIPMENT", Category.CLASS),
+ Classification("5085", "INDUSTRIAL SUPPLIES", Category.CLASS),
+ Classification("5087", "SERVICE ESTABLISHMENT EQUIPMENT AND SUPPLIES", Category.CLASS),
+ Classification("5088", "TRANSPORTATION EQUIPMENT AND SUPPLIES, EXCEPT MOTOR VEHICLES", Category.CLASS),
+ Classification("509", "MISCELLANEOUS DURABLE GOODS", Category.GROUP),
+ Classification("5091", "SPORTING AND RECREATIONAL GOODS AND SUPPLIES", Category.CLASS),
+ Classification("5092", "TOYS AND HOBBY GOODS AND SUPPLIES", Category.CLASS),
+ Classification("5093", "SCRAP AND WASTE MATERIALS", Category.CLASS),
+ Classification("5094", "JEWELRY, WATCHES, PRECIOUS STONES, AND PRECIOUS METALS", Category.CLASS),
+ Classification("5099", "DURABLE GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("51", "WHOLESALE TRADE¨NONDURABLE GOODS", Category.DIVISION),
+ Classification("511", "PAPER AND PAPER PRODUCTS", Category.GROUP),
+ Classification("5111", "PRINTING AND WRITING PAPER", Category.CLASS),
+ Classification("5112", "STATIONERY AND OFFICE SUPPLIES", Category.CLASS),
+ Classification("5113", "INDUSTRIAL AND PERSONAL SERVICE PAPER", Category.CLASS),
+ Classification("512", "DRUGS, DRUG PROPRIETARIES, AND DRUGGISTS' SUNDRIES", Category.GROUP),
+ Classification("5122", "DRUGS, DRUG PROPRIETARIES, AND DRUGGISTS' SUNDRIES", Category.CLASS),
+ Classification("513", "APPAREL, PIECE GOODS, AND NOTIONS", Category.GROUP),
+ Classification("5131", "PIECE GOODS, NOTIONS, AND OTHER DRY GOODS", Category.CLASS),
+ Classification("5136", "MEN'S AND BOYS' CLOTHING AND FURNISHINGS", Category.CLASS),
+ Classification("5137", "WOMEN'S, CHILDREN'S, AND INFANTS' CLOTHING AND ACCESSORIES", Category.CLASS),
+ Classification("5139", "FOOTWEAR", Category.CLASS),
+ Classification("514", "GROCERIES AND RELATED PRODUCTS", Category.GROUP),
+ Classification("5141", "GROCERIES, GENERAL LINE", Category.CLASS),
+ Classification("5142", "PACKAGED FROZEN FOODS", Category.CLASS),
+ Classification("5143", "DAIRY PRODUCTS, EXCEPT DRIED OR CANNED", Category.CLASS),
+ Classification("5144", "POULTRY AND POULTRY PRODUCTS", Category.CLASS),
+ Classification("5145", "CONFECTIONERY", Category.CLASS),
+ Classification("5146", "FISH AND SEAFOODS", Category.CLASS),
+ Classification("5147", "MEATS AND MEAT PRODUCTS", Category.CLASS),
+ Classification("5148", "FRESH FRUITS AND VEGETABLES", Category.CLASS),
+ Classification("5149", "GROCERIES AND RELATED PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("515", "FARM-PRODUCT RAW MATERIALS", Category.GROUP),
+ Classification("5153", "GRAIN AND FIELD BEANS", Category.CLASS),
+ Classification("5154", "LIVESTOCK", Category.CLASS),
+ Classification("5159", "FARM-PRODUCT RAW MATERIALS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("516", "CHEMICALS AND ALLIED PRODUCTS", Category.GROUP),
+ Classification("5162", "PLASTICS MATERIALS AND BASIC FORMS AND SHAPES", Category.CLASS),
+ Classification("5169", "CHEMICALS AND ALLIED PRODUCTS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("517", "PETROLEUM AND PETROLEUM PRODUCTS", Category.GROUP),
+ Classification("5171", "PETROLEUM BULK STATIONS AND TERMINALS", Category.CLASS),
+ Classification("5172", "PETROLEUM AND PETROLEUM PRODUCTS WHOLESALERS, EXCEPT BULK STATION", Category.CLASS),
+ Classification("518", "BEER, WINE, AND DISTILLED ALCOHOLIC BEVERAGES", Category.GROUP),
+ Classification("5181", "BEER AND ALE", Category.CLASS),
+ Classification("5182", "WINE AND DISTILLED ALCOHOLIC BEVERAGES", Category.CLASS),
+ Classification("519", "MISCELLANEOUS NONDURABLE GOODS", Category.GROUP),
+ Classification("5191", "FARM SUPPLIES", Category.CLASS),
+ Classification("5192", "BOOKS, PERIODICALS, AND NEWSPAPERS", Category.CLASS),
+ Classification("5193", "FLOWERS, NURSERY STOCK, AND FLORISTS' SUPPLIES", Category.CLASS),
+ Classification("5194", "TOBACCO AND TOBACCO PRODUCTS", Category.CLASS),
+ Classification("5198", "PAINTS, VARNISHES, AND SUPPLIES", Category.CLASS),
+ Classification("5199", "NONDURABLE GOODS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("G", "RETAIL TRADE", Category.SECTION),
+ Classification("52", "BUILDING MATERIALS, HARDWARE, GARDEN SUPPLY, AND MOBILE HOME DEALERS", Category.DIVISION),
+ Classification("521", "LUMBER AND OTHER BUILDING MATERIALS DEALERS", Category.GROUP),
+ Classification("5211", "LUMBER AND OTHER BUILDING MATERIALS DEALERS", Category.CLASS),
+ Classification("523", "PAINT, GLASS, AND WALLPAPER STORES", Category.GROUP),
+ Classification("5231", "PAINT, GLASS, AND WALLPAPER STORES", Category.CLASS),
+ Classification("525", "HARDWARE STORES", Category.GROUP),
+ Classification("5251", "HARDWARE STORES", Category.CLASS),
+ Classification("526", "RETAIL NURSERIES, LAWN AND GARDEN SUPPLY STORES", Category.GROUP),
+ Classification("5261", "RETAIL NURSERIES, LAWN AND GARDEN SUPPLY STORES", Category.CLASS),
+ Classification("527", "MOBILE HOME DEALERS", Category.GROUP),
+ Classification("5271", "MOBILE HOME DEALERS", Category.CLASS),
+ Classification("53", "GENERAL MERCHANDISE STORES", Category.DIVISION),
+ Classification("531", "DEPARTMENT STORES", Category.GROUP),
+ Classification("5311", "DEPARTMENT STORES", Category.CLASS),
+ Classification("533", "VARIETY STORES", Category.GROUP),
+ Classification("5331", "VARIETY STORES", Category.CLASS),
+ Classification("539", "MISCELLANEOUS GENERAL MERCHANDISE STORES", Category.GROUP),
+ Classification("5399", "MISCELLANEOUS GENERAL MERCHANDISE STORES", Category.CLASS),
+ Classification("54", "FOOD STORES", Category.DIVISION),
+ Classification("541", "GROCERY STORES", Category.GROUP),
+ Classification("5411", "GROCERY STORES", Category.CLASS),
+ Classification("542", "MEAT AND FISH (SEAFOOD) MARKETS, INCLUDING FREEZER PROVISIONERS", Category.GROUP),
+ Classification("5421", "MEAT AND FISH (SEAFOOD) MARKETS, INCLUDING FREEZER PROVISIONERS", Category.CLASS),
+ Classification("543", "FRUIT AND VEGETABLE MARKETS", Category.GROUP),
+ Classification("5431", "FRUIT AND VEGETABLE MARKETS", Category.CLASS),
+ Classification("544", "CANDY, NUT, AND CONFECTIONERY STORES", Category.GROUP),
+ Classification("5441", "CANDY, NUT, AND CONFECTIONERY STORES", Category.CLASS),
+ Classification("545", "DAIRY PRODUCTS STORES", Category.GROUP),
+ Classification("5451", "DAIRY PRODUCTS STORES", Category.CLASS),
+ Classification("546", "RETAIL BAKERIES", Category.GROUP),
+ Classification("5461", "RETAIL BAKERIES", Category.CLASS),
+ Classification("549", "MISCELLANEOUS FOOD STORES", Category.GROUP),
+ Classification("5499", "MISCELLANEOUS FOOD STORES", Category.CLASS),
+ Classification("55", "AUTOMOTIVE DEALERS AND GASOLINE SERVICE STATIONS", Category.DIVISION),
+ Classification("551", "MOTOR VEHICLE DEALERS (NEW AND USED)", Category.GROUP),
+ Classification("5511", "MOTOR VEHICLE DEALERS (NEW AND USED)", Category.CLASS),
+ Classification("552", "MOTOR VEHICLE DEALERS (USED ONLY)", Category.GROUP),
+ Classification("5521", "MOTOR VEHICLE DEALERS (USED ONLY)", Category.CLASS),
+ Classification("553", "AUTO AND HOME SUPPLY STORES", Category.GROUP),
+ Classification("5531", "AUTO AND HOME SUPPLY STORES", Category.CLASS),
+ Classification("554", "GASOLINE SERVICE STATIONS", Category.GROUP),
+ Classification("5541", "GASOLINE SERVICE STATIONS", Category.CLASS),
+ Classification("555", "BOAT DEALERS", Category.GROUP),
+ Classification("5551", "BOAT DEALERS", Category.CLASS),
+ Classification("556", "RECREATIONAL VEHICLE DEALERS", Category.GROUP),
+ Classification("5561", "RECREATIONAL VEHICLE DEALERS", Category.CLASS),
+ Classification("557", "MOTORCYCLE DEALERS", Category.GROUP),
+ Classification("5571", "MOTORCYCLE DEALERS", Category.CLASS),
+ Classification("559", "AUTOMOTIVE DEALERS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("5599", "AUTOMOTIVE DEALERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("56", "APPAREL AND ACCESSORY STORES", Category.DIVISION),
+ Classification("561", "MEN'S AND BOYS' CLOTHING AND ACCESSORY STORES", Category.GROUP),
+ Classification("5611", "MEN'S AND BOYS' CLOTHING AND ACCESSORY STORES", Category.CLASS),
+ Classification("562", "WOMEN'S CLOTHING STORES", Category.GROUP),
+ Classification("5621", "WOMEN'S CLOTHING STORES", Category.CLASS),
+ Classification("563", "WOMEN'S ACCESSORY AND SPECIALTY STORES", Category.GROUP),
+ Classification("5632", "WOMEN'S ACCESSORY AND SPECIALTY STORES", Category.CLASS),
+ Classification("564", "CHILDREN'S AND INFANTS' WEAR STORES", Category.GROUP),
+ Classification("5641", "CHILDREN'S AND INFANTS' WEAR STORES", Category.CLASS),
+ Classification("565", "FAMILY CLOTHING STORES", Category.GROUP),
+ Classification("5651", "FAMILY CLOTHING STORES", Category.CLASS),
+ Classification("566", "SHOE STORES", Category.GROUP),
+ Classification("5661", "SHOE STORES", Category.CLASS),
+ Classification("569", "MISCELLANEOUS APPAREL AND ACCESSORY STORES", Category.GROUP),
+ Classification("5699", "MISCELLANEOUS APPAREL AND ACCESSORY STORES", Category.CLASS),
+ Classification("57", "HOME FURNITURE, FURNISHINGS, AND EQUIPMENT STORES", Category.DIVISION),
+ Classification("571", "HOME FURNITURE AND FURNISHINGS STORES", Category.GROUP),
+ Classification("5712", "FURNITURE STORES", Category.CLASS),
+ Classification("5713", "FLOOR COVERING STORES", Category.CLASS),
+ Classification("5714", "DRAPERY, CURTAIN, AND UPHOLSTERY STORES", Category.CLASS),
+ Classification("5719", "MISCELLANEOUS HOMEFURNISHINGS STORES", Category.CLASS),
+ Classification("572", "HOUSEHOLD APPLIANCE STORES", Category.GROUP),
+ Classification("5722", "HOUSEHOLD APPLIANCE STORES", Category.CLASS),
+ Classification("573", "RADIO, TELEVISION, CONSUMER ELECTRONICS, AND MUSIC STORES", Category.GROUP),
+ Classification("5731", "RADIO, TELEVISION, AND CONSUMER ELECTRONICS STORES", Category.CLASS),
+ Classification("5734", "COMPUTER AND COMPUTER SOFTWARE STORES", Category.CLASS),
+ Classification("5735", "RECORD AND PRERECORDED TAPE STORES", Category.CLASS),
+ Classification("5736", "MUSICAL INSTRUMENT STORES", Category.CLASS),
+ Classification("58", "EATING AND DRINKING PLACES", Category.DIVISION),
+ Classification("581", "EATING AND DRINKING PLACES", Category.GROUP),
+ Classification("5812", "EATING PLACES", Category.CLASS),
+ Classification("5813", "DRINKING PLACES (ALCOHOLIC BEVERAGES)", Category.CLASS),
+ Classification("59", "MISCELLANEOUS RETAIL", Category.DIVISION),
+ Classification("591", "DRUG STORES AND PROPRIETARY STORES", Category.GROUP),
+ Classification("5912", "DRUG STORES AND PROPRIETARY STORES", Category.CLASS),
+ Classification("592", "LIQUOR STORES", Category.GROUP),
+ Classification("5921", "LIQUOR STORES", Category.CLASS),
+ Classification("593", "USED MERCHANDISE STORES", Category.GROUP),
+ Classification("5932", "USED MERCHANDISE STORES", Category.CLASS),
+ Classification("594", "MISCELLANEOUS SHOPPING GOODS STORES", Category.GROUP),
+ Classification("5941", "SPORTING GOODS STORES AND BICYCLE SHOPS", Category.CLASS),
+ Classification("5942", "BOOK STORES", Category.CLASS),
+ Classification("5943", "STATIONERY STORES", Category.CLASS),
+ Classification("5944", "JEWELRY STORES", Category.CLASS),
+ Classification("5945", "HOBBY, TOY, AND GAME SHOPS", Category.CLASS),
+ Classification("5946", "CAMERA AND PHOTOGRAPHIC SUPPLY STORES", Category.CLASS),
+ Classification("5947", "GIFT, NOVELTY, AND SOUVENIR SHOPS", Category.CLASS),
+ Classification("5948", "LUGGAGE AND LEATHER GOODS STORES", Category.CLASS),
+ Classification("5949", "SEWING, NEEDLEWORK, AND PIECE GOODS STORES", Category.CLASS),
+ Classification("596", "NONSTORE RETAILERS", Category.GROUP),
+ Classification("5961", "CATALOG AND MAIL-ORDER HOUSES", Category.CLASS),
+ Classification("5962", "AUTOMATIC MERCHANDISING MACHINE OPERATORS", Category.CLASS),
+ Classification("5963", "DIRECT SELLING ESTABLISHMENTS", Category.CLASS),
+ Classification("598", "FUEL DEALERS", Category.GROUP),
+ Classification("5983", "FUEL OIL DEALERS", Category.CLASS),
+ Classification("5984", "LIQUEFIED PETROLEUM GAS (BOTTLED GAS) DEALERS", Category.CLASS),
+ Classification("5989", "FUEL DEALERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("599", "RETAIL STORES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("5992", "FLORISTS", Category.CLASS),
+ Classification("5993", "TOBACCO STORES AND STANDS", Category.CLASS),
+ Classification("5994", "NEWS DEALERS AND NEWSSTANDS", Category.CLASS),
+ Classification("5995", "OPTICAL GOODS STORES", Category.CLASS),
+ Classification("5999", "MISCELLANEOUS RETAIL STORES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("H", "FINANCE, INSURANCE, AND REAL ESTATE", Category.SECTION),
+ Classification("60", "DEPOSITORY INSTITUTIONS", Category.DIVISION),
+ Classification("601", "CENTRAL RESERVE DEPOSITORY INSTITUTIONS", Category.GROUP),
+ Classification("6011", "FEDERAL RESERVE BANKS", Category.CLASS),
+ Classification("6019", "CENTRAL RESERVE DEPOSITORY INSTITUTIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("602", "COMMERCIAL BANKS", Category.GROUP),
+ Classification("6021", "NATIONAL COMMERCIAL BANKS", Category.CLASS),
+ Classification("6022", "STATE COMMERCIAL BANKS", Category.CLASS),
+ Classification("6029", "COMMERCIAL BANKS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("603", "SAVINGS INSTITUTIONS", Category.GROUP),
+ Classification("6035", "SAVINGS INSTITUTIONS, FEDERALLY CHARTERED", Category.CLASS),
+ Classification("6036", "SAVINGS INSTITUTIONS, NOT FEDERALLY CHARTERED", Category.CLASS),
+ Classification("606", "CREDIT UNIONS", Category.GROUP),
+ Classification("6061", "CREDIT UNIONS, FEDERALLY CHARTERED", Category.CLASS),
+ Classification("6062", "CREDIT UNIONS, NOT FEDERALLY CHARTERED", Category.CLASS),
+ Classification("608", "FOREIGN BANKING AND BRANCHES AND AGENCIES OF FOREIGN BANKS", Category.GROUP),
+ Classification("6081", "BRANCHES AND AGENCIES OF FOREIGN BANKS", Category.CLASS),
+ Classification("6082", "FOREIGN TRADE AND INTERNATIONAL BANKING INSTITUTIONS", Category.CLASS),
+ Classification("609", "FUNCTIONS RELATED TO DEPOSITORY BANKING", Category.GROUP),
+ Classification("6091", "NONDEPOSIT TRUST FACILITIES", Category.CLASS),
+ Classification("6099", "FUNCTIONS RELATED TO DEPOSITORY BANKING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("61", "NONDEPOSITORY CREDIT INSTITUTIONS", Category.DIVISION),
+ Classification("611", "FEDERAL AND FEDERALLY-SPONSORED CREDIT AGENCIES", Category.GROUP),
+ Classification("6111", "FEDERAL AND FEDERALLY-SPONSORED CREDIT AGENCIES", Category.CLASS),
+ Classification("614", "PERSONAL CREDIT INSTITUTIONS", Category.GROUP),
+ Classification("6141", "PERSONAL CREDIT INSTITUTIONS", Category.CLASS),
+ Classification("615", "BUSINESS CREDIT INSTITUTIONS", Category.GROUP),
+ Classification("6153", "SHORT-TERM BUSINESS CREDIT INSTITUTIONS, EXCEPT AGRICULTURAL", Category.CLASS),
+ Classification("6159", "MISCELLANEOUS BUSINESS CREDIT INSTITUTIONS", Category.CLASS),
+ Classification("616", "MORTGAGE BANKERS AND BROKERS", Category.GROUP),
+ Classification("6162", "MORTGAGE BANKERS AND LOAN CORRESPONDENTS", Category.CLASS),
+ Classification("6163", "LOAN BROKERS", Category.CLASS),
+ Classification("62", "SECURITY AND COMMODITY BROKERS, DEALERS, EXCHANGES, AND SERVICES", Category.DIVISION),
+ Classification("621", "SECURITY BROKERS, DEALERS, AND FLOTATION COMPANIES", Category.GROUP),
+ Classification("6211", "SECURITY BROKERS, DEALERS, AND FLOTATION COMPANIES", Category.CLASS),
+ Classification("622", "COMMODITY CONTRACTS BROKERS AND DEALERS", Category.GROUP),
+ Classification("6221", "COMMODITY CONTRACTS BROKERS AND DEALERS", Category.CLASS),
+ Classification("623", "SECURITY AND COMMODITY EXCHANGES", Category.GROUP),
+ Classification("6231", "SECURITY AND COMMODITY EXCHANGES", Category.CLASS),
+ Classification("628", "SERVICES ALLIED WITH THE EXCHANGE OF SECURITIES OR COMMODITIES", Category.GROUP),
+ Classification("6282", "INVESTMENT ADVICE", Category.CLASS),
+ Classification("6289", "SERVICES ALLIED WITH THE EXCHANGE OF SECURITIES OR COMMODITIES, N", Category.CLASS),
+ Classification("63", "INSURANCE CARRIERS", Category.DIVISION),
+ Classification("631", "LIFE INSURANCE", Category.GROUP),
+ Classification("6311", "LIFE INSURANCE", Category.CLASS),
+ Classification("632", "ACCIDENT AND HEALTH INSURANCE AND MEDICAL SERVICE PLANS", Category.GROUP),
+ Classification("6321", "ACCIDENT AND HEALTH INSURANCE", Category.CLASS),
+ Classification("6324", "HOSPITAL AND MEDICAL SERVICE PLANS", Category.CLASS),
+ Classification("633", "FIRE, MARINE, AND CASUALTY INSURANCE", Category.GROUP),
+ Classification("6331", "FIRE, MARINE, AND CASUALTY INSURANCE", Category.CLASS),
+ Classification("635", "SURETY INSURANCE", Category.GROUP),
+ Classification("6351", "SURETY INSURANCE", Category.CLASS),
+ Classification("636", "TITLE INSURANCE", Category.GROUP),
+ Classification("6361", "TITLE INSURANCE", Category.CLASS),
+ Classification("637", "PENSION, HEALTH, AND WELFARE FUNDS", Category.GROUP),
+ Classification("6371", "PENSION, HEALTH, AND WELFARE FUNDS", Category.CLASS),
+ Classification("639", "INSURANCE CARRIERS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("6399", "INSURANCE CARRIERS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("64", "INSURANCE AGENTS, BROKERS, AND SERVICE", Category.DIVISION),
+ Classification("641", "INSURANCE AGENTS, BROKERS, AND SERVICE", Category.GROUP),
+ Classification("6411", "INSURANCE AGENTS, BROKERS, AND SERVICE", Category.CLASS),
+ Classification("65", "REAL ESTATE", Category.DIVISION),
+ Classification("651", "REAL ESTATE OPERATORS (EXCEPT DEVELOPERS) AND LESSORS", Category.GROUP),
+ Classification("6512", "OPERATORS OF NONRESIDENTIAL BUILDINGS", Category.CLASS),
+ Classification("6513", "OPERATORS OF APARTMENT BUILDINGS", Category.CLASS),
+ Classification("6514", "OPERATORS OF DWELLINGS OTHER THAN APARTMENT BUILDINGS", Category.CLASS),
+ Classification("6515", "OPERATORS OF RESIDENTIAL MOBILE HOME SITES", Category.CLASS),
+ Classification("6517", "LESSORS OF RAILROAD PROPERTY", Category.CLASS),
+ Classification("6519", "LESSORS OF REAL PROPERTY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("653", "REAL ESTATE AGENTS AND MANAGERS", Category.GROUP),
+ Classification("6531", "REAL ESTATE AGENTS AND MANAGERS", Category.CLASS),
+ Classification("654", "TITLE ABSTRACT OFFICES", Category.GROUP),
+ Classification("6541", "TITLE ABSTRACT OFFICES", Category.CLASS),
+ Classification("655", "LAND SUBDIVIDERS AND DEVELOPERS", Category.GROUP),
+ Classification("6552", "LAND SUBDIVIDERS AND DEVELOPERS, EXCEPT CEMETERIES", Category.CLASS),
+ Classification("6553", "CEMETERY SUBDIVIDERS AND DEVELOPERS", Category.CLASS),
+ Classification("67", "HOLDING AND OTHER INVESTMENT OFFICES", Category.DIVISION),
+ Classification("671", "HOLDING OFFICES", Category.GROUP),
+ Classification("6712", "OFFICES OF BANK HOLDING COMPANIES", Category.CLASS),
+ Classification("6719", "OFFICES OF HOLDING COMPANIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("672", "INVESTMENT OFFICES", Category.GROUP),
+ Classification("6722", "MANAGEMENT INVESTMENT OFFICES, OPEN-END", Category.CLASS),
+ Classification("6726", "UNIT INVESTMENT TRUSTS, FACE-AMOUNT CERTIFICATE OFFICES, AND CLOS", Category.CLASS),
+ Classification("673", "TRUSTS", Category.GROUP),
+ Classification("6732", "EDUCATIONAL, RELIGIOUS, AND CHARITABLE TRUSTS", Category.CLASS),
+ Classification("6733", "TRUSTS, EXCEPT EDUCATIONAL, RELIGIOUS, AND CHARITABLE", Category.CLASS),
+ Classification("679", "MISCELLANEOUS INVESTING", Category.GROUP),
+ Classification("6792", "OIL ROYALTY TRADERS", Category.CLASS),
+ Classification("6794", "PATENT OWNERS AND LESSORS", Category.CLASS),
+ Classification("6798", "REAL ESTATE INVESTMENT TRUSTS", Category.CLASS),
+ Classification("6799", "INVESTORS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("I", "SERVICES", Category.SECTION),
+ Classification("70", "HOTELS, ROOMING HOUSES, CAMPS, AND OTHER LODGING PLACES", Category.DIVISION),
+ Classification("701", "HOTELS AND MOTELS", Category.GROUP),
+ Classification("7011", "HOTELS AND MOTELS", Category.CLASS),
+ Classification("702", "ROOMING AND BOARDING HOUSES", Category.GROUP),
+ Classification("7021", "ROOMING AND BOARDING HOUSES", Category.CLASS),
+ Classification("703", "CAMPS AND RECREATIONAL VEHICLE PARKS", Category.GROUP),
+ Classification("7032", "SPORTING AND RECREATIONAL CAMPS", Category.CLASS),
+ Classification("7033", "RECREATIONAL VEHICLE PARKS AND CAMPSITES", Category.CLASS),
+ Classification("704", "ORGANIZATION HOTELS AND LODGING HOUSES, ON MEMBERSHIP BASIS", Category.GROUP),
+ Classification("7041", "ORGANIZATION HOTELS AND LODGING HOUSES, ON MEMBERSHIP BASIS", Category.CLASS),
+ Classification("72", "PERSONAL SERVICES", Category.DIVISION),
+ Classification("721", "LAUNDRY, CLEANING, AND GARMENT SERVICES", Category.GROUP),
+ Classification("7211", "POWER LAUNDRIES, FAMILY AND COMMERCIAL", Category.CLASS),
+ Classification("7212", "GARMENT PRESSING, AND AGENTS FOR LAUNDRIES AND DRYCLEANERS", Category.CLASS),
+ Classification("7213", "LINEN SUPPLY", Category.CLASS),
+ Classification("7215", "COIN-OPERATED LAUNDRIES AND DRYCLEANING", Category.CLASS),
+ Classification("7216", "DRYCLEANING PLANTS, EXCEPT RUG CLEANING", Category.CLASS),
+ Classification("7217", "CARPET AND UPHOLSTERY CLEANING", Category.CLASS),
+ Classification("7218", "INDUSTRIAL LAUNDERERS", Category.CLASS),
+ Classification("7219", "LAUNDRY AND GARMENT SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("722", "PHOTOGRAPHIC STUDIOS, PORTRAIT", Category.GROUP),
+ Classification("7221", "PHOTOGRAPHIC STUDIOS, PORTRAIT", Category.CLASS),
+ Classification("723", "BEAUTY SHOPS", Category.GROUP),
+ Classification("7231", "BEAUTY SHOPS", Category.CLASS),
+ Classification("724", "BARBER SHOPS", Category.GROUP),
+ Classification("7241", "BARBER SHOPS", Category.CLASS),
+ Classification("725", "SHOE REPAIR SHOPS AND SHOESHINE PARLORS", Category.GROUP),
+ Classification("7251", "SHOE REPAIR SHOPS AND SHOESHINE PARLORS", Category.CLASS),
+ Classification("726", "FUNERAL SERVICE AND CREMATORIES", Category.GROUP),
+ Classification("7261", "FUNERAL SERVICE AND CREMATORIES", Category.CLASS),
+ Classification("729", "MISCELLANEOUS PERSONAL SERVICES", Category.GROUP),
+ Classification("7291", "TAX RETURN PREPARATION SERVICES", Category.CLASS),
+ Classification("7299", "MISCELLANEOUS PERSONAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("73", "BUSINESS SERVICES", Category.DIVISION),
+ Classification("731", "ADVERTISING", Category.GROUP),
+ Classification("7311", "ADVERTISING AGENCIES", Category.CLASS),
+ Classification("7312", "OUTDOOR ADVERTISING SERVICES", Category.CLASS),
+ Classification("7313", "RADIO, TELEVISION, AND PUBLISHERS' ADVERTISING REPRESENTATIVES", Category.CLASS),
+ Classification("7319", "ADVERTISING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("732", "CONSUMER CREDIT REPORTING AGENCIES, MERCANTILE REPORTING AGENCIES", Category.GROUP),
+ Classification("7322", "ADJUSTMENT AND COLLECTION SERVICES", Category.CLASS),
+ Classification("7323", "CREDIT REPORTING SERVICES", Category.CLASS),
+ Classification("733", "MAILING, REPRODUCTION, COMMERCIAL ART AND PHOTOGRAPHY, AND STENOGR", Category.GROUP),
+ Classification("7331", "DIRECT MAIL ADVERTISING SERVICES", Category.CLASS),
+ Classification("7334", "PHOTOCOPYING AND DUPLICATING SERVICES", Category.CLASS),
+ Classification("7335", "COMMERCIAL PHOTOGRAPHY", Category.CLASS),
+ Classification("7336", "COMMERCIAL ART AND GRAPHIC DESIGN", Category.CLASS),
+ Classification("7338", "SECRETARIAL AND COURT REPORTING SERVICES", Category.CLASS),
+ Classification("734", "SERVICES TO DWELLINGS AND OTHER BUILDINGS", Category.GROUP),
+ Classification("7342", "DISINFECTING AND PEST CONTROL SERVICES", Category.CLASS),
+ Classification("7349", "BUILDING CLEANING AND MAINTENANCE SERVICES, NOT ELSEWHERE CLASSIF", Category.CLASS),
+ Classification("735", "MISCELLANEOUS EQUIPMENT RENTAL AND LEASING", Category.GROUP),
+ Classification("7352", "MEDICAL EQUIPMENT RENTAL AND LEASING", Category.CLASS),
+ Classification("7353", "HEAVY CONSTRUCTION EQUIPMENT RENTAL AND LEASING", Category.CLASS),
+ Classification("7359", "EQUIPMENT RENTAL AND LEASING, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("736", "PERSONNEL SUPPLY SERVICES", Category.GROUP),
+ Classification("7361", "EMPLOYMENT AGENCIES", Category.CLASS),
+ Classification("7363", "HELP SUPPLY SERVICES", Category.CLASS),
+ Classification("737", "COMPUTER PROGRAMMING, DATA PROCESSING, AND OTHER COMPUTER RELATED", Category.GROUP),
+ Classification("7371", "COMPUTER PROGRAMMING SERVICES", Category.CLASS),
+ Classification("7372", "PREPACKAGED SOFTWARE", Category.CLASS),
+ Classification("7373", "COMPUTER INTEGRATED SYSTEMS DESIGN", Category.CLASS),
+ Classification("7374", "COMPUTER PROCESSING AND DATA PREPARATION AND PROCESSING SERVICES", Category.CLASS),
+ Classification("7375", "INFORMATION RETRIEVAL SERVICES", Category.CLASS),
+ Classification("7376", "COMPUTER FACILITIES MANAGEMENT SERVICES", Category.CLASS),
+ Classification("7377", "COMPUTER RENTAL AND LEASING", Category.CLASS),
+ Classification("7378", "COMPUTER MAINTENANCE AND REPAIR", Category.CLASS),
+ Classification("7379", "COMPUTER RELATED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("738", "MISCELLANEOUS BUSINESS SERVICES", Category.GROUP),
+ Classification("7381", "DETECTIVE, GUARD, AND ARMORED CAR SERVICES", Category.CLASS),
+ Classification("7382", "SECURITY SYSTEMS SERVICES", Category.CLASS),
+ Classification("7383", "NEWS SYNDICATES", Category.CLASS),
+ Classification("7384", "PHOTOFINISHING LABORATORIES", Category.CLASS),
+ Classification("7389", "BUSINESS SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("75", "AUTOMOTIVE REPAIR, SERVICES, AND PARKING", Category.DIVISION),
+ Classification("751", "AUTOMOTIVE RENTAL AND LEASING, WITHOUT DRIVERS", Category.GROUP),
+ Classification("7513", "TRUCK RENTAL AND LEASING, WITHOUT DRIVERS", Category.CLASS),
+ Classification("7514", "PASSENGER CAR RENTAL", Category.CLASS),
+ Classification("7515", "PASSENGER CAR LEASING", Category.CLASS),
+ Classification("7519", "UTILITY TRAILER AND RECREATIONAL VEHICLE RENTAL", Category.CLASS),
+ Classification("752", "AUTOMOBILE PARKING", Category.GROUP),
+ Classification("7521", "AUTOMOBILE PARKING", Category.CLASS),
+ Classification("753", "AUTOMOTIVE REPAIR SHOPS", Category.GROUP),
+ Classification("7532", "TOP, BODY, AND UPHOLSTERY REPAIR SHOPS AND PAINT SHOPS", Category.CLASS),
+ Classification("7533", "AUTOMOTIVE EXHAUST SYSTEM REPAIR SHOPS", Category.CLASS),
+ Classification("7534", "TIRE RETREADING AND REPAIR SHOPS", Category.CLASS),
+ Classification("7536", "AUTOMOTIVE GLASS REPLACEMENT SHOPS", Category.CLASS),
+ Classification("7537", "AUTOMOTIVE TRANSMISSION REPAIR SHOPS", Category.CLASS),
+ Classification("7538", "GENERAL AUTOMOTIVE REPAIR SHOPS", Category.CLASS),
+ Classification("7539", "AUTOMOTIVE REPAIR SHOPS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("754", "AUTOMOTIVE SERVICES, EXCEPT REPAIR", Category.GROUP),
+ Classification("7542", "CARWASHES", Category.CLASS),
+ Classification("7549", "AUTOMOTIVE SERVICES, EXCEPT REPAIR AND CARWASHES", Category.CLASS),
+ Classification("76", "MISCELLANEOUS REPAIR SERVICES", Category.DIVISION),
+ Classification("762", "ELECTRICAL REPAIR SHOPS", Category.GROUP),
+ Classification("7622", "RADIO AND TELEVISION REPAIR SHOPS", Category.CLASS),
+ Classification("7623", "REFRIGERATION AND AIR-CONDITIONING SERVICE AND REPAIR SHOPS", Category.CLASS),
+ Classification("7629", "ELECTRICAL AND ELECTRONIC REPAIR SHOPS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("763", "WATCH, CLOCK, AND JEWELRY REPAIR", Category.GROUP),
+ Classification("7631", "WATCH, CLOCK, AND JEWELRY REPAIR", Category.CLASS),
+ Classification("764", "REUPHOLSTERY AND FURNITURE REPAIR", Category.GROUP),
+ Classification("7641", "REUPHOLSTERY AND FURNITURE REPAIR", Category.CLASS),
+ Classification("769", "MISCELLANEOUS REPAIR SHOPS AND RELATED SERVICES", Category.GROUP),
+ Classification("7692", "WELDING REPAIR", Category.CLASS),
+ Classification("7694", "ARMATURE REWINDING SHOPS", Category.CLASS),
+ Classification("7699", "REPAIR SHOPS AND RELATED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("78", "MOTION PICTURES", Category.DIVISION),
+ Classification("781", "MOTION PICTURE PRODUCTION AND ALLIED SERVICES", Category.GROUP),
+ Classification("7812", "MOTION PICTURE AND VIDEO TAPE PRODUCTION", Category.CLASS),
+ Classification("7819", "SERVICES ALLIED TO MOTION PICTURE PRODUCTION", Category.CLASS),
+ Classification("782", "MOTION PICTURE DISTRIBUTION AND ALLIED SERVICES", Category.GROUP),
+ Classification("7822", "MOTION PICTURE AND VIDEO TAPE DISTRIBUTION", Category.CLASS),
+ Classification("7829", "SERVICES ALLIED TO MOTION PICTURE DISTRIBUTION", Category.CLASS),
+ Classification("783", "MOTION PICTURE THEATERS", Category.GROUP),
+ Classification("7832", "MOTION PICTURE THEATERS, EXCEPT DRIVE-IN", Category.CLASS),
+ Classification("7833", "DRIVE-IN MOTION PICTURE THEATERS", Category.CLASS),
+ Classification("784", "VIDEO TAPE RENTAL", Category.GROUP),
+ Classification("7841", "VIDEO TAPE RENTAL", Category.CLASS),
+ Classification("79", "AMUSEMENT AND RECREATION SERVICES", Category.DIVISION),
+ Classification("791", "DANCE STUDIOS, SCHOOLS, AND HALLS", Category.GROUP),
+ Classification("7911", "DANCE STUDIOS, SCHOOLS, AND HALLS", Category.CLASS),
+ Classification("792", "THEATRICAL PRODUCERS (EXCEPT MOTION PICTURE), BANDS, ORCHESTRAS, A", Category.GROUP),
+ Classification("7922", "THEATRICAL PRODUCERS (EXCEPT MOTION PICTURE) AND MISCELLANEOUS TH", Category.CLASS),
+ Classification("7929", "BANDS, ORCHESTRAS, ACTORS, AND OTHER ENTERTAINERS AND ENTERTAINME", Category.CLASS),
+ Classification("793", "BOWLING CENTERS", Category.GROUP),
+ Classification("7933", "BOWLING CENTERS", Category.CLASS),
+ Classification("794", "COMMERCIAL SPORTS", Category.GROUP),
+ Classification("7941", "PROFESSIONAL SPORTS CLUBS AND PROMOTERS", Category.CLASS),
+ Classification("7948", "RACING, INCLUDING TRACK OPERATION", Category.CLASS),
+ Classification("799", "MISCELLANEOUS AMUSEMENT AND RECREATION SERVICES", Category.GROUP),
+ Classification("7991", "PHYSICAL FITNESS FACILITIES", Category.CLASS),
+ Classification("7992", "PUBLIC GOLF COURSES", Category.CLASS),
+ Classification("7993", "COIN-OPERATED AMUSEMENT DEVICES", Category.CLASS),
+ Classification("7996", "AMUSEMENT PARKS", Category.CLASS),
+ Classification("7997", "MEMBERSHIP SPORTS AND RECREATION CLUBS", Category.CLASS),
+ Classification("7999", "AMUSEMENT AND RECREATION SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("80", "HEALTH SERVICES", Category.DIVISION),
+ Classification("801", "OFFICES AND CLINICS OF DOCTORS OF MEDICINE", Category.GROUP),
+ Classification("8011", "OFFICES AND CLINICS OF DOCTORS OF MEDICINE", Category.CLASS),
+ Classification("802", "OFFICES AND CLINICS OF DENTISTS", Category.GROUP),
+ Classification("8021", "OFFICES AND CLINICS OF DENTISTS", Category.CLASS),
+ Classification("803", "OFFICES AND CLINICS OF DOCTORS OF OSTEOPATHY", Category.GROUP),
+ Classification("8031", "OFFICES AND CLINICS OF DOCTORS OF OSTEOPATHY", Category.CLASS),
+ Classification("804", "OFFICES AND CLINICS OF OTHER HEALTH PRACTITIONERS", Category.GROUP),
+ Classification("8041", "OFFICES AND CLINICS OF CHIROPRACTORS", Category.CLASS),
+ Classification("8042", "OFFICES AND CLINICS OF OPTOMETRISTS", Category.CLASS),
+ Classification("8043", "OFFICES AND CLINICS OF PODIATRISTS", Category.CLASS),
+ Classification("8049", "OFFICES AND CLINICS OF HEALTH PRACTITIONERS, NOT ELSEWHERE CLASSI", Category.CLASS),
+ Classification("805", "NURSING AND PERSONAL CARE FACILITIES", Category.GROUP),
+ Classification("8051", "SKILLED NURSING CARE FACILITIES", Category.CLASS),
+ Classification("8052", "INTERMEDIATE CARE FACILITIES", Category.CLASS),
+ Classification("8059", "NURSING AND PERSONAL CARE FACILITIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("806", "HOSPITALS", Category.GROUP),
+ Classification("8062", "GENERAL MEDICAL AND SURGICAL HOSPITALS", Category.CLASS),
+ Classification("8063", "PSYCHIATRIC HOSPITALS", Category.CLASS),
+ Classification("8069", "SPECIALTY HOSPITALS, EXCEPT PSYCHIATRIC", Category.CLASS),
+ Classification("807", "MEDICAL AND DENTAL LABORATORIES", Category.GROUP),
+ Classification("8071", "MEDICAL LABORATORIES", Category.CLASS),
+ Classification("8072", "DENTAL LABORATORIES", Category.CLASS),
+ Classification("808", "HOME HEALTH CARE SERVICES", Category.GROUP),
+ Classification("8082", "HOME HEALTH CARE SERVICES", Category.CLASS),
+ Classification("809", "MISCELLANEOUS HEALTH AND ALLIED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8092", "KIDNEY DIALYSIS CENTERS", Category.CLASS),
+ Classification("8093", "SPECIALTY OUTPATIENT FACILITIES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("8099", "HEALTH AND ALLIED SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("81", "LEGAL SERVICES", Category.DIVISION),
+ Classification("811", "LEGAL SERVICES", Category.GROUP),
+ Classification("8111", "LEGAL SERVICES", Category.CLASS),
+ Classification("82", "EDUCATIONAL SERVICES", Category.DIVISION),
+ Classification("821", "ELEMENTARY AND SECONDARY SCHOOLS", Category.GROUP),
+ Classification("8211", "ELEMENTARY AND SECONDARY SCHOOLS", Category.CLASS),
+ Classification("822", "COLLEGES, UNIVERSITIES, PROFESSIONAL SCHOOLS, AND JUNIOR COLLEGES", Category.GROUP),
+ Classification("8221", "COLLEGES, UNIVERSITIES, AND PROFESSIONAL SCHOOLS", Category.CLASS),
+ Classification("8222", "JUNIOR COLLEGES AND TECHNICAL INSTITUTES", Category.CLASS),
+ Classification("823", "LIBRARIES", Category.GROUP),
+ Classification("8231", "LIBRARIES", Category.CLASS),
+ Classification("824", "VOCATIONAL SCHOOLS", Category.GROUP),
+ Classification("8243", "DATA PROCESSING SCHOOLS", Category.CLASS),
+ Classification("8244", "BUSINESS AND SECRETARIAL SCHOOLS", Category.CLASS),
+ Classification("8249", "VOCATIONAL SCHOOLS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("829", "SCHOOLS AND EDUCATIONAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8299", "SCHOOLS AND EDUCATIONAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("83", "SOCIAL SERVICES", Category.DIVISION),
+ Classification("832", "INDIVIDUAL AND FAMILY SOCIAL SERVICES", Category.GROUP),
+ Classification("8322", "INDIVIDUAL AND FAMILY SOCIAL SERVICES", Category.CLASS),
+ Classification("833", "JOB TRAINING AND VOCATIONAL REHABILITATION SERVICES", Category.GROUP),
+ Classification("8331", "JOB TRAINING AND VOCATIONAL REHABILITATION SERVICES", Category.CLASS),
+ Classification("835", "CHILD DAY CARE SERVICES", Category.GROUP),
+ Classification("8351", "CHILD DAY CARE SERVICES", Category.CLASS),
+ Classification("836", "RESIDENTIAL CARE", Category.GROUP),
+ Classification("8361", "RESIDENTIAL CARE", Category.CLASS),
+ Classification("839", "SOCIAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8399", "SOCIAL SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("84", "MUSEUMS, ART GALLERIES, AND BOTANICAL AND ZOOLOGICAL GARDENS", Category.DIVISION),
+ Classification("841", "MUSEUMS AND ART GALLERIES", Category.GROUP),
+ Classification("8412", "MUSEUMS AND ART GALLERIES", Category.CLASS),
+ Classification("842", "ARBORETA AND BOTANICAL OR ZOOLOGICAL GARDENS", Category.GROUP),
+ Classification("8422", "ARBORETA AND BOTANICAL OR ZOOLOGICAL GARDENS", Category.CLASS),
+ Classification("86", "MEMBERSHIP ORGANIZATIONS", Category.DIVISION),
+ Classification("861", "BUSINESS ASSOCIATIONS", Category.GROUP),
+ Classification("8611", "BUSINESS ASSOCIATIONS", Category.CLASS),
+ Classification("862", "PROFESSIONAL MEMBERSHIP ORGANIZATIONS", Category.GROUP),
+ Classification("8621", "PROFESSIONAL MEMBERSHIP ORGANIZATIONS", Category.CLASS),
+ Classification("863", "LABOR UNIONS AND SIMILAR LABOR ORGANIZATIONS", Category.GROUP),
+ Classification("8631", "LABOR UNIONS AND SIMILAR LABOR ORGANIZATIONS", Category.CLASS),
+ Classification("864", "CIVIC, SOCIAL, AND FRATERNAL ASSOCIATIONS", Category.GROUP),
+ Classification("8641", "CIVIC, SOCIAL, AND FRATERNAL ASSOCIATIONS", Category.CLASS),
+ Classification("865", "POLITICAL ORGANIZATIONS", Category.GROUP),
+ Classification("8651", "POLITICAL ORGANIZATIONS", Category.CLASS),
+ Classification("866", "RELIGIOUS ORGANIZATIONS", Category.GROUP),
+ Classification("8661", "RELIGIOUS ORGANIZATIONS", Category.CLASS),
+ Classification("869", "MEMBERSHIP ORGANIZATIONS, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8699", "MEMBERSHIP ORGANIZATIONS, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("87", "ENGINEERING, ACCOUNTING, RESEARCH, MANAGEMENT, AND RELATED SERVICES", Category.DIVISION),
+ Classification("871", "ENGINEERING, ARCHITECTURAL, AND SURVEYING SERVICES", Category.GROUP),
+ Classification("8711", "ENGINEERING SERVICES", Category.CLASS),
+ Classification("8712", "ARCHITECTURAL SERVICES", Category.CLASS),
+ Classification("8713", "SURVEYING SERVICES", Category.CLASS),
+ Classification("872", "ACCOUNTING, AUDITING, AND BOOKKEEPING SERVICES", Category.GROUP),
+ Classification("8721", "ACCOUNTING, AUDITING, AND BOOKKEEPING SERVICES", Category.CLASS),
+ Classification("873", "RESEARCH, DEVELOPMENT, AND TESTING SERVICES", Category.GROUP),
+ Classification("8731", "COMMERCIAL PHYSICAL AND BIOLOGICAL RESEARCH", Category.CLASS),
+ Classification("8732", "COMMERCIAL ECONOMIC, SOCIOLOGICAL, AND EDUCATIONAL RESEARCH", Category.CLASS),
+ Classification("8733", "NONCOMMERCIAL RESEARCH ORGANIZATIONS", Category.CLASS),
+ Classification("8734", "TESTING LABORATORIES", Category.CLASS),
+ Classification("874", "MANAGEMENT AND PUBLIC RELATIONS SERVICES", Category.GROUP),
+ Classification("8741", "MANAGEMENT SERVICES", Category.CLASS),
+ Classification("8742", "MANAGEMENT CONSULTING SERVICES", Category.CLASS),
+ Classification("8743", "PUBLIC RELATIONS SERVICES", Category.CLASS),
+ Classification("8744", "FACILITIES SUPPORT MANAGEMENT SERVICES", Category.CLASS),
+ Classification("8748", "BUSINESS CONSULTING SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("88", "PRIVATE HOUSEHOLDS", Category.DIVISION),
+ Classification("881", "PRIVATE HOUSEHOLDS", Category.GROUP),
+ Classification("8811", "PRIVATE HOUSEHOLDS", Category.CLASS),
+ Classification("89", "SERVICES, NOT ELSEWHERE CLASSIFIED", Category.DIVISION),
+ Classification("899", "SERVICES, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("8999", "SERVICES, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("J", "PUBLIC ADMINISTRATION", Category.SECTION),
+ Classification("91", "EXECUTIVE, LEGISLATIVE, AND GENERAL GOVERNMENT, EXCEPT FINANCE", Category.DIVISION),
+ Classification("911", "EXECUTIVE OFFICES", Category.GROUP),
+ Classification("9111", "EXECUTIVE OFFICES", Category.CLASS),
+ Classification("912", "LEGISLATIVE BODIES", Category.GROUP),
+ Classification("9121", "LEGISLATIVE BODIES", Category.CLASS),
+ Classification("913", "EXECUTIVE AND LEGISLATIVE OFFICES COMBINED", Category.GROUP),
+ Classification("9131", "EXECUTIVE AND LEGISLATIVE OFFICES COMBINED", Category.CLASS),
+ Classification("919", "GENERAL GOVERNMENT, NOT ELSEWHERE CLASSIFIED", Category.GROUP),
+ Classification("9199", "GENERAL GOVERNMENT, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("92", "JUSTICE, PUBLIC ORDER, AND SAFETY", Category.DIVISION),
+ Classification("921", "COURTS", Category.GROUP),
+ Classification("9211", "COURTS", Category.CLASS),
+ Classification("922", "PUBLIC ORDER AND SAFETY", Category.GROUP),
+ Classification("9221", "POLICE PROTECTION", Category.CLASS),
+ Classification("9222", "LEGAL COUNSEL AND PROSECUTION", Category.CLASS),
+ Classification("9223", "CORRECTIONAL INSTITUTIONS", Category.CLASS),
+ Classification("9224", "FIRE PROTECTION", Category.CLASS),
+ Classification("9229", "PUBLIC ORDER AND SAFETY, NOT ELSEWHERE CLASSIFIED", Category.CLASS),
+ Classification("93", "PUBLIC FINANCE, TAXATION, AND MONETARY POLICY", Category.DIVISION),
+ Classification("931", "PUBLIC FINANCE, TAXATION, AND MONETARY POLICY", Category.GROUP),
+ Classification("9311", "PUBLIC FINANCE, TAXATION, AND MONETARY POLICY", Category.CLASS),
+ Classification("94", "ADMINISTRATION OF HUMAN RESOURCE PROGRAMS", Category.DIVISION),
+ Classification("941", "ADMINISTRATION OF EDUCATIONAL PROGRAMS", Category.GROUP),
+ Classification("9411", "ADMINISTRATION OF EDUCATIONAL PROGRAMS", Category.CLASS),
+ Classification("943", "ADMINISTRATION OF PUBLIC HEALTH PROGRAMS", Category.GROUP),
+ Classification("9431", "ADMINISTRATION OF PUBLIC HEALTH PROGRAMS", Category.CLASS),
+ Classification("944", "ADMINISTRATION OF SOCIAL, HUMAN RESOURCE AND INCOME MAINTENANCE PR", Category.GROUP),
+ Classification("9441", "ADMINISTRATION OF SOCIAL, HUMAN RESOURCE AND INCOME MAINTENANCE P", Category.CLASS),
+ Classification("945", "ADMINISTRATION OF VETERANS' AFFAIRS, EXCEPT HEALTH AND INSURANCE", Category.GROUP),
+ Classification("9451", "ADMINISTRATION OF VETERANS' AFFAIRS, EXCEPT HEALTH AND INSURANCE", Category.CLASS),
+ Classification("95", "ADMINISTRATION OF ENVIRONMENTAL QUALITY AND HOUSING PROGRAMS", Category.DIVISION),
+ Classification("951", "ADMINISTRATION OF ENVIRONMENTAL QUALITY PROGRAMS", Category.GROUP),
+ Classification("9511", "AIR AND WATER RESOURCE AND SOLID WASTE MANAGEMENT", Category.CLASS),
+ Classification("9512", "LAND, MINERAL, WILDLIFE, AND FOREST CONSERVATION", Category.CLASS),
+ Classification("953", "ADMINISTRATION OF HOUSING AND URBAN DEVELOPMENT PROGRAMS", Category.GROUP),
+ Classification("9531", "ADMINISTRATION OF HOUSING PROGRAMS", Category.CLASS),
+ Classification("9532", "ADMINISTRATION OF URBAN PLANNING AND COMMUNITY AND RURAL DEVELOPM", Category.CLASS),
+ Classification("96", "ADMINISTRATION OF ECONOMIC PROGRAMS", Category.DIVISION),
+ Classification("961", "ADMINISTRATION OF GENERAL ECONOMIC PROGRAMS", Category.GROUP),
+ Classification("9611", "ADMINISTRATION OF GENERAL ECONOMIC PROGRAMS", Category.CLASS),
+ Classification("962", "REGULATION AND ADMINISTRATION OF TRANSPORTATION PROGRAMS", Category.GROUP),
+ Classification("9621", "REGULATION AND ADMINISTRATION OF TRANSPORTATION PROGRAMS", Category.CLASS),
+ Classification("963", "REGULATION AND ADMINISTRATION OF COMMUNICATIONS, ELECTRIC, GAS, AN", Category.GROUP),
+ Classification("9631", "REGULATION AND ADMINISTRATION OF COMMUNICATIONS, ELECTRIC, GAS, A", Category.CLASS),
+ Classification("964", "REGULATION OF AGRICULTURAL MARKETING AND COMMODITIES", Category.GROUP),
+ Classification("9641", "REGULATION OF AGRICULTURAL MARKETING AND COMMODITIES", Category.CLASS),
+ Classification("965", "REGULATION, LICENSING, AND INSPECTION OF MISCELLANEOUS COMMERCIAL", Category.GROUP),
+ Classification("9651", "REGULATION, LICENSING, AND INSPECTION OF MISCELLANEOUS COMMERCIAL", Category.CLASS),
+ Classification("966", "SPACE RESEARCH AND TECHNOLOGY", Category.GROUP),
+ Classification("9661", "SPACE RESEARCH AND TECHNOLOGY", Category.CLASS),
+ Classification("97", "NATIONAL SECURITY AND INTERNATIONAL AFFAIRS", Category.DIVISION),
+ Classification("971", "NATIONAL SECURITY", Category.GROUP),
+ Classification("9711", "NATIONAL SECURITY", Category.CLASS),
+ Classification("972", "INTERNATIONAL AFFAIRS", Category.GROUP),
+ Classification("9721", "INTERNATIONAL AFFAIRS", Category.CLASS),
+ Classification("K", "NONCLASSIFIABLE ESTABLISHMENTS", Category.SECTION),
+ Classification("99", "NONCLASSIFIABLE ESTABLISHMENTS", Category.DIVISION),
+ Classification("999", "NONCLASSIFIABLE ESTABLISHMENTS", Category.GROUP),
+ Classification("9999", "NONCLASSIFIABLE ESTABLISHMENTS", Category.CLASS),
+ ],
+)
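+# --- illustrative sketch (editor's note, not part of the module) ----------
+# The hierarchy above is encoded purely by code shape: SECTION (one letter),
+# DIVISION (2 digits), GROUP (3 digits), CLASS (4 digits), with digit codes
+# nesting by prefix.  A hypothetical index and ancestor walk; the
+# `classifications` attribute on `SIC` is an assumption, not verified API:
+#
+#     by_code = {c.code: c for c in SIC.classifications}
+#
+#     def ancestors(code: str):
+#         """Yield the GROUP and DIVISION entries above a 4-digit CLASS."""
+#         for n in (3, 2):
+#             if code[:n] in by_code:
+#                 yield by_code[code[:n]]
+#
+#     [c.title for c in ancestors("5712")]
+#     # -> ['HOME FURNITURE AND FURNISHINGS STORES',
+#     #     'HOME FURNITURE, FURNISHINGS, AND EQUIPMENT STORES']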
diff --git a/pyisic/_standards/sic/sic_to_naics2017.py b/pyisic/_standards/sic/sic_to_naics2017.py
new file mode 100644
index 0000000..ba1d354
--- /dev/null
+++ b/pyisic/_standards/sic/sic_to_naics2017.py
@@ -0,0 +1,2203 @@
+# -*- coding: utf-8 -*-
+"""`Concordance between SIC and NAICS2017 <https://www.inegi.org.mx/app/scian/>`_.
+"""
+from ...types import Concordance, Standards
+from ..naics2017 import NAICS2017
+from . import SIC
+
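+# Each concordance entry pairs one (standard, code) tuple with another, so a
+# single SIC class may map to several NAICS 2017 classes (e.g. "0119" below).
+# A minimal lookup sketch (hypothetical helper, not pyisic public API; it
+# assumes Concordance keeps its `concordances` argument as an attribute of
+# the same name):
+#
+#     def naics_for_sic(code: str) -> list:
+#         return [dst for (_, src), (_, dst) in SIC_to_NAICS2017.concordances
+#                 if src == code]
+#
+#     naics_for_sic("0111")  # -> ['111140']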
+SIC_to_NAICS2017 = Concordance(
+ src=SIC,
+ dst=NAICS2017,
+ concordances=[
+ ((Standards.SIC, "0111"), (Standards.NAICS2017, "111140")),
+ ((Standards.SIC, "0112"), (Standards.NAICS2017, "111160")),
+ ((Standards.SIC, "0115"), (Standards.NAICS2017, "111150")),
+ ((Standards.SIC, "0116"), (Standards.NAICS2017, "111110")),
+ ((Standards.SIC, "0119"), (Standards.NAICS2017, "111120")),
+ ((Standards.SIC, "0119"), (Standards.NAICS2017, "111130")),
+ ((Standards.SIC, "0119"), (Standards.NAICS2017, "111150")),
+ ((Standards.SIC, "0119"), (Standards.NAICS2017, "111191")),
+ ((Standards.SIC, "0119"), (Standards.NAICS2017, "111199")),
+ ((Standards.SIC, "0131"), (Standards.NAICS2017, "111920")),
+ ((Standards.SIC, "0132"), (Standards.NAICS2017, "111910")),
+ ((Standards.SIC, "0133"), (Standards.NAICS2017, "111930")),
+ ((Standards.SIC, "0133"), (Standards.NAICS2017, "111991")),
+ ((Standards.SIC, "0134"), (Standards.NAICS2017, "111211")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "111199")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "111211")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "111219")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "111940")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "111992")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "111998")),
+ ((Standards.SIC, "0139"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "0161"), (Standards.NAICS2017, "111211")),
+ ((Standards.SIC, "0161"), (Standards.NAICS2017, "111219")),
+ ((Standards.SIC, "0171"), (Standards.NAICS2017, "111333")),
+ ((Standards.SIC, "0171"), (Standards.NAICS2017, "111334")),
+ ((Standards.SIC, "0172"), (Standards.NAICS2017, "111332")),
+ ((Standards.SIC, "0173"), (Standards.NAICS2017, "111335")),
+ ((Standards.SIC, "0174"), (Standards.NAICS2017, "111310")),
+ ((Standards.SIC, "0174"), (Standards.NAICS2017, "111320")),
+ ((Standards.SIC, "0175"), (Standards.NAICS2017, "111331")),
+ ((Standards.SIC, "0175"), (Standards.NAICS2017, "111339")),
+ ((Standards.SIC, "0179"), (Standards.NAICS2017, "111336")),
+ ((Standards.SIC, "0179"), (Standards.NAICS2017, "111339")),
+ ((Standards.SIC, "0181"), (Standards.NAICS2017, "111421")),
+ ((Standards.SIC, "0181"), (Standards.NAICS2017, "111422")),
+ ((Standards.SIC, "0182"), (Standards.NAICS2017, "111411")),
+ ((Standards.SIC, "0182"), (Standards.NAICS2017, "111419")),
+ ((Standards.SIC, "0191"), (Standards.NAICS2017, "111998")),
+ ((Standards.SIC, "0191"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "0211"), (Standards.NAICS2017, "112112")),
+ ((Standards.SIC, "0212"), (Standards.NAICS2017, "112111")),
+ ((Standards.SIC, "0213"), (Standards.NAICS2017, "112210")),
+ ((Standards.SIC, "0214"), (Standards.NAICS2017, "112410")),
+ ((Standards.SIC, "0214"), (Standards.NAICS2017, "112420")),
+ ((Standards.SIC, "0219"), (Standards.NAICS2017, "112990")),
+ ((Standards.SIC, "0241"), (Standards.NAICS2017, "112111")),
+ ((Standards.SIC, "0241"), (Standards.NAICS2017, "112120")),
+ ((Standards.SIC, "0251"), (Standards.NAICS2017, "112320")),
+ ((Standards.SIC, "0252"), (Standards.NAICS2017, "112310")),
+ ((Standards.SIC, "0253"), (Standards.NAICS2017, "112330")),
+ ((Standards.SIC, "0254"), (Standards.NAICS2017, "112340")),
+ ((Standards.SIC, "0259"), (Standards.NAICS2017, "112390")),
+ ((Standards.SIC, "0271"), (Standards.NAICS2017, "112930")),
+ ((Standards.SIC, "0272"), (Standards.NAICS2017, "112920")),
+ ((Standards.SIC, "0273"), (Standards.NAICS2017, "112511")),
+ ((Standards.SIC, "0273"), (Standards.NAICS2017, "112512")),
+ ((Standards.SIC, "0273"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "0279"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "0279"), (Standards.NAICS2017, "112910")),
+ ((Standards.SIC, "0279"), (Standards.NAICS2017, "112990")),
+ ((Standards.SIC, "0291"), (Standards.NAICS2017, "112990")),
+ ((Standards.SIC, "0711"), (Standards.NAICS2017, "115112")),
+ ((Standards.SIC, "0721"), (Standards.NAICS2017, "115112")),
+ ((Standards.SIC, "0722"), (Standards.NAICS2017, "115113")),
+ ((Standards.SIC, "0723"), (Standards.NAICS2017, "115114")),
+ ((Standards.SIC, "0723"), (Standards.NAICS2017, "311119")),
+ ((Standards.SIC, "0724"), (Standards.NAICS2017, "115111")),
+ ((Standards.SIC, "0741"), (Standards.NAICS2017, "541940")),
+ ((Standards.SIC, "0742"), (Standards.NAICS2017, "541940")),
+ ((Standards.SIC, "0751"), (Standards.NAICS2017, "115210")),
+ ((Standards.SIC, "0751"), (Standards.NAICS2017, "311611")),
+ ((Standards.SIC, "0752"), (Standards.NAICS2017, "115210")),
+ ((Standards.SIC, "0752"), (Standards.NAICS2017, "812910")),
+ ((Standards.SIC, "0761"), (Standards.NAICS2017, "115115")),
+ ((Standards.SIC, "0762"), (Standards.NAICS2017, "115116")),
+ ((Standards.SIC, "0781"), (Standards.NAICS2017, "541320")),
+ ((Standards.SIC, "0781"), (Standards.NAICS2017, "541690")),
+ ((Standards.SIC, "0782"), (Standards.NAICS2017, "561730")),
+ ((Standards.SIC, "0783"), (Standards.NAICS2017, "561730")),
+ ((Standards.SIC, "0811"), (Standards.NAICS2017, "111421")),
+ ((Standards.SIC, "0811"), (Standards.NAICS2017, "113110")),
+ ((Standards.SIC, "0831"), (Standards.NAICS2017, "111998")),
+ ((Standards.SIC, "0831"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "0831"), (Standards.NAICS2017, "113210")),
+ ((Standards.SIC, "0851"), (Standards.NAICS2017, "115310")),
+ ((Standards.SIC, "0912"), (Standards.NAICS2017, "114111")),
+ ((Standards.SIC, "0913"), (Standards.NAICS2017, "114112")),
+ ((Standards.SIC, "0919"), (Standards.NAICS2017, "111998")),
+ ((Standards.SIC, "0919"), (Standards.NAICS2017, "112512")),
+ ((Standards.SIC, "0919"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "0919"), (Standards.NAICS2017, "114112")),
+ ((Standards.SIC, "0919"), (Standards.NAICS2017, "114119")),
+ ((Standards.SIC, "0921"), (Standards.NAICS2017, "112511")),
+ ((Standards.SIC, "0921"), (Standards.NAICS2017, "112512")),
+ ((Standards.SIC, "0971"), (Standards.NAICS2017, "114210")),
+ ((Standards.SIC, "1011"), (Standards.NAICS2017, "212210")),
+ ((Standards.SIC, "1021"), (Standards.NAICS2017, "212230")),
+ ((Standards.SIC, "1031"), (Standards.NAICS2017, "212230")),
+ ((Standards.SIC, "1041"), (Standards.NAICS2017, "212221")),
+ ((Standards.SIC, "1044"), (Standards.NAICS2017, "212222")),
+ ((Standards.SIC, "1061"), (Standards.NAICS2017, "212230")),
+ ((Standards.SIC, "1061"), (Standards.NAICS2017, "212299")),
+ ((Standards.SIC, "1081"), (Standards.NAICS2017, "213114")),
+ ((Standards.SIC, "1081"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1081"), (Standards.NAICS2017, "541360")),
+ ((Standards.SIC, "1094"), (Standards.NAICS2017, "212291")),
+ ((Standards.SIC, "1099"), (Standards.NAICS2017, "212299")),
+ ((Standards.SIC, "1221"), (Standards.NAICS2017, "212111")),
+ ((Standards.SIC, "1222"), (Standards.NAICS2017, "212112")),
+ ((Standards.SIC, "1231"), (Standards.NAICS2017, "212113")),
+ ((Standards.SIC, "1241"), (Standards.NAICS2017, "213113")),
+ ((Standards.SIC, "1241"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1311"), (Standards.NAICS2017, "211120")),
+ ((Standards.SIC, "1321"), (Standards.NAICS2017, "211130")),
+ ((Standards.SIC, "1381"), (Standards.NAICS2017, "213111")),
+ ((Standards.SIC, "1382"), (Standards.NAICS2017, "213112")),
+ ((Standards.SIC, "1382"), (Standards.NAICS2017, "541360")),
+ ((Standards.SIC, "1389"), (Standards.NAICS2017, "213112")),
+ ((Standards.SIC, "1389"), (Standards.NAICS2017, "237120")),
+ ((Standards.SIC, "1389"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1411"), (Standards.NAICS2017, "212311")),
+ ((Standards.SIC, "1422"), (Standards.NAICS2017, "212312")),
+ ((Standards.SIC, "1423"), (Standards.NAICS2017, "212313")),
+ ((Standards.SIC, "1429"), (Standards.NAICS2017, "212319")),
+ ((Standards.SIC, "1442"), (Standards.NAICS2017, "212321")),
+ ((Standards.SIC, "1446"), (Standards.NAICS2017, "212322")),
+ ((Standards.SIC, "1455"), (Standards.NAICS2017, "212324")),
+ ((Standards.SIC, "1459"), (Standards.NAICS2017, "212325")),
+ ((Standards.SIC, "1474"), (Standards.NAICS2017, "212391")),
+ ((Standards.SIC, "1475"), (Standards.NAICS2017, "212392")),
+ ((Standards.SIC, "1479"), (Standards.NAICS2017, "212393")),
+ ((Standards.SIC, "1481"), (Standards.NAICS2017, "213115")),
+ ((Standards.SIC, "1481"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1481"), (Standards.NAICS2017, "541360")),
+ ((Standards.SIC, "1499"), (Standards.NAICS2017, "212319")),
+ ((Standards.SIC, "1499"), (Standards.NAICS2017, "212399")),
+ ((Standards.SIC, "1521"), (Standards.NAICS2017, "236115")),
+ ((Standards.SIC, "1521"), (Standards.NAICS2017, "236118")),
+ ((Standards.SIC, "1522"), (Standards.NAICS2017, "236116")),
+ ((Standards.SIC, "1522"), (Standards.NAICS2017, "236118")),
+ ((Standards.SIC, "1522"), (Standards.NAICS2017, "236220")),
+ ((Standards.SIC, "1531"), (Standards.NAICS2017, "236117")),
+ ((Standards.SIC, "1531"), (Standards.NAICS2017, "236118")),
+ ((Standards.SIC, "1531"), (Standards.NAICS2017, "236210")),
+ ((Standards.SIC, "1531"), (Standards.NAICS2017, "236220")),
+ ((Standards.SIC, "1541"), (Standards.NAICS2017, "236210")),
+ ((Standards.SIC, "1541"), (Standards.NAICS2017, "236220")),
+ ((Standards.SIC, "1542"), (Standards.NAICS2017, "236220")),
+ ((Standards.SIC, "1611"), (Standards.NAICS2017, "237310")),
+ ((Standards.SIC, "1622"), (Standards.NAICS2017, "237310")),
+ ((Standards.SIC, "1622"), (Standards.NAICS2017, "237990")),
+ ((Standards.SIC, "1623"), (Standards.NAICS2017, "237110")),
+ ((Standards.SIC, "1623"), (Standards.NAICS2017, "237120")),
+ ((Standards.SIC, "1623"), (Standards.NAICS2017, "237130")),
+ ((Standards.SIC, "1629"), (Standards.NAICS2017, "236210")),
+ ((Standards.SIC, "1629"), (Standards.NAICS2017, "237110")),
+ ((Standards.SIC, "1629"), (Standards.NAICS2017, "237120")),
+ ((Standards.SIC, "1629"), (Standards.NAICS2017, "237130")),
+ ((Standards.SIC, "1629"), (Standards.NAICS2017, "237990")),
+ ((Standards.SIC, "1629"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1711"), (Standards.NAICS2017, "238210")),
+ ((Standards.SIC, "1711"), (Standards.NAICS2017, "238220")),
+ ((Standards.SIC, "1711"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1721"), (Standards.NAICS2017, "237310")),
+ ((Standards.SIC, "1721"), (Standards.NAICS2017, "238320")),
+ ((Standards.SIC, "1731"), (Standards.NAICS2017, "238210")),
+ ((Standards.SIC, "1741"), (Standards.NAICS2017, "238140")),
+ ((Standards.SIC, "1742"), (Standards.NAICS2017, "238310")),
+ ((Standards.SIC, "1743"), (Standards.NAICS2017, "238310")),
+ ((Standards.SIC, "1743"), (Standards.NAICS2017, "238340")),
+ ((Standards.SIC, "1751"), (Standards.NAICS2017, "238130")),
+ ((Standards.SIC, "1751"), (Standards.NAICS2017, "238350")),
+ ((Standards.SIC, "1752"), (Standards.NAICS2017, "238310")),
+ ((Standards.SIC, "1752"), (Standards.NAICS2017, "238330")),
+ ((Standards.SIC, "1761"), (Standards.NAICS2017, "238160")),
+ ((Standards.SIC, "1761"), (Standards.NAICS2017, "238170")),
+ ((Standards.SIC, "1761"), (Standards.NAICS2017, "238390")),
+ ((Standards.SIC, "1771"), (Standards.NAICS2017, "238110")),
+ ((Standards.SIC, "1771"), (Standards.NAICS2017, "238140")),
+ ((Standards.SIC, "1771"), (Standards.NAICS2017, "238990")),
+ ((Standards.SIC, "1781"), (Standards.NAICS2017, "237110")),
+ ((Standards.SIC, "1791"), (Standards.NAICS2017, "238120")),
+ ((Standards.SIC, "1791"), (Standards.NAICS2017, "238190")),
+ ((Standards.SIC, "1791"), (Standards.NAICS2017, "238220")),
+ ((Standards.SIC, "1791"), (Standards.NAICS2017, "238310")),
+ ((Standards.SIC, "1793"), (Standards.NAICS2017, "238150")),
+ ((Standards.SIC, "1794"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1795"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1796"), (Standards.NAICS2017, "238220")),
+ ((Standards.SIC, "1796"), (Standards.NAICS2017, "238290")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "236220")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "237990")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238150")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238190")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238290")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238310")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238320")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238350")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238390")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "238990")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "561790")),
+ ((Standards.SIC, "1799"), (Standards.NAICS2017, "562910")),
+ ((Standards.SIC, "2011"), (Standards.NAICS2017, "311611")),
+ ((Standards.SIC, "2013"), (Standards.NAICS2017, "311612")),
+ ((Standards.SIC, "2013"), (Standards.NAICS2017, "311613")),
+ ((Standards.SIC, "2015"), (Standards.NAICS2017, "311615")),
+ ((Standards.SIC, "2015"), (Standards.NAICS2017, "311999")),
+ ((Standards.SIC, "2021"), (Standards.NAICS2017, "311512")),
+ ((Standards.SIC, "2022"), (Standards.NAICS2017, "311513")),
+ ((Standards.SIC, "2023"), (Standards.NAICS2017, "311511")),
+ ((Standards.SIC, "2023"), (Standards.NAICS2017, "311514")),
+ ((Standards.SIC, "2024"), (Standards.NAICS2017, "311520")),
+ ((Standards.SIC, "2026"), (Standards.NAICS2017, "311511")),
+ ((Standards.SIC, "2026"), (Standards.NAICS2017, "311514")),
+ ((Standards.SIC, "2032"), (Standards.NAICS2017, "311422")),
+ ((Standards.SIC, "2032"), (Standards.NAICS2017, "311999")),
+ ((Standards.SIC, "2033"), (Standards.NAICS2017, "311421")),
+ ((Standards.SIC, "2034"), (Standards.NAICS2017, "311211")),
+ ((Standards.SIC, "2034"), (Standards.NAICS2017, "311423")),
+ ((Standards.SIC, "2034"), (Standards.NAICS2017, "311999")),
+ ((Standards.SIC, "2035"), (Standards.NAICS2017, "311421")),
+ ((Standards.SIC, "2035"), (Standards.NAICS2017, "311941")),
+ ((Standards.SIC, "2037"), (Standards.NAICS2017, "311411")),
+ ((Standards.SIC, "2038"), (Standards.NAICS2017, "311412")),
+ ((Standards.SIC, "2041"), (Standards.NAICS2017, "311211")),
+ ((Standards.SIC, "2043"), (Standards.NAICS2017, "311230")),
+ ((Standards.SIC, "2043"), (Standards.NAICS2017, "311920")),
+ ((Standards.SIC, "2044"), (Standards.NAICS2017, "311212")),
+ ((Standards.SIC, "2045"), (Standards.NAICS2017, "311824")),
+ ((Standards.SIC, "2046"), (Standards.NAICS2017, "311221")),
+ ((Standards.SIC, "2046"), (Standards.NAICS2017, "311225")),
+ ((Standards.SIC, "2047"), (Standards.NAICS2017, "311111")),
+ ((Standards.SIC, "2048"), (Standards.NAICS2017, "311119")),
+ ((Standards.SIC, "2048"), (Standards.NAICS2017, "311611")),
+ ((Standards.SIC, "2051"), (Standards.NAICS2017, "311812")),
+ ((Standards.SIC, "2052"), (Standards.NAICS2017, "311812")),
+ ((Standards.SIC, "2052"), (Standards.NAICS2017, "311821")),
+ ((Standards.SIC, "2052"), (Standards.NAICS2017, "311919")),
+ ((Standards.SIC, "2053"), (Standards.NAICS2017, "311813")),
+ ((Standards.SIC, "2061"), (Standards.NAICS2017, "311314")),
+ ((Standards.SIC, "2062"), (Standards.NAICS2017, "311314")),
+ ((Standards.SIC, "2063"), (Standards.NAICS2017, "311313")),
+ ((Standards.SIC, "2064"), (Standards.NAICS2017, "311340")),
+ ((Standards.SIC, "2064"), (Standards.NAICS2017, "311352")),
+ ((Standards.SIC, "2066"), (Standards.NAICS2017, "311351")),
+ ((Standards.SIC, "2066"), (Standards.NAICS2017, "311352")),
+ ((Standards.SIC, "2067"), (Standards.NAICS2017, "311340")),
+ ((Standards.SIC, "2068"), (Standards.NAICS2017, "311911")),
+ ((Standards.SIC, "2074"), (Standards.NAICS2017, "311224")),
+ ((Standards.SIC, "2074"), (Standards.NAICS2017, "311225")),
+ ((Standards.SIC, "2075"), (Standards.NAICS2017, "311224")),
+ ((Standards.SIC, "2075"), (Standards.NAICS2017, "311225")),
+ ((Standards.SIC, "2076"), (Standards.NAICS2017, "311224")),
+ ((Standards.SIC, "2076"), (Standards.NAICS2017, "311225")),
+ ((Standards.SIC, "2077"), (Standards.NAICS2017, "311613")),
+ ((Standards.SIC, "2077"), (Standards.NAICS2017, "311710")),
+ ((Standards.SIC, "2079"), (Standards.NAICS2017, "311224")),
+ ((Standards.SIC, "2079"), (Standards.NAICS2017, "311225")),
+ ((Standards.SIC, "2082"), (Standards.NAICS2017, "311942")),
+ ((Standards.SIC, "2082"), (Standards.NAICS2017, "312120")),
+ ((Standards.SIC, "2083"), (Standards.NAICS2017, "311213")),
+ ((Standards.SIC, "2084"), (Standards.NAICS2017, "312130")),
+ ((Standards.SIC, "2085"), (Standards.NAICS2017, "312130")),
+ ((Standards.SIC, "2085"), (Standards.NAICS2017, "312140")),
+ ((Standards.SIC, "2086"), (Standards.NAICS2017, "312111")),
+ ((Standards.SIC, "2086"), (Standards.NAICS2017, "312112")),
+ ((Standards.SIC, "2087"), (Standards.NAICS2017, "311920")),
+ ((Standards.SIC, "2087"), (Standards.NAICS2017, "311930")),
+ ((Standards.SIC, "2087"), (Standards.NAICS2017, "311942")),
+ ((Standards.SIC, "2087"), (Standards.NAICS2017, "311999")),
+ ((Standards.SIC, "2091"), (Standards.NAICS2017, "311710")),
+ ((Standards.SIC, "2092"), (Standards.NAICS2017, "311710")),
+ ((Standards.SIC, "2095"), (Standards.NAICS2017, "311920")),
+ ((Standards.SIC, "2096"), (Standards.NAICS2017, "311919")),
+ ((Standards.SIC, "2097"), (Standards.NAICS2017, "312113")),
+ ((Standards.SIC, "2098"), (Standards.NAICS2017, "311824")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "111998")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "112519")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311212")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311340")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311423")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311824")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311830")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311911")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311920")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311941")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311942")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311991")),
+ ((Standards.SIC, "2099"), (Standards.NAICS2017, "311999")),
+ ((Standards.SIC, "2111"), (Standards.NAICS2017, "312230")),
+ ((Standards.SIC, "2121"), (Standards.NAICS2017, "312230")),
+ ((Standards.SIC, "2131"), (Standards.NAICS2017, "312230")),
+ ((Standards.SIC, "2141"), (Standards.NAICS2017, "312230")),
+ ((Standards.SIC, "2211"), (Standards.NAICS2017, "313210")),
+ ((Standards.SIC, "2221"), (Standards.NAICS2017, "313210")),
+ ((Standards.SIC, "2231"), (Standards.NAICS2017, "313210")),
+ ((Standards.SIC, "2231"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2241"), (Standards.NAICS2017, "313220")),
+ ((Standards.SIC, "2251"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2251"), (Standards.NAICS2017, "315110")),
+ ((Standards.SIC, "2252"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2252"), (Standards.NAICS2017, "315110")),
+ ((Standards.SIC, "2253"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2253"), (Standards.NAICS2017, "315190")),
+ ((Standards.SIC, "2254"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2254"), (Standards.NAICS2017, "315190")),
+ ((Standards.SIC, "2257"), (Standards.NAICS2017, "313240")),
+ ((Standards.SIC, "2257"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2258"), (Standards.NAICS2017, "313240")),
+ ((Standards.SIC, "2258"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2259"), (Standards.NAICS2017, "313240")),
+ ((Standards.SIC, "2259"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2259"), (Standards.NAICS2017, "315190")),
+ ((Standards.SIC, "2261"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2262"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2269"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2273"), (Standards.NAICS2017, "314110")),
+ ((Standards.SIC, "2281"), (Standards.NAICS2017, "313110")),
+ ((Standards.SIC, "2282"), (Standards.NAICS2017, "313110")),
+ ((Standards.SIC, "2284"), (Standards.NAICS2017, "313110")),
+ ((Standards.SIC, "2284"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2295"), (Standards.NAICS2017, "313320")),
+ ((Standards.SIC, "2296"), (Standards.NAICS2017, "314994")),
+ ((Standards.SIC, "2297"), (Standards.NAICS2017, "313230")),
+ ((Standards.SIC, "2298"), (Standards.NAICS2017, "313110")),
+ ((Standards.SIC, "2298"), (Standards.NAICS2017, "314994")),
+ ((Standards.SIC, "2299"), (Standards.NAICS2017, "313110")),
+ ((Standards.SIC, "2299"), (Standards.NAICS2017, "313210")),
+ ((Standards.SIC, "2299"), (Standards.NAICS2017, "313220")),
+ ((Standards.SIC, "2299"), (Standards.NAICS2017, "313230")),
+ ((Standards.SIC, "2299"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "2299"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2311"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2311"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2311"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2321"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2321"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2321"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2322"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2322"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2322"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2323"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2323"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2323"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2325"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2325"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2325"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2326"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2326"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2326"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2329"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2329"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2329"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2329"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "2331"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2331"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2331"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2335"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2335"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2335"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2337"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2337"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2337"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2339"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2339"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2339"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2339"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "2339"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2341"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2341"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2341"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2341"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2342"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2342"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2342"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2353"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2353"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2353"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2361"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2361"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2361"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2361"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2369"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2369"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2369"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2369"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2371"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2371"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2371"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "2381"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2381"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2381"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2384"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2384"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2384"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2384"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2385"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2385"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2385"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "2385"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2385"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "2385"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2386"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2386"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2386"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "2387"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2387"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2387"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2389"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2389"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2389"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "2389"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "2389"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2391"), (Standards.NAICS2017, "314120")),
+ ((Standards.SIC, "2392"), (Standards.NAICS2017, "314120")),
+ ((Standards.SIC, "2392"), (Standards.NAICS2017, "314910")),
+ ((Standards.SIC, "2392"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2392"), (Standards.NAICS2017, "339994")),
+ ((Standards.SIC, "2393"), (Standards.NAICS2017, "314910")),
+ ((Standards.SIC, "2394"), (Standards.NAICS2017, "314910")),
+ ((Standards.SIC, "2395"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2395"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2396"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2396"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2396"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2396"), (Standards.NAICS2017, "323113")),
+ ((Standards.SIC, "2396"), (Standards.NAICS2017, "336360")),
+ ((Standards.SIC, "2397"), (Standards.NAICS2017, "313220")),
+ ((Standards.SIC, "2399"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "2399"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "2399"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "2399"), (Standards.NAICS2017, "336360")),
+ ((Standards.SIC, "2411"), (Standards.NAICS2017, "113310")),
+ ((Standards.SIC, "2421"), (Standards.NAICS2017, "321113")),
+ ((Standards.SIC, "2421"), (Standards.NAICS2017, "321912")),
+ ((Standards.SIC, "2421"), (Standards.NAICS2017, "321918")),
+ ((Standards.SIC, "2421"), (Standards.NAICS2017, "321920")),
+ ((Standards.SIC, "2421"), (Standards.NAICS2017, "321999")),
+ ((Standards.SIC, "2426"), (Standards.NAICS2017, "321113")),
+ ((Standards.SIC, "2426"), (Standards.NAICS2017, "321912")),
+ ((Standards.SIC, "2426"), (Standards.NAICS2017, "321918")),
+ ((Standards.SIC, "2426"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "2429"), (Standards.NAICS2017, "321113")),
+ ((Standards.SIC, "2429"), (Standards.NAICS2017, "321920")),
+ ((Standards.SIC, "2429"), (Standards.NAICS2017, "321999")),
+ ((Standards.SIC, "2431"), (Standards.NAICS2017, "321911")),
+ ((Standards.SIC, "2431"), (Standards.NAICS2017, "321918")),
+ ((Standards.SIC, "2434"), (Standards.NAICS2017, "337110")),
+ ((Standards.SIC, "2435"), (Standards.NAICS2017, "321211")),
+ ((Standards.SIC, "2436"), (Standards.NAICS2017, "321212")),
+ ((Standards.SIC, "2439"), (Standards.NAICS2017, "321213")),
+ ((Standards.SIC, "2439"), (Standards.NAICS2017, "321214")),
+ ((Standards.SIC, "2441"), (Standards.NAICS2017, "321920")),
+ ((Standards.SIC, "2448"), (Standards.NAICS2017, "321920")),
+ ((Standards.SIC, "2449"), (Standards.NAICS2017, "321920")),
+ ((Standards.SIC, "2451"), (Standards.NAICS2017, "321991")),
+ ((Standards.SIC, "2452"), (Standards.NAICS2017, "321992")),
+ ((Standards.SIC, "2491"), (Standards.NAICS2017, "321114")),
+ ((Standards.SIC, "2493"), (Standards.NAICS2017, "321219")),
+ ((Standards.SIC, "2499"), (Standards.NAICS2017, "321920")),
+ ((Standards.SIC, "2499"), (Standards.NAICS2017, "321999")),
+ ((Standards.SIC, "2499"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "2499"), (Standards.NAICS2017, "337125")),
+ ((Standards.SIC, "2499"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "2499"), (Standards.NAICS2017, "339999")),
+ ((Standards.SIC, "2511"), (Standards.NAICS2017, "337122")),
+ ((Standards.SIC, "2511"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "2512"), (Standards.NAICS2017, "337121")),
+ ((Standards.SIC, "2514"), (Standards.NAICS2017, "337121")),
+ ((Standards.SIC, "2514"), (Standards.NAICS2017, "337124")),
+ ((Standards.SIC, "2514"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "2515"), (Standards.NAICS2017, "337121")),
+ ((Standards.SIC, "2515"), (Standards.NAICS2017, "337910")),
+ ((Standards.SIC, "2517"), (Standards.NAICS2017, "321999")),
+ ((Standards.SIC, "2519"), (Standards.NAICS2017, "337125")),
+ ((Standards.SIC, "2521"), (Standards.NAICS2017, "337211")),
+ ((Standards.SIC, "2522"), (Standards.NAICS2017, "337214")),
+ ((Standards.SIC, "2531"), (Standards.NAICS2017, "336360")),
+ ((Standards.SIC, "2531"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "2531"), (Standards.NAICS2017, "339940")),
+ ((Standards.SIC, "2541"), (Standards.NAICS2017, "337110")),
+ ((Standards.SIC, "2541"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "2541"), (Standards.NAICS2017, "337212")),
+ ((Standards.SIC, "2541"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "2542"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "2542"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "2591"), (Standards.NAICS2017, "337920")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "333249")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "333994")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "333997")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "333999")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "2599"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "2611"), (Standards.NAICS2017, "322110")),
+ ((Standards.SIC, "2611"), (Standards.NAICS2017, "322121")),
+ ((Standards.SIC, "2611"), (Standards.NAICS2017, "322122")),
+ ((Standards.SIC, "2611"), (Standards.NAICS2017, "322130")),
+ ((Standards.SIC, "2621"), (Standards.NAICS2017, "322121")),
+ ((Standards.SIC, "2621"), (Standards.NAICS2017, "322122")),
+ ((Standards.SIC, "2631"), (Standards.NAICS2017, "322130")),
+ ((Standards.SIC, "2652"), (Standards.NAICS2017, "322219")),
+ ((Standards.SIC, "2653"), (Standards.NAICS2017, "322211")),
+ ((Standards.SIC, "2655"), (Standards.NAICS2017, "322219")),
+ ((Standards.SIC, "2656"), (Standards.NAICS2017, "322219")),
+ ((Standards.SIC, "2657"), (Standards.NAICS2017, "322212")),
+ ((Standards.SIC, "2671"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "2671"), (Standards.NAICS2017, "326112")),
+ ((Standards.SIC, "2672"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "2673"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "2673"), (Standards.NAICS2017, "326111")),
+ ((Standards.SIC, "2674"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "2675"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "2675"), (Standards.NAICS2017, "322230")),
+ ((Standards.SIC, "2675"), (Standards.NAICS2017, "322299")),
+ ((Standards.SIC, "2676"), (Standards.NAICS2017, "322291")),
+ ((Standards.SIC, "2677"), (Standards.NAICS2017, "322230")),
+ ((Standards.SIC, "2678"), (Standards.NAICS2017, "322230")),
+ ((Standards.SIC, "2679"), (Standards.NAICS2017, "322211")),
+ ((Standards.SIC, "2679"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "2679"), (Standards.NAICS2017, "322230")),
+ ((Standards.SIC, "2679"), (Standards.NAICS2017, "322299")),
+ ((Standards.SIC, "2711"), (Standards.NAICS2017, "511110")),
+ ((Standards.SIC, "2711"), (Standards.NAICS2017, "519130")),
+ ((Standards.SIC, "2721"), (Standards.NAICS2017, "511120")),
+ ((Standards.SIC, "2721"), (Standards.NAICS2017, "519130")),
+ ((Standards.SIC, "2731"), (Standards.NAICS2017, "511130")),
+ ((Standards.SIC, "2731"), (Standards.NAICS2017, "512230")),
+ ((Standards.SIC, "2731"), (Standards.NAICS2017, "519130")),
+ ((Standards.SIC, "2732"), (Standards.NAICS2017, "323117")),
+ ((Standards.SIC, "2741"), (Standards.NAICS2017, "511120")),
+ ((Standards.SIC, "2741"), (Standards.NAICS2017, "511130")),
+ ((Standards.SIC, "2741"), (Standards.NAICS2017, "511140")),
+ ((Standards.SIC, "2741"), (Standards.NAICS2017, "511199")),
+ ((Standards.SIC, "2741"), (Standards.NAICS2017, "512230")),
+ ((Standards.SIC, "2741"), (Standards.NAICS2017, "519130")),
+ ((Standards.SIC, "2752"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "2754"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "2759"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "2759"), (Standards.NAICS2017, "323113")),
+ ((Standards.SIC, "2761"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "2771"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "2771"), (Standards.NAICS2017, "323113")),
+ ((Standards.SIC, "2771"), (Standards.NAICS2017, "511191")),
+ ((Standards.SIC, "2771"), (Standards.NAICS2017, "519130")),
+ ((Standards.SIC, "2782"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "2789"), (Standards.NAICS2017, "323120")),
+ ((Standards.SIC, "2791"), (Standards.NAICS2017, "323120")),
+ ((Standards.SIC, "2796"), (Standards.NAICS2017, "323120")),
+ ((Standards.SIC, "2812"), (Standards.NAICS2017, "325180")),
+ ((Standards.SIC, "2813"), (Standards.NAICS2017, "325120")),
+ ((Standards.SIC, "2816"), (Standards.NAICS2017, "325130")),
+ ((Standards.SIC, "2816"), (Standards.NAICS2017, "325180")),
+ ((Standards.SIC, "2819"), (Standards.NAICS2017, "211130")),
+ ((Standards.SIC, "2819"), (Standards.NAICS2017, "325130")),
+ ((Standards.SIC, "2819"), (Standards.NAICS2017, "325180")),
+ ((Standards.SIC, "2819"), (Standards.NAICS2017, "325998")),
+ ((Standards.SIC, "2819"), (Standards.NAICS2017, "331313")),
+ ((Standards.SIC, "2821"), (Standards.NAICS2017, "325211")),
+ ((Standards.SIC, "2822"), (Standards.NAICS2017, "325212")),
+ ((Standards.SIC, "2823"), (Standards.NAICS2017, "325220")),
+ ((Standards.SIC, "2824"), (Standards.NAICS2017, "325220")),
+ ((Standards.SIC, "2833"), (Standards.NAICS2017, "325411")),
+ ((Standards.SIC, "2834"), (Standards.NAICS2017, "325412")),
+ ((Standards.SIC, "2835"), (Standards.NAICS2017, "325412")),
+ ((Standards.SIC, "2835"), (Standards.NAICS2017, "325413")),
+ ((Standards.SIC, "2836"), (Standards.NAICS2017, "325414")),
+ ((Standards.SIC, "2841"), (Standards.NAICS2017, "325611")),
+ ((Standards.SIC, "2842"), (Standards.NAICS2017, "325612")),
+ ((Standards.SIC, "2843"), (Standards.NAICS2017, "325613")),
+ ((Standards.SIC, "2844"), (Standards.NAICS2017, "325611")),
+ ((Standards.SIC, "2844"), (Standards.NAICS2017, "325620")),
+ ((Standards.SIC, "2851"), (Standards.NAICS2017, "325510")),
+ ((Standards.SIC, "2861"), (Standards.NAICS2017, "325194")),
+ ((Standards.SIC, "2865"), (Standards.NAICS2017, "325110")),
+ ((Standards.SIC, "2865"), (Standards.NAICS2017, "325130")),
+ ((Standards.SIC, "2865"), (Standards.NAICS2017, "325194")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325110")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325120")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325180")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325193")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325194")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325199")),
+ ((Standards.SIC, "2869"), (Standards.NAICS2017, "325998")),
+ ((Standards.SIC, "2873"), (Standards.NAICS2017, "325311")),
+ ((Standards.SIC, "2874"), (Standards.NAICS2017, "325312")),
+ ((Standards.SIC, "2875"), (Standards.NAICS2017, "325314")),
+ ((Standards.SIC, "2879"), (Standards.NAICS2017, "325320")),
+ ((Standards.SIC, "2891"), (Standards.NAICS2017, "325520")),
+ ((Standards.SIC, "2892"), (Standards.NAICS2017, "325920")),
+ ((Standards.SIC, "2893"), (Standards.NAICS2017, "325910")),
+ ((Standards.SIC, "2895"), (Standards.NAICS2017, "325180")),
+ ((Standards.SIC, "2899"), (Standards.NAICS2017, "311942")),
+ ((Standards.SIC, "2899"), (Standards.NAICS2017, "325199")),
+ ((Standards.SIC, "2899"), (Standards.NAICS2017, "325510")),
+ ((Standards.SIC, "2899"), (Standards.NAICS2017, "325998")),
+ ((Standards.SIC, "2911"), (Standards.NAICS2017, "324110")),
+ ((Standards.SIC, "2951"), (Standards.NAICS2017, "324121")),
+ ((Standards.SIC, "2952"), (Standards.NAICS2017, "324122")),
+ ((Standards.SIC, "2992"), (Standards.NAICS2017, "324191")),
+ ((Standards.SIC, "2999"), (Standards.NAICS2017, "324199")),
+ ((Standards.SIC, "3011"), (Standards.NAICS2017, "326211")),
+ ((Standards.SIC, "3021"), (Standards.NAICS2017, "316210")),
+ ((Standards.SIC, "3052"), (Standards.NAICS2017, "326220")),
+ ((Standards.SIC, "3053"), (Standards.NAICS2017, "339991")),
+ ((Standards.SIC, "3061"), (Standards.NAICS2017, "326291")),
+ ((Standards.SIC, "3061"), (Standards.NAICS2017, "326299")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "313320")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "314910")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "315280")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "326199")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "326299")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "336612")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "339920")),
+ ((Standards.SIC, "3069"), (Standards.NAICS2017, "339930")),
+ ((Standards.SIC, "3081"), (Standards.NAICS2017, "326113")),
+ ((Standards.SIC, "3082"), (Standards.NAICS2017, "326121")),
+ ((Standards.SIC, "3083"), (Standards.NAICS2017, "326130")),
+ ((Standards.SIC, "3084"), (Standards.NAICS2017, "326122")),
+ ((Standards.SIC, "3085"), (Standards.NAICS2017, "326160")),
+ ((Standards.SIC, "3086"), (Standards.NAICS2017, "326140")),
+ ((Standards.SIC, "3086"), (Standards.NAICS2017, "326150")),
+ ((Standards.SIC, "3087"), (Standards.NAICS2017, "325991")),
+ ((Standards.SIC, "3088"), (Standards.NAICS2017, "326191")),
+ ((Standards.SIC, "3089"), (Standards.NAICS2017, "326121")),
+ ((Standards.SIC, "3089"), (Standards.NAICS2017, "326122")),
+ ((Standards.SIC, "3089"), (Standards.NAICS2017, "326199")),
+ ((Standards.SIC, "3089"), (Standards.NAICS2017, "336612")),
+ ((Standards.SIC, "3089"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "3089"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "3111"), (Standards.NAICS2017, "316110")),
+ ((Standards.SIC, "3131"), (Standards.NAICS2017, "316998")),
+ ((Standards.SIC, "3131"), (Standards.NAICS2017, "321999")),
+ ((Standards.SIC, "3131"), (Standards.NAICS2017, "339993")),
+ ((Standards.SIC, "3142"), (Standards.NAICS2017, "316210")),
+ ((Standards.SIC, "3143"), (Standards.NAICS2017, "316210")),
+ ((Standards.SIC, "3144"), (Standards.NAICS2017, "316210")),
+ ((Standards.SIC, "3149"), (Standards.NAICS2017, "316210")),
+ ((Standards.SIC, "3151"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "3151"), (Standards.NAICS2017, "315210")),
+ ((Standards.SIC, "3151"), (Standards.NAICS2017, "315990")),
+ ((Standards.SIC, "3161"), (Standards.NAICS2017, "316998")),
+ ((Standards.SIC, "3171"), (Standards.NAICS2017, "316992")),
+ ((Standards.SIC, "3172"), (Standards.NAICS2017, "316998")),
+ ((Standards.SIC, "3172"), (Standards.NAICS2017, "339910")),
+ ((Standards.SIC, "3199"), (Standards.NAICS2017, "316998")),
+ ((Standards.SIC, "3211"), (Standards.NAICS2017, "327211")),
+ ((Standards.SIC, "3221"), (Standards.NAICS2017, "327213")),
+ ((Standards.SIC, "3229"), (Standards.NAICS2017, "327212")),
+ ((Standards.SIC, "3231"), (Standards.NAICS2017, "327215")),
+ ((Standards.SIC, "3241"), (Standards.NAICS2017, "327310")),
+ ((Standards.SIC, "3251"), (Standards.NAICS2017, "327120")),
+ ((Standards.SIC, "3251"), (Standards.NAICS2017, "327331")),
+ ((Standards.SIC, "3253"), (Standards.NAICS2017, "327120")),
+ ((Standards.SIC, "3255"), (Standards.NAICS2017, "327120")),
+ ((Standards.SIC, "3259"), (Standards.NAICS2017, "327120")),
+ ((Standards.SIC, "3261"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "3262"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "3263"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "3264"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "3269"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "3271"), (Standards.NAICS2017, "327331")),
+ ((Standards.SIC, "3272"), (Standards.NAICS2017, "327332")),
+ ((Standards.SIC, "3272"), (Standards.NAICS2017, "327390")),
+ ((Standards.SIC, "3272"), (Standards.NAICS2017, "327999")),
+ ((Standards.SIC, "3273"), (Standards.NAICS2017, "327320")),
+ ((Standards.SIC, "3274"), (Standards.NAICS2017, "327410")),
+ ((Standards.SIC, "3275"), (Standards.NAICS2017, "327420")),
+ ((Standards.SIC, "3281"), (Standards.NAICS2017, "327991")),
+ ((Standards.SIC, "3291"), (Standards.NAICS2017, "327910")),
+ ((Standards.SIC, "3291"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3292"), (Standards.NAICS2017, "327999")),
+ ((Standards.SIC, "3292"), (Standards.NAICS2017, "336340")),
+ ((Standards.SIC, "3292"), (Standards.NAICS2017, "336350")),
+ ((Standards.SIC, "3295"), (Standards.NAICS2017, "212324")),
+ ((Standards.SIC, "3295"), (Standards.NAICS2017, "212325")),
+ ((Standards.SIC, "3295"), (Standards.NAICS2017, "212393")),
+ ((Standards.SIC, "3295"), (Standards.NAICS2017, "212399")),
+ ((Standards.SIC, "3295"), (Standards.NAICS2017, "327992")),
+ ((Standards.SIC, "3296"), (Standards.NAICS2017, "327993")),
+ ((Standards.SIC, "3297"), (Standards.NAICS2017, "327120")),
+ ((Standards.SIC, "3299"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "3299"), (Standards.NAICS2017, "327420")),
+ ((Standards.SIC, "3299"), (Standards.NAICS2017, "327999")),
+ ((Standards.SIC, "3312"), (Standards.NAICS2017, "324199")),
+ ((Standards.SIC, "3312"), (Standards.NAICS2017, "331110")),
+ ((Standards.SIC, "3312"), (Standards.NAICS2017, "331221")),
+ ((Standards.SIC, "3313"), (Standards.NAICS2017, "331110")),
+ ((Standards.SIC, "3315"), (Standards.NAICS2017, "331222")),
+ ((Standards.SIC, "3315"), (Standards.NAICS2017, "332618")),
+ ((Standards.SIC, "3316"), (Standards.NAICS2017, "331221")),
+ ((Standards.SIC, "3317"), (Standards.NAICS2017, "331210")),
+ ((Standards.SIC, "3321"), (Standards.NAICS2017, "331511")),
+ ((Standards.SIC, "3322"), (Standards.NAICS2017, "331511")),
+ ((Standards.SIC, "3324"), (Standards.NAICS2017, "331512")),
+ ((Standards.SIC, "3325"), (Standards.NAICS2017, "331513")),
+ ((Standards.SIC, "3331"), (Standards.NAICS2017, "331410")),
+ ((Standards.SIC, "3334"), (Standards.NAICS2017, "331313")),
+ ((Standards.SIC, "3339"), (Standards.NAICS2017, "331410")),
+ ((Standards.SIC, "3341"), (Standards.NAICS2017, "331314")),
+ ((Standards.SIC, "3341"), (Standards.NAICS2017, "331420")),
+ ((Standards.SIC, "3341"), (Standards.NAICS2017, "331492")),
+ ((Standards.SIC, "3351"), (Standards.NAICS2017, "331420")),
+ ((Standards.SIC, "3353"), (Standards.NAICS2017, "331315")),
+ ((Standards.SIC, "3354"), (Standards.NAICS2017, "331318")),
+ ((Standards.SIC, "3355"), (Standards.NAICS2017, "331318")),
+ ((Standards.SIC, "3356"), (Standards.NAICS2017, "331491")),
+ ((Standards.SIC, "3357"), (Standards.NAICS2017, "331318")),
+ ((Standards.SIC, "3357"), (Standards.NAICS2017, "331420")),
+ ((Standards.SIC, "3357"), (Standards.NAICS2017, "331491")),
+ ((Standards.SIC, "3357"), (Standards.NAICS2017, "335921")),
+ ((Standards.SIC, "3357"), (Standards.NAICS2017, "335929")),
+ ((Standards.SIC, "3363"), (Standards.NAICS2017, "331523")),
+ ((Standards.SIC, "3364"), (Standards.NAICS2017, "331523")),
+ ((Standards.SIC, "3365"), (Standards.NAICS2017, "331524")),
+ ((Standards.SIC, "3366"), (Standards.NAICS2017, "331529")),
+ ((Standards.SIC, "3369"), (Standards.NAICS2017, "331529")),
+ ((Standards.SIC, "3398"), (Standards.NAICS2017, "332811")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "331110")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "331221")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "331314")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "331420")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "331492")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "332618")),
+ ((Standards.SIC, "3399"), (Standards.NAICS2017, "332813")),
+ ((Standards.SIC, "3411"), (Standards.NAICS2017, "332431")),
+ ((Standards.SIC, "3412"), (Standards.NAICS2017, "332439")),
+ ((Standards.SIC, "3421"), (Standards.NAICS2017, "332215")),
+ ((Standards.SIC, "3421"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3423"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3425"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "332439")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "332510")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "332722")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "332919")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "333923")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "336390")),
+ ((Standards.SIC, "3429"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "3431"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3432"), (Standards.NAICS2017, "332913")),
+ ((Standards.SIC, "3432"), (Standards.NAICS2017, "332919")),
+ ((Standards.SIC, "3432"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3433"), (Standards.NAICS2017, "333414")),
+ ((Standards.SIC, "3441"), (Standards.NAICS2017, "332312")),
+ ((Standards.SIC, "3442"), (Standards.NAICS2017, "332321")),
+ ((Standards.SIC, "3443"), (Standards.NAICS2017, "332313")),
+ ((Standards.SIC, "3443"), (Standards.NAICS2017, "332410")),
+ ((Standards.SIC, "3443"), (Standards.NAICS2017, "332420")),
+ ((Standards.SIC, "3443"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "3444"), (Standards.NAICS2017, "332321")),
+ ((Standards.SIC, "3444"), (Standards.NAICS2017, "332322")),
+ ((Standards.SIC, "3444"), (Standards.NAICS2017, "332439")),
+ ((Standards.SIC, "3444"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "3446"), (Standards.NAICS2017, "332323")),
+ ((Standards.SIC, "3448"), (Standards.NAICS2017, "332311")),
+ ((Standards.SIC, "3449"), (Standards.NAICS2017, "332114")),
+ ((Standards.SIC, "3449"), (Standards.NAICS2017, "332312")),
+ ((Standards.SIC, "3449"), (Standards.NAICS2017, "332323")),
+ ((Standards.SIC, "3451"), (Standards.NAICS2017, "332721")),
+ ((Standards.SIC, "3452"), (Standards.NAICS2017, "332722")),
+ ((Standards.SIC, "3462"), (Standards.NAICS2017, "332111")),
+ ((Standards.SIC, "3463"), (Standards.NAICS2017, "332112")),
+ ((Standards.SIC, "3465"), (Standards.NAICS2017, "336370")),
+ ((Standards.SIC, "3466"), (Standards.NAICS2017, "332119")),
+ ((Standards.SIC, "3469"), (Standards.NAICS2017, "332119")),
+ ((Standards.SIC, "3469"), (Standards.NAICS2017, "332215")),
+ ((Standards.SIC, "3469"), (Standards.NAICS2017, "332439")),
+ ((Standards.SIC, "3471"), (Standards.NAICS2017, "332813")),
+ ((Standards.SIC, "3479"), (Standards.NAICS2017, "332812")),
+ ((Standards.SIC, "3479"), (Standards.NAICS2017, "339910")),
+ ((Standards.SIC, "3482"), (Standards.NAICS2017, "332992")),
+ ((Standards.SIC, "3483"), (Standards.NAICS2017, "332993")),
+ ((Standards.SIC, "3484"), (Standards.NAICS2017, "332994")),
+ ((Standards.SIC, "3489"), (Standards.NAICS2017, "332994")),
+ ((Standards.SIC, "3491"), (Standards.NAICS2017, "332911")),
+ ((Standards.SIC, "3492"), (Standards.NAICS2017, "332912")),
+ ((Standards.SIC, "3493"), (Standards.NAICS2017, "332613")),
+ ((Standards.SIC, "3494"), (Standards.NAICS2017, "332919")),
+ ((Standards.SIC, "3494"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3495"), (Standards.NAICS2017, "332613")),
+ ((Standards.SIC, "3495"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3496"), (Standards.NAICS2017, "332215")),
+ ((Standards.SIC, "3496"), (Standards.NAICS2017, "332618")),
+ ((Standards.SIC, "3496"), (Standards.NAICS2017, "333924")),
+ ((Standards.SIC, "3497"), (Standards.NAICS2017, "322220")),
+ ((Standards.SIC, "3497"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3498"), (Standards.NAICS2017, "332996")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "332117")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "332439")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "332510")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "332919")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "336360")),
+ ((Standards.SIC, "3499"), (Standards.NAICS2017, "337215")),
+ ((Standards.SIC, "3511"), (Standards.NAICS2017, "333611")),
+ ((Standards.SIC, "3519"), (Standards.NAICS2017, "333618")),
+ ((Standards.SIC, "3519"), (Standards.NAICS2017, "336390")),
+ ((Standards.SIC, "3523"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3523"), (Standards.NAICS2017, "332323")),
+ ((Standards.SIC, "3523"), (Standards.NAICS2017, "333111")),
+ ((Standards.SIC, "3523"), (Standards.NAICS2017, "333922")),
+ ((Standards.SIC, "3524"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3524"), (Standards.NAICS2017, "333112")),
+ ((Standards.SIC, "3531"), (Standards.NAICS2017, "333120")),
+ ((Standards.SIC, "3531"), (Standards.NAICS2017, "333923")),
+ ((Standards.SIC, "3531"), (Standards.NAICS2017, "336510")),
+ ((Standards.SIC, "3532"), (Standards.NAICS2017, "333131")),
+ ((Standards.SIC, "3533"), (Standards.NAICS2017, "333132")),
+ ((Standards.SIC, "3534"), (Standards.NAICS2017, "333921")),
+ ((Standards.SIC, "3535"), (Standards.NAICS2017, "333922")),
+ ((Standards.SIC, "3536"), (Standards.NAICS2017, "333923")),
+ ((Standards.SIC, "3537"), (Standards.NAICS2017, "332439")),
+ ((Standards.SIC, "3537"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3537"), (Standards.NAICS2017, "333924")),
+ ((Standards.SIC, "3541"), (Standards.NAICS2017, "333517")),
+ ((Standards.SIC, "3542"), (Standards.NAICS2017, "333517")),
+ ((Standards.SIC, "3543"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3544"), (Standards.NAICS2017, "333511")),
+ ((Standards.SIC, "3544"), (Standards.NAICS2017, "333514")),
+ ((Standards.SIC, "3545"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3545"), (Standards.NAICS2017, "333515")),
+ ((Standards.SIC, "3546"), (Standards.NAICS2017, "333991")),
+ ((Standards.SIC, "3547"), (Standards.NAICS2017, "333519")),
+ ((Standards.SIC, "3548"), (Standards.NAICS2017, "333992")),
+ ((Standards.SIC, "3548"), (Standards.NAICS2017, "335311")),
+ ((Standards.SIC, "3549"), (Standards.NAICS2017, "333519")),
+ ((Standards.SIC, "3552"), (Standards.NAICS2017, "333249")),
+ ((Standards.SIC, "3553"), (Standards.NAICS2017, "333243")),
+ ((Standards.SIC, "3554"), (Standards.NAICS2017, "333243")),
+ ((Standards.SIC, "3555"), (Standards.NAICS2017, "333244")),
+ ((Standards.SIC, "3556"), (Standards.NAICS2017, "333241")),
+ ((Standards.SIC, "3559"), (Standards.NAICS2017, "332410")),
+ ((Standards.SIC, "3559"), (Standards.NAICS2017, "333111")),
+ ((Standards.SIC, "3559"), (Standards.NAICS2017, "333242")),
+ ((Standards.SIC, "3559"), (Standards.NAICS2017, "333249")),
+ ((Standards.SIC, "3559"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3561"), (Standards.NAICS2017, "333914")),
+ ((Standards.SIC, "3562"), (Standards.NAICS2017, "332991")),
+ ((Standards.SIC, "3563"), (Standards.NAICS2017, "333912")),
+ ((Standards.SIC, "3564"), (Standards.NAICS2017, "333413")),
+ ((Standards.SIC, "3565"), (Standards.NAICS2017, "333993")),
+ ((Standards.SIC, "3566"), (Standards.NAICS2017, "333612")),
+ ((Standards.SIC, "3567"), (Standards.NAICS2017, "333994")),
+ ((Standards.SIC, "3568"), (Standards.NAICS2017, "333613")),
+ ((Standards.SIC, "3569"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "3569"), (Standards.NAICS2017, "333414")),
+ ((Standards.SIC, "3569"), (Standards.NAICS2017, "333999")),
+ ((Standards.SIC, "3571"), (Standards.NAICS2017, "334111")),
+ ((Standards.SIC, "3572"), (Standards.NAICS2017, "334112")),
+ ((Standards.SIC, "3575"), (Standards.NAICS2017, "334118")),
+ ((Standards.SIC, "3577"), (Standards.NAICS2017, "333316")),
+ ((Standards.SIC, "3577"), (Standards.NAICS2017, "334118")),
+ ((Standards.SIC, "3577"), (Standards.NAICS2017, "334418")),
+ ((Standards.SIC, "3577"), (Standards.NAICS2017, "334613")),
+ ((Standards.SIC, "3578"), (Standards.NAICS2017, "333316")),
+ ((Standards.SIC, "3578"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3578"), (Standards.NAICS2017, "334118")),
+ ((Standards.SIC, "3579"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3579"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3579"), (Standards.NAICS2017, "339940")),
+ ((Standards.SIC, "3581"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3582"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3585"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "3585"), (Standards.NAICS2017, "336390")),
+ ((Standards.SIC, "3586"), (Standards.NAICS2017, "333914")),
+ ((Standards.SIC, "3589"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3592"), (Standards.NAICS2017, "336310")),
+ ((Standards.SIC, "3593"), (Standards.NAICS2017, "333995")),
+ ((Standards.SIC, "3594"), (Standards.NAICS2017, "333996")),
+ ((Standards.SIC, "3596"), (Standards.NAICS2017, "333997")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "332710")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "332813")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "333999")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3599"), (Standards.NAICS2017, "336390")),
+ ((Standards.SIC, "3612"), (Standards.NAICS2017, "335311")),
+ ((Standards.SIC, "3613"), (Standards.NAICS2017, "335313")),
+ ((Standards.SIC, "3621"), (Standards.NAICS2017, "335312")),
+ ((Standards.SIC, "3624"), (Standards.NAICS2017, "335991")),
+ ((Standards.SIC, "3625"), (Standards.NAICS2017, "335314")),
+ ((Standards.SIC, "3629"), (Standards.NAICS2017, "335999")),
+ ((Standards.SIC, "3631"), (Standards.NAICS2017, "335220")),
+ ((Standards.SIC, "3632"), (Standards.NAICS2017, "335220")),
+ ((Standards.SIC, "3633"), (Standards.NAICS2017, "335220")),
+ ((Standards.SIC, "3634"), (Standards.NAICS2017, "333414")),
+ ((Standards.SIC, "3634"), (Standards.NAICS2017, "335210")),
+ ((Standards.SIC, "3634"), (Standards.NAICS2017, "339999")),
+ ((Standards.SIC, "3635"), (Standards.NAICS2017, "335210")),
+ ((Standards.SIC, "3639"), (Standards.NAICS2017, "333249")),
+ ((Standards.SIC, "3639"), (Standards.NAICS2017, "335210")),
+ ((Standards.SIC, "3639"), (Standards.NAICS2017, "335220")),
+ ((Standards.SIC, "3641"), (Standards.NAICS2017, "335110")),
+ ((Standards.SIC, "3643"), (Standards.NAICS2017, "335931")),
+ ((Standards.SIC, "3644"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3644"), (Standards.NAICS2017, "335932")),
+ ((Standards.SIC, "3645"), (Standards.NAICS2017, "335121")),
+ ((Standards.SIC, "3646"), (Standards.NAICS2017, "335122")),
+ ((Standards.SIC, "3647"), (Standards.NAICS2017, "336320")),
+ ((Standards.SIC, "3648"), (Standards.NAICS2017, "335129")),
+ ((Standards.SIC, "3651"), (Standards.NAICS2017, "334310")),
+ ((Standards.SIC, "3652"), (Standards.NAICS2017, "334614")),
+ ((Standards.SIC, "3652"), (Standards.NAICS2017, "512250")),
+ ((Standards.SIC, "3661"), (Standards.NAICS2017, "334210")),
+ ((Standards.SIC, "3661"), (Standards.NAICS2017, "334418")),
+ ((Standards.SIC, "3663"), (Standards.NAICS2017, "334220")),
+ ((Standards.SIC, "3663"), (Standards.NAICS2017, "334515")),
+ ((Standards.SIC, "3669"), (Standards.NAICS2017, "334290")),
+ ((Standards.SIC, "3671"), (Standards.NAICS2017, "334419")),
+ ((Standards.SIC, "3672"), (Standards.NAICS2017, "334412")),
+ ((Standards.SIC, "3674"), (Standards.NAICS2017, "334413")),
+ ((Standards.SIC, "3675"), (Standards.NAICS2017, "334416")),
+ ((Standards.SIC, "3676"), (Standards.NAICS2017, "334416")),
+ ((Standards.SIC, "3677"), (Standards.NAICS2017, "334416")),
+ ((Standards.SIC, "3678"), (Standards.NAICS2017, "334417")),
+ ((Standards.SIC, "3679"), (Standards.NAICS2017, "334220")),
+ ((Standards.SIC, "3679"), (Standards.NAICS2017, "334310")),
+ ((Standards.SIC, "3679"), (Standards.NAICS2017, "334418")),
+ ((Standards.SIC, "3679"), (Standards.NAICS2017, "334419")),
+ ((Standards.SIC, "3679"), (Standards.NAICS2017, "334515")),
+ ((Standards.SIC, "3691"), (Standards.NAICS2017, "335911")),
+ ((Standards.SIC, "3692"), (Standards.NAICS2017, "335912")),
+ ((Standards.SIC, "3694"), (Standards.NAICS2017, "336320")),
+ ((Standards.SIC, "3695"), (Standards.NAICS2017, "334613")),
+ ((Standards.SIC, "3699"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3699"), (Standards.NAICS2017, "333618")),
+ ((Standards.SIC, "3699"), (Standards.NAICS2017, "333992")),
+ ((Standards.SIC, "3699"), (Standards.NAICS2017, "335129")),
+ ((Standards.SIC, "3699"), (Standards.NAICS2017, "335999")),
+ ((Standards.SIC, "3711"), (Standards.NAICS2017, "336111")),
+ ((Standards.SIC, "3711"), (Standards.NAICS2017, "336112")),
+ ((Standards.SIC, "3711"), (Standards.NAICS2017, "336120")),
+ ((Standards.SIC, "3711"), (Standards.NAICS2017, "336211")),
+ ((Standards.SIC, "3711"), (Standards.NAICS2017, "336992")),
+ ((Standards.SIC, "3713"), (Standards.NAICS2017, "336211")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336211")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336310")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336320")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336330")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336340")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336350")),
+ ((Standards.SIC, "3714"), (Standards.NAICS2017, "336390")),
+ ((Standards.SIC, "3715"), (Standards.NAICS2017, "336212")),
+ ((Standards.SIC, "3716"), (Standards.NAICS2017, "336213")),
+ ((Standards.SIC, "3721"), (Standards.NAICS2017, "336411")),
+ ((Standards.SIC, "3721"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "3724"), (Standards.NAICS2017, "336412")),
+ ((Standards.SIC, "3724"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "3728"), (Standards.NAICS2017, "332912")),
+ ((Standards.SIC, "3728"), (Standards.NAICS2017, "336411")),
+ ((Standards.SIC, "3728"), (Standards.NAICS2017, "336413")),
+ ((Standards.SIC, "3728"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "3731"), (Standards.NAICS2017, "336611")),
+ ((Standards.SIC, "3731"), (Standards.NAICS2017, "488390")),
+ ((Standards.SIC, "3732"), (Standards.NAICS2017, "336612")),
+ ((Standards.SIC, "3732"), (Standards.NAICS2017, "811490")),
+ ((Standards.SIC, "3743"), (Standards.NAICS2017, "333914")),
+ ((Standards.SIC, "3743"), (Standards.NAICS2017, "336510")),
+ ((Standards.SIC, "3751"), (Standards.NAICS2017, "336991")),
+ ((Standards.SIC, "3761"), (Standards.NAICS2017, "336414")),
+ ((Standards.SIC, "3761"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "3764"), (Standards.NAICS2017, "336415")),
+ ((Standards.SIC, "3764"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "3769"), (Standards.NAICS2017, "336419")),
+ ((Standards.SIC, "3769"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "3792"), (Standards.NAICS2017, "336214")),
+ ((Standards.SIC, "3795"), (Standards.NAICS2017, "336992")),
+ ((Standards.SIC, "3799"), (Standards.NAICS2017, "333924")),
+ ((Standards.SIC, "3799"), (Standards.NAICS2017, "336214")),
+ ((Standards.SIC, "3799"), (Standards.NAICS2017, "336390")),
+ ((Standards.SIC, "3799"), (Standards.NAICS2017, "336999")),
+ ((Standards.SIC, "3812"), (Standards.NAICS2017, "334511")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "333249")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "333994")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "333997")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "333999")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "3821"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "3822"), (Standards.NAICS2017, "334512")),
+ ((Standards.SIC, "3823"), (Standards.NAICS2017, "334513")),
+ ((Standards.SIC, "3824"), (Standards.NAICS2017, "334514")),
+ ((Standards.SIC, "3825"), (Standards.NAICS2017, "334514")),
+ ((Standards.SIC, "3825"), (Standards.NAICS2017, "334515")),
+ ((Standards.SIC, "3826"), (Standards.NAICS2017, "334516")),
+ ((Standards.SIC, "3827"), (Standards.NAICS2017, "333314")),
+ ((Standards.SIC, "3829"), (Standards.NAICS2017, "334514")),
+ ((Standards.SIC, "3829"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3829"), (Standards.NAICS2017, "339112")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "332994")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "333249")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "333415")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "333994")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "333997")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "333999")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "339112")),
+ ((Standards.SIC, "3841"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "3842"), (Standards.NAICS2017, "322291")),
+ ((Standards.SIC, "3842"), (Standards.NAICS2017, "334510")),
+ ((Standards.SIC, "3842"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "3842"), (Standards.NAICS2017, "339999")),
+ ((Standards.SIC, "3843"), (Standards.NAICS2017, "339114")),
+ ((Standards.SIC, "3844"), (Standards.NAICS2017, "334517")),
+ ((Standards.SIC, "3845"), (Standards.NAICS2017, "334510")),
+ ((Standards.SIC, "3845"), (Standards.NAICS2017, "334517")),
+ ((Standards.SIC, "3851"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "3851"), (Standards.NAICS2017, "339115")),
+ ((Standards.SIC, "3861"), (Standards.NAICS2017, "325992")),
+ ((Standards.SIC, "3861"), (Standards.NAICS2017, "333316")),
+ ((Standards.SIC, "3873"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3911"), (Standards.NAICS2017, "339910")),
+ ((Standards.SIC, "3914"), (Standards.NAICS2017, "332215")),
+ ((Standards.SIC, "3914"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3914"), (Standards.NAICS2017, "339910")),
+ ((Standards.SIC, "3915"), (Standards.NAICS2017, "334519")),
+ ((Standards.SIC, "3915"), (Standards.NAICS2017, "339910")),
+ ((Standards.SIC, "3931"), (Standards.NAICS2017, "339992")),
+ ((Standards.SIC, "3942"), (Standards.NAICS2017, "339930")),
+ ((Standards.SIC, "3944"), (Standards.NAICS2017, "336991")),
+ ((Standards.SIC, "3944"), (Standards.NAICS2017, "339930")),
+ ((Standards.SIC, "3949"), (Standards.NAICS2017, "339920")),
+ ((Standards.SIC, "3951"), (Standards.NAICS2017, "339940")),
+ ((Standards.SIC, "3952"), (Standards.NAICS2017, "325998")),
+ ((Standards.SIC, "3952"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "3952"), (Standards.NAICS2017, "339940")),
+ ((Standards.SIC, "3953"), (Standards.NAICS2017, "339940")),
+ ((Standards.SIC, "3955"), (Standards.NAICS2017, "339940")),
+ ((Standards.SIC, "3961"), (Standards.NAICS2017, "339910")),
+ ((Standards.SIC, "3961"), (Standards.NAICS2017, "339993")),
+ ((Standards.SIC, "3965"), (Standards.NAICS2017, "339993")),
+ ((Standards.SIC, "3991"), (Standards.NAICS2017, "339994")),
+ ((Standards.SIC, "3993"), (Standards.NAICS2017, "323113")),
+ ((Standards.SIC, "3993"), (Standards.NAICS2017, "339950")),
+ ((Standards.SIC, "3995"), (Standards.NAICS2017, "339995")),
+ ((Standards.SIC, "3996"), (Standards.NAICS2017, "326199")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "316110")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "321999")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "325998")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "326199")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "332215")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "332216")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "332812")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "332999")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "333318")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "335121")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "335210")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "336612")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "337127")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "339930")),
+ ((Standards.SIC, "3999"), (Standards.NAICS2017, "339999")),
+ ((Standards.SIC, "4011"), (Standards.NAICS2017, "482111")),
+ ((Standards.SIC, "4013"), (Standards.NAICS2017, "482112")),
+ ((Standards.SIC, "4013"), (Standards.NAICS2017, "488210")),
+ ((Standards.SIC, "4111"), (Standards.NAICS2017, "485111")),
+ ((Standards.SIC, "4111"), (Standards.NAICS2017, "485112")),
+ ((Standards.SIC, "4111"), (Standards.NAICS2017, "485113")),
+ ((Standards.SIC, "4111"), (Standards.NAICS2017, "485119")),
+ ((Standards.SIC, "4111"), (Standards.NAICS2017, "485999")),
+ ((Standards.SIC, "4119"), (Standards.NAICS2017, "485320")),
+ ((Standards.SIC, "4119"), (Standards.NAICS2017, "485410")),
+ ((Standards.SIC, "4119"), (Standards.NAICS2017, "485991")),
+ ((Standards.SIC, "4119"), (Standards.NAICS2017, "485999")),
+ ((Standards.SIC, "4119"), (Standards.NAICS2017, "487110")),
+ ((Standards.SIC, "4119"), (Standards.NAICS2017, "621910")),
+ ((Standards.SIC, "4121"), (Standards.NAICS2017, "485310")),
+ ((Standards.SIC, "4131"), (Standards.NAICS2017, "485210")),
+ ((Standards.SIC, "4141"), (Standards.NAICS2017, "485510")),
+ ((Standards.SIC, "4142"), (Standards.NAICS2017, "485510")),
+ ((Standards.SIC, "4151"), (Standards.NAICS2017, "485410")),
+ ((Standards.SIC, "4173"), (Standards.NAICS2017, "488490")),
+ ((Standards.SIC, "4212"), (Standards.NAICS2017, "484110")),
+ ((Standards.SIC, "4212"), (Standards.NAICS2017, "484210")),
+ ((Standards.SIC, "4212"), (Standards.NAICS2017, "484220")),
+ ((Standards.SIC, "4212"), (Standards.NAICS2017, "562111")),
+ ((Standards.SIC, "4212"), (Standards.NAICS2017, "562112")),
+ ((Standards.SIC, "4212"), (Standards.NAICS2017, "562119")),
+ ((Standards.SIC, "4213"), (Standards.NAICS2017, "484121")),
+ ((Standards.SIC, "4213"), (Standards.NAICS2017, "484122")),
+ ((Standards.SIC, "4213"), (Standards.NAICS2017, "484210")),
+ ((Standards.SIC, "4213"), (Standards.NAICS2017, "484230")),
+ ((Standards.SIC, "4214"), (Standards.NAICS2017, "484110")),
+ ((Standards.SIC, "4214"), (Standards.NAICS2017, "484210")),
+ ((Standards.SIC, "4214"), (Standards.NAICS2017, "484220")),
+ ((Standards.SIC, "4215"), (Standards.NAICS2017, "492110")),
+ ((Standards.SIC, "4215"), (Standards.NAICS2017, "492210")),
+ ((Standards.SIC, "4221"), (Standards.NAICS2017, "493130")),
+ ((Standards.SIC, "4222"), (Standards.NAICS2017, "493120")),
+ ((Standards.SIC, "4225"), (Standards.NAICS2017, "493110")),
+ ((Standards.SIC, "4225"), (Standards.NAICS2017, "531130")),
+ ((Standards.SIC, "4226"), (Standards.NAICS2017, "493110")),
+ ((Standards.SIC, "4226"), (Standards.NAICS2017, "493120")),
+ ((Standards.SIC, "4226"), (Standards.NAICS2017, "493190")),
+ ((Standards.SIC, "4231"), (Standards.NAICS2017, "488490")),
+ ((Standards.SIC, "4311"), (Standards.NAICS2017, "491110")),
+ ((Standards.SIC, "4412"), (Standards.NAICS2017, "483111")),
+ ((Standards.SIC, "4424"), (Standards.NAICS2017, "483113")),
+ ((Standards.SIC, "4432"), (Standards.NAICS2017, "483113")),
+ ((Standards.SIC, "4449"), (Standards.NAICS2017, "483211")),
+ ((Standards.SIC, "4481"), (Standards.NAICS2017, "483112")),
+ ((Standards.SIC, "4481"), (Standards.NAICS2017, "483114")),
+ ((Standards.SIC, "4482"), (Standards.NAICS2017, "483114")),
+ ((Standards.SIC, "4482"), (Standards.NAICS2017, "483212")),
+ ((Standards.SIC, "4489"), (Standards.NAICS2017, "483212")),
+ ((Standards.SIC, "4489"), (Standards.NAICS2017, "487210")),
+ ((Standards.SIC, "4491"), (Standards.NAICS2017, "488310")),
+ ((Standards.SIC, "4491"), (Standards.NAICS2017, "488320")),
+ ((Standards.SIC, "4492"), (Standards.NAICS2017, "488330")),
+ ((Standards.SIC, "4493"), (Standards.NAICS2017, "713930")),
+ ((Standards.SIC, "4499"), (Standards.NAICS2017, "483211")),
+ ((Standards.SIC, "4499"), (Standards.NAICS2017, "488310")),
+ ((Standards.SIC, "4499"), (Standards.NAICS2017, "488330")),
+ ((Standards.SIC, "4499"), (Standards.NAICS2017, "488390")),
+ ((Standards.SIC, "4499"), (Standards.NAICS2017, "532411")),
+ ((Standards.SIC, "4499"), (Standards.NAICS2017, "541990")),
+ ((Standards.SIC, "4512"), (Standards.NAICS2017, "481111")),
+ ((Standards.SIC, "4512"), (Standards.NAICS2017, "481112")),
+ ((Standards.SIC, "4513"), (Standards.NAICS2017, "492110")),
+ ((Standards.SIC, "4522"), (Standards.NAICS2017, "481211")),
+ ((Standards.SIC, "4522"), (Standards.NAICS2017, "481212")),
+ ((Standards.SIC, "4522"), (Standards.NAICS2017, "481219")),
+ ((Standards.SIC, "4522"), (Standards.NAICS2017, "487990")),
+ ((Standards.SIC, "4522"), (Standards.NAICS2017, "621910")),
+ ((Standards.SIC, "4581"), (Standards.NAICS2017, "488111")),
+ ((Standards.SIC, "4581"), (Standards.NAICS2017, "488119")),
+ ((Standards.SIC, "4581"), (Standards.NAICS2017, "488190")),
+ ((Standards.SIC, "4581"), (Standards.NAICS2017, "561720")),
+ ((Standards.SIC, "4581"), (Standards.NAICS2017, "811420")),
+ ((Standards.SIC, "4612"), (Standards.NAICS2017, "486110")),
+ ((Standards.SIC, "4613"), (Standards.NAICS2017, "486910")),
+ ((Standards.SIC, "4619"), (Standards.NAICS2017, "486990")),
+ ((Standards.SIC, "4724"), (Standards.NAICS2017, "561510")),
+ ((Standards.SIC, "4725"), (Standards.NAICS2017, "561520")),
+ ((Standards.SIC, "4729"), (Standards.NAICS2017, "488999")),
+ ((Standards.SIC, "4729"), (Standards.NAICS2017, "561599")),
+ ((Standards.SIC, "4731"), (Standards.NAICS2017, "488510")),
+ ((Standards.SIC, "4731"), (Standards.NAICS2017, "541614")),
+ ((Standards.SIC, "4741"), (Standards.NAICS2017, "488210")),
+ ((Standards.SIC, "4741"), (Standards.NAICS2017, "532411")),
+ ((Standards.SIC, "4783"), (Standards.NAICS2017, "488991")),
+ ((Standards.SIC, "4785"), (Standards.NAICS2017, "488390")),
+ ((Standards.SIC, "4785"), (Standards.NAICS2017, "488490")),
+ ((Standards.SIC, "4789"), (Standards.NAICS2017, "487110")),
+ ((Standards.SIC, "4789"), (Standards.NAICS2017, "488210")),
+ ((Standards.SIC, "4789"), (Standards.NAICS2017, "488999")),
+ ((Standards.SIC, "4789"), (Standards.NAICS2017, "722310")),
+ ((Standards.SIC, "4812"), (Standards.NAICS2017, "517312")),
+ ((Standards.SIC, "4812"), (Standards.NAICS2017, "517911")),
+ ((Standards.SIC, "4813"), (Standards.NAICS2017, "517311")),
+ ((Standards.SIC, "4813"), (Standards.NAICS2017, "517911")),
+ ((Standards.SIC, "4822"), (Standards.NAICS2017, "517311")),
+ ((Standards.SIC, "4832"), (Standards.NAICS2017, "515111")),
+ ((Standards.SIC, "4832"), (Standards.NAICS2017, "515112")),
+ ((Standards.SIC, "4833"), (Standards.NAICS2017, "515120")),
+ ((Standards.SIC, "4841"), (Standards.NAICS2017, "515210")),
+ ((Standards.SIC, "4841"), (Standards.NAICS2017, "517311")),
+ ((Standards.SIC, "4899"), (Standards.NAICS2017, "485310")),
+ ((Standards.SIC, "4899"), (Standards.NAICS2017, "517312")),
+ ((Standards.SIC, "4899"), (Standards.NAICS2017, "517410")),
+ ((Standards.SIC, "4899"), (Standards.NAICS2017, "517919")),
+ ((Standards.SIC, "4899"), (Standards.NAICS2017, "812990")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221111")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221112")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221113")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221114")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221115")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221116")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221117")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221118")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221121")),
+ ((Standards.SIC, "4911"), (Standards.NAICS2017, "221122")),
+ ((Standards.SIC, "4922"), (Standards.NAICS2017, "486210")),
+ ((Standards.SIC, "4923"), (Standards.NAICS2017, "221210")),
+ ((Standards.SIC, "4923"), (Standards.NAICS2017, "486210")),
+ ((Standards.SIC, "4924"), (Standards.NAICS2017, "221210")),
+ ((Standards.SIC, "4925"), (Standards.NAICS2017, "221210")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221111")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221112")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221113")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221114")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221115")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221116")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221117")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221118")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221121")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221122")),
+ ((Standards.SIC, "4931"), (Standards.NAICS2017, "221210")),
+ ((Standards.SIC, "4932"), (Standards.NAICS2017, "221210")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221111")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221112")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221113")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221114")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221115")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221116")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221117")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221118")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221121")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221122")),
+ ((Standards.SIC, "4939"), (Standards.NAICS2017, "221210")),
+ ((Standards.SIC, "4941"), (Standards.NAICS2017, "221310")),
+ ((Standards.SIC, "4952"), (Standards.NAICS2017, "221320")),
+ ((Standards.SIC, "4953"), (Standards.NAICS2017, "562211")),
+ ((Standards.SIC, "4953"), (Standards.NAICS2017, "562212")),
+ ((Standards.SIC, "4953"), (Standards.NAICS2017, "562213")),
+ ((Standards.SIC, "4953"), (Standards.NAICS2017, "562219")),
+ ((Standards.SIC, "4953"), (Standards.NAICS2017, "562920")),
+ ((Standards.SIC, "4959"), (Standards.NAICS2017, "488119")),
+ ((Standards.SIC, "4959"), (Standards.NAICS2017, "488490")),
+ ((Standards.SIC, "4959"), (Standards.NAICS2017, "561710")),
+ ((Standards.SIC, "4959"), (Standards.NAICS2017, "561790")),
+ ((Standards.SIC, "4959"), (Standards.NAICS2017, "562910")),
+ ((Standards.SIC, "4959"), (Standards.NAICS2017, "562998")),
+ ((Standards.SIC, "4961"), (Standards.NAICS2017, "221330")),
+ ((Standards.SIC, "4971"), (Standards.NAICS2017, "221310")),
+ ((Standards.SIC, "5012"), (Standards.NAICS2017, "423110")),
+ ((Standards.SIC, "5012"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5012"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5013"), (Standards.NAICS2017, "423120")),
+ ((Standards.SIC, "5013"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5013"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5013"), (Standards.NAICS2017, "441310")),
+ ((Standards.SIC, "5014"), (Standards.NAICS2017, "423130")),
+ ((Standards.SIC, "5014"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5014"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5014"), (Standards.NAICS2017, "441320")),
+ ((Standards.SIC, "5015"), (Standards.NAICS2017, "423140")),
+ ((Standards.SIC, "5015"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5015"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5015"), (Standards.NAICS2017, "441310")),
+ ((Standards.SIC, "5021"), (Standards.NAICS2017, "423210")),
+ ((Standards.SIC, "5021"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5021"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5021"), (Standards.NAICS2017, "442110")),
+ ((Standards.SIC, "5023"), (Standards.NAICS2017, "423220")),
+ ((Standards.SIC, "5023"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5023"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5023"), (Standards.NAICS2017, "442210")),
+ ((Standards.SIC, "5031"), (Standards.NAICS2017, "423310")),
+ ((Standards.SIC, "5031"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5031"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5031"), (Standards.NAICS2017, "444110")),
+ ((Standards.SIC, "5032"), (Standards.NAICS2017, "423320")),
+ ((Standards.SIC, "5032"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5032"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5032"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5033"), (Standards.NAICS2017, "423330")),
+ ((Standards.SIC, "5033"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5033"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5033"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5039"), (Standards.NAICS2017, "423310")),
+ ((Standards.SIC, "5039"), (Standards.NAICS2017, "423390")),
+ ((Standards.SIC, "5039"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5039"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5039"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5043"), (Standards.NAICS2017, "423410")),
+ ((Standards.SIC, "5043"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5043"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5044"), (Standards.NAICS2017, "423420")),
+ ((Standards.SIC, "5044"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5044"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5044"), (Standards.NAICS2017, "453210")),
+ ((Standards.SIC, "5045"), (Standards.NAICS2017, "423430")),
+ ((Standards.SIC, "5045"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5045"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5045"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5046"), (Standards.NAICS2017, "423440")),
+ ((Standards.SIC, "5046"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5046"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5047"), (Standards.NAICS2017, "423450")),
+ ((Standards.SIC, "5047"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5047"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5047"), (Standards.NAICS2017, "446199")),
+ ((Standards.SIC, "5048"), (Standards.NAICS2017, "423460")),
+ ((Standards.SIC, "5048"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5048"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5049"), (Standards.NAICS2017, "423490")),
+ ((Standards.SIC, "5049"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5049"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5049"), (Standards.NAICS2017, "453210")),
+ ((Standards.SIC, "5051"), (Standards.NAICS2017, "423510")),
+ ((Standards.SIC, "5051"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5051"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5052"), (Standards.NAICS2017, "423520")),
+ ((Standards.SIC, "5052"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5052"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5063"), (Standards.NAICS2017, "423610")),
+ ((Standards.SIC, "5063"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5063"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5063"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "423620")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "423720")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "443141")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5064"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5065"), (Standards.NAICS2017, "423690")),
+ ((Standards.SIC, "5065"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5065"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5065"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5072"), (Standards.NAICS2017, "423710")),
+ ((Standards.SIC, "5072"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5072"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5072"), (Standards.NAICS2017, "444130")),
+ ((Standards.SIC, "5074"), (Standards.NAICS2017, "423620")),
+ ((Standards.SIC, "5074"), (Standards.NAICS2017, "423720")),
+ ((Standards.SIC, "5074"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5074"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5074"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5075"), (Standards.NAICS2017, "423730")),
+ ((Standards.SIC, "5075"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5075"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5078"), (Standards.NAICS2017, "423740")),
+ ((Standards.SIC, "5078"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5078"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5082"), (Standards.NAICS2017, "423810")),
+ ((Standards.SIC, "5082"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5082"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5083"), (Standards.NAICS2017, "423820")),
+ ((Standards.SIC, "5083"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5083"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5083"), (Standards.NAICS2017, "444210")),
+ ((Standards.SIC, "5084"), (Standards.NAICS2017, "423830")),
+ ((Standards.SIC, "5084"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5084"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5085"), (Standards.NAICS2017, "423830")),
+ ((Standards.SIC, "5085"), (Standards.NAICS2017, "423840")),
+ ((Standards.SIC, "5085"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5085"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5085"), (Standards.NAICS2017, "453998")),
+ ((Standards.SIC, "5087"), (Standards.NAICS2017, "423850")),
+ ((Standards.SIC, "5087"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5087"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5087"), (Standards.NAICS2017, "446120")),
+ ((Standards.SIC, "5088"), (Standards.NAICS2017, "423860")),
+ ((Standards.SIC, "5088"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5088"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5091"), (Standards.NAICS2017, "423910")),
+ ((Standards.SIC, "5091"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5091"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5091"), (Standards.NAICS2017, "451110")),
+ ((Standards.SIC, "5092"), (Standards.NAICS2017, "423920")),
+ ((Standards.SIC, "5092"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5092"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5092"), (Standards.NAICS2017, "451120")),
+ ((Standards.SIC, "5093"), (Standards.NAICS2017, "423930")),
+ ((Standards.SIC, "5093"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5093"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5094"), (Standards.NAICS2017, "423940")),
+ ((Standards.SIC, "5094"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5094"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5094"), (Standards.NAICS2017, "448310")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "423990")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "451110")),
+ ((Standards.SIC, "5099"), (Standards.NAICS2017, "451120")),
+ ((Standards.SIC, "5111"), (Standards.NAICS2017, "424110")),
+ ((Standards.SIC, "5111"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5111"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5111"), (Standards.NAICS2017, "453210")),
+ ((Standards.SIC, "5112"), (Standards.NAICS2017, "424120")),
+ ((Standards.SIC, "5112"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5112"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5112"), (Standards.NAICS2017, "453210")),
+ ((Standards.SIC, "5113"), (Standards.NAICS2017, "424130")),
+ ((Standards.SIC, "5113"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5113"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5113"), (Standards.NAICS2017, "453998")),
+ ((Standards.SIC, "5122"), (Standards.NAICS2017, "424210")),
+ ((Standards.SIC, "5122"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5122"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5122"), (Standards.NAICS2017, "446110")),
+ ((Standards.SIC, "5122"), (Standards.NAICS2017, "446120")),
+ ((Standards.SIC, "5122"), (Standards.NAICS2017, "446191")),
+ ((Standards.SIC, "5131"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "5131"), (Standards.NAICS2017, "424310")),
+ ((Standards.SIC, "5131"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5131"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5131"), (Standards.NAICS2017, "451130")),
+ ((Standards.SIC, "5136"), (Standards.NAICS2017, "423910")),
+ ((Standards.SIC, "5136"), (Standards.NAICS2017, "424320")),
+ ((Standards.SIC, "5136"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5136"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5136"), (Standards.NAICS2017, "448110")),
+ ((Standards.SIC, "5136"), (Standards.NAICS2017, "448190")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "423910")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "424330")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "448120")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "448130")),
+ ((Standards.SIC, "5137"), (Standards.NAICS2017, "448190")),
+ ((Standards.SIC, "5139"), (Standards.NAICS2017, "424340")),
+ ((Standards.SIC, "5139"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5139"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5139"), (Standards.NAICS2017, "448210")),
+ ((Standards.SIC, "5141"), (Standards.NAICS2017, "424410")),
+ ((Standards.SIC, "5141"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5141"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5141"), (Standards.NAICS2017, "445110")),
+ ((Standards.SIC, "5142"), (Standards.NAICS2017, "424420")),
+ ((Standards.SIC, "5142"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5142"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5142"), (Standards.NAICS2017, "454390")),
+ ((Standards.SIC, "5143"), (Standards.NAICS2017, "424430")),
+ ((Standards.SIC, "5143"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5143"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5143"), (Standards.NAICS2017, "445299")),
+ ((Standards.SIC, "5144"), (Standards.NAICS2017, "424440")),
+ ((Standards.SIC, "5144"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5144"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5144"), (Standards.NAICS2017, "445210")),
+ ((Standards.SIC, "5145"), (Standards.NAICS2017, "424450")),
+ ((Standards.SIC, "5145"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5145"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5145"), (Standards.NAICS2017, "445292")),
+ ((Standards.SIC, "5146"), (Standards.NAICS2017, "424460")),
+ ((Standards.SIC, "5146"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5146"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5146"), (Standards.NAICS2017, "445220")),
+ ((Standards.SIC, "5147"), (Standards.NAICS2017, "311612")),
+ ((Standards.SIC, "5147"), (Standards.NAICS2017, "424470")),
+ ((Standards.SIC, "5147"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5147"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5147"), (Standards.NAICS2017, "445210")),
+ ((Standards.SIC, "5148"), (Standards.NAICS2017, "424480")),
+ ((Standards.SIC, "5148"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5148"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5148"), (Standards.NAICS2017, "445230")),
+ ((Standards.SIC, "5149"), (Standards.NAICS2017, "312112")),
+ ((Standards.SIC, "5149"), (Standards.NAICS2017, "424490")),
+ ((Standards.SIC, "5149"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5149"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5149"), (Standards.NAICS2017, "445299")),
+ ((Standards.SIC, "5149"), (Standards.NAICS2017, "453910")),
+ ((Standards.SIC, "5153"), (Standards.NAICS2017, "424510")),
+ ((Standards.SIC, "5153"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5153"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5153"), (Standards.NAICS2017, "444220")),
+ ((Standards.SIC, "5154"), (Standards.NAICS2017, "424520")),
+ ((Standards.SIC, "5154"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5154"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5159"), (Standards.NAICS2017, "424590")),
+ ((Standards.SIC, "5159"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5159"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5159"), (Standards.NAICS2017, "444220")),
+ ((Standards.SIC, "5162"), (Standards.NAICS2017, "424610")),
+ ((Standards.SIC, "5162"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5162"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5162"), (Standards.NAICS2017, "453998")),
+ ((Standards.SIC, "5169"), (Standards.NAICS2017, "424690")),
+ ((Standards.SIC, "5169"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5169"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5171"), (Standards.NAICS2017, "424710")),
+ ((Standards.SIC, "5171"), (Standards.NAICS2017, "454310")),
+ ((Standards.SIC, "5172"), (Standards.NAICS2017, "424720")),
+ ((Standards.SIC, "5172"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5172"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5181"), (Standards.NAICS2017, "424810")),
+ ((Standards.SIC, "5181"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5181"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5181"), (Standards.NAICS2017, "445310")),
+ ((Standards.SIC, "5182"), (Standards.NAICS2017, "424820")),
+ ((Standards.SIC, "5182"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5182"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5182"), (Standards.NAICS2017, "445310")),
+ ((Standards.SIC, "5191"), (Standards.NAICS2017, "424910")),
+ ((Standards.SIC, "5191"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5191"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5191"), (Standards.NAICS2017, "444220")),
+ ((Standards.SIC, "5192"), (Standards.NAICS2017, "424920")),
+ ((Standards.SIC, "5192"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5192"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5192"), (Standards.NAICS2017, "451211")),
+ ((Standards.SIC, "5193"), (Standards.NAICS2017, "424930")),
+ ((Standards.SIC, "5193"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5193"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5193"), (Standards.NAICS2017, "444220")),
+ ((Standards.SIC, "5194"), (Standards.NAICS2017, "424940")),
+ ((Standards.SIC, "5194"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5194"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5194"), (Standards.NAICS2017, "453991")),
+ ((Standards.SIC, "5198"), (Standards.NAICS2017, "424950")),
+ ((Standards.SIC, "5198"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5198"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "424310")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "424340")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "424610")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "424990")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "425110")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "453220")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "453910")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "453991")),
+ ((Standards.SIC, "5199"), (Standards.NAICS2017, "541890")),
+ ((Standards.SIC, "5211"), (Standards.NAICS2017, "444110")),
+ ((Standards.SIC, "5211"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5231"), (Standards.NAICS2017, "444120")),
+ ((Standards.SIC, "5231"), (Standards.NAICS2017, "444190")),
+ ((Standards.SIC, "5251"), (Standards.NAICS2017, "444130")),
+ ((Standards.SIC, "5261"), (Standards.NAICS2017, "444210")),
+ ((Standards.SIC, "5261"), (Standards.NAICS2017, "444220")),
+ ((Standards.SIC, "5271"), (Standards.NAICS2017, "453930")),
+ ((Standards.SIC, "5311"), (Standards.NAICS2017, "452210")),
+ ((Standards.SIC, "5311"), (Standards.NAICS2017, "452311")),
+ ((Standards.SIC, "5331"), (Standards.NAICS2017, "452319")),
+ ((Standards.SIC, "5399"), (Standards.NAICS2017, "452210")),
+ ((Standards.SIC, "5399"), (Standards.NAICS2017, "452311")),
+ ((Standards.SIC, "5399"), (Standards.NAICS2017, "452319")),
+ ((Standards.SIC, "5411"), (Standards.NAICS2017, "445110")),
+ ((Standards.SIC, "5411"), (Standards.NAICS2017, "445120")),
+ ((Standards.SIC, "5411"), (Standards.NAICS2017, "447110")),
+ ((Standards.SIC, "5411"), (Standards.NAICS2017, "452210")),
+ ((Standards.SIC, "5411"), (Standards.NAICS2017, "452311")),
+ ((Standards.SIC, "5411"), (Standards.NAICS2017, "454390")),
+ ((Standards.SIC, "5421"), (Standards.NAICS2017, "445210")),
+ ((Standards.SIC, "5421"), (Standards.NAICS2017, "445220")),
+ ((Standards.SIC, "5421"), (Standards.NAICS2017, "454390")),
+ ((Standards.SIC, "5431"), (Standards.NAICS2017, "445230")),
+ ((Standards.SIC, "5431"), (Standards.NAICS2017, "454390")),
+ ((Standards.SIC, "5441"), (Standards.NAICS2017, "311340")),
+ ((Standards.SIC, "5441"), (Standards.NAICS2017, "311352")),
+ ((Standards.SIC, "5441"), (Standards.NAICS2017, "445292")),
+ ((Standards.SIC, "5451"), (Standards.NAICS2017, "445299")),
+ ((Standards.SIC, "5461"), (Standards.NAICS2017, "311811")),
+ ((Standards.SIC, "5461"), (Standards.NAICS2017, "445291")),
+ ((Standards.SIC, "5461"), (Standards.NAICS2017, "722515")),
+ ((Standards.SIC, "5499"), (Standards.NAICS2017, "445210")),
+ ((Standards.SIC, "5499"), (Standards.NAICS2017, "445299")),
+ ((Standards.SIC, "5499"), (Standards.NAICS2017, "446191")),
+ ((Standards.SIC, "5511"), (Standards.NAICS2017, "441110")),
+ ((Standards.SIC, "5521"), (Standards.NAICS2017, "441120")),
+ ((Standards.SIC, "5531"), (Standards.NAICS2017, "441310")),
+ ((Standards.SIC, "5531"), (Standards.NAICS2017, "441320")),
+ ((Standards.SIC, "5531"), (Standards.NAICS2017, "452319")),
+ ((Standards.SIC, "5541"), (Standards.NAICS2017, "447110")),
+ ((Standards.SIC, "5541"), (Standards.NAICS2017, "447190")),
+ ((Standards.SIC, "5551"), (Standards.NAICS2017, "441222")),
+ ((Standards.SIC, "5561"), (Standards.NAICS2017, "441210")),
+ ((Standards.SIC, "5571"), (Standards.NAICS2017, "441228")),
+ ((Standards.SIC, "5599"), (Standards.NAICS2017, "441228")),
+ ((Standards.SIC, "5611"), (Standards.NAICS2017, "448110")),
+ ((Standards.SIC, "5611"), (Standards.NAICS2017, "448150")),
+ ((Standards.SIC, "5621"), (Standards.NAICS2017, "448120")),
+ ((Standards.SIC, "5621"), (Standards.NAICS2017, "448190")),
+ ((Standards.SIC, "5632"), (Standards.NAICS2017, "448150")),
+ ((Standards.SIC, "5632"), (Standards.NAICS2017, "448190")),
+ ((Standards.SIC, "5641"), (Standards.NAICS2017, "448130")),
+ ((Standards.SIC, "5651"), (Standards.NAICS2017, "448140")),
+ ((Standards.SIC, "5661"), (Standards.NAICS2017, "448210")),
+ ((Standards.SIC, "5699"), (Standards.NAICS2017, "315220")),
+ ((Standards.SIC, "5699"), (Standards.NAICS2017, "315240")),
+ ((Standards.SIC, "5699"), (Standards.NAICS2017, "448150")),
+ ((Standards.SIC, "5699"), (Standards.NAICS2017, "448190")),
+ ((Standards.SIC, "5712"), (Standards.NAICS2017, "337110")),
+ ((Standards.SIC, "5712"), (Standards.NAICS2017, "337121")),
+ ((Standards.SIC, "5712"), (Standards.NAICS2017, "337122")),
+ ((Standards.SIC, "5712"), (Standards.NAICS2017, "442110")),
+ ((Standards.SIC, "5713"), (Standards.NAICS2017, "442210")),
+ ((Standards.SIC, "5714"), (Standards.NAICS2017, "314120")),
+ ((Standards.SIC, "5714"), (Standards.NAICS2017, "442291")),
+ ((Standards.SIC, "5714"), (Standards.NAICS2017, "451130")),
+ ((Standards.SIC, "5719"), (Standards.NAICS2017, "327110")),
+ ((Standards.SIC, "5719"), (Standards.NAICS2017, "442291")),
+ ((Standards.SIC, "5719"), (Standards.NAICS2017, "442299")),
+ ((Standards.SIC, "5722"), (Standards.NAICS2017, "443141")),
+ ((Standards.SIC, "5731"), (Standards.NAICS2017, "441310")),
+ ((Standards.SIC, "5731"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5734"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5735"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5736"), (Standards.NAICS2017, "451140")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "711110")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "722310")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "722320")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "722511")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "722513")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "722514")),
+ ((Standards.SIC, "5812"), (Standards.NAICS2017, "722515")),
+ ((Standards.SIC, "5813"), (Standards.NAICS2017, "722410")),
+ ((Standards.SIC, "5912"), (Standards.NAICS2017, "446110")),
+ ((Standards.SIC, "5921"), (Standards.NAICS2017, "445310")),
+ ((Standards.SIC, "5932"), (Standards.NAICS2017, "453310")),
+ ((Standards.SIC, "5932"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "5941"), (Standards.NAICS2017, "451110")),
+ ((Standards.SIC, "5942"), (Standards.NAICS2017, "451211")),
+ ((Standards.SIC, "5943"), (Standards.NAICS2017, "453210")),
+ ((Standards.SIC, "5944"), (Standards.NAICS2017, "448310")),
+ ((Standards.SIC, "5945"), (Standards.NAICS2017, "451120")),
+ ((Standards.SIC, "5946"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5947"), (Standards.NAICS2017, "453220")),
+ ((Standards.SIC, "5948"), (Standards.NAICS2017, "448320")),
+ ((Standards.SIC, "5949"), (Standards.NAICS2017, "451130")),
+ ((Standards.SIC, "5961"), (Standards.NAICS2017, "454110")),
+ ((Standards.SIC, "5962"), (Standards.NAICS2017, "454210")),
+ ((Standards.SIC, "5963"), (Standards.NAICS2017, "454390")),
+ ((Standards.SIC, "5963"), (Standards.NAICS2017, "722330")),
+ ((Standards.SIC, "5983"), (Standards.NAICS2017, "454310")),
+ ((Standards.SIC, "5984"), (Standards.NAICS2017, "454310")),
+ ((Standards.SIC, "5989"), (Standards.NAICS2017, "454310")),
+ ((Standards.SIC, "5992"), (Standards.NAICS2017, "453110")),
+ ((Standards.SIC, "5993"), (Standards.NAICS2017, "453991")),
+ ((Standards.SIC, "5994"), (Standards.NAICS2017, "451212")),
+ ((Standards.SIC, "5995"), (Standards.NAICS2017, "339115")),
+ ((Standards.SIC, "5995"), (Standards.NAICS2017, "446130")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "339113")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "446120")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "446199")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "453910")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "453920")),
+ ((Standards.SIC, "5999"), (Standards.NAICS2017, "453998")),
+ ((Standards.SIC, "6011"), (Standards.NAICS2017, "521110")),
+ ((Standards.SIC, "6019"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6021"), (Standards.NAICS2017, "522110")),
+ ((Standards.SIC, "6021"), (Standards.NAICS2017, "522210")),
+ ((Standards.SIC, "6022"), (Standards.NAICS2017, "522110")),
+ ((Standards.SIC, "6022"), (Standards.NAICS2017, "522190")),
+ ((Standards.SIC, "6022"), (Standards.NAICS2017, "522210")),
+ ((Standards.SIC, "6029"), (Standards.NAICS2017, "522110")),
+ ((Standards.SIC, "6035"), (Standards.NAICS2017, "522120")),
+ ((Standards.SIC, "6036"), (Standards.NAICS2017, "522120")),
+ ((Standards.SIC, "6061"), (Standards.NAICS2017, "522130")),
+ ((Standards.SIC, "6062"), (Standards.NAICS2017, "522130")),
+ ((Standards.SIC, "6081"), (Standards.NAICS2017, "522110")),
+ ((Standards.SIC, "6081"), (Standards.NAICS2017, "522293")),
+ ((Standards.SIC, "6081"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6082"), (Standards.NAICS2017, "522293")),
+ ((Standards.SIC, "6082"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6091"), (Standards.NAICS2017, "523991")),
+ ((Standards.SIC, "6099"), (Standards.NAICS2017, "522320")),
+ ((Standards.SIC, "6099"), (Standards.NAICS2017, "522390")),
+ ((Standards.SIC, "6099"), (Standards.NAICS2017, "523130")),
+ ((Standards.SIC, "6099"), (Standards.NAICS2017, "523991")),
+ ((Standards.SIC, "6111"), (Standards.NAICS2017, "522292")),
+ ((Standards.SIC, "6111"), (Standards.NAICS2017, "522293")),
+ ((Standards.SIC, "6111"), (Standards.NAICS2017, "522294")),
+ ((Standards.SIC, "6111"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6141"), (Standards.NAICS2017, "522210")),
+ ((Standards.SIC, "6141"), (Standards.NAICS2017, "522220")),
+ ((Standards.SIC, "6141"), (Standards.NAICS2017, "522291")),
+ ((Standards.SIC, "6141"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6153"), (Standards.NAICS2017, "522210")),
+ ((Standards.SIC, "6153"), (Standards.NAICS2017, "522220")),
+ ((Standards.SIC, "6153"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6153"), (Standards.NAICS2017, "522320")),
+ ((Standards.SIC, "6153"), (Standards.NAICS2017, "523910")),
+ ((Standards.SIC, "6159"), (Standards.NAICS2017, "522220")),
+ ((Standards.SIC, "6159"), (Standards.NAICS2017, "522292")),
+ ((Standards.SIC, "6159"), (Standards.NAICS2017, "522293")),
+ ((Standards.SIC, "6159"), (Standards.NAICS2017, "522294")),
+ ((Standards.SIC, "6159"), (Standards.NAICS2017, "522298")),
+ ((Standards.SIC, "6162"), (Standards.NAICS2017, "522292")),
+ ((Standards.SIC, "6162"), (Standards.NAICS2017, "522390")),
+ ((Standards.SIC, "6163"), (Standards.NAICS2017, "522310")),
+ ((Standards.SIC, "6211"), (Standards.NAICS2017, "523110")),
+ ((Standards.SIC, "6211"), (Standards.NAICS2017, "523120")),
+ ((Standards.SIC, "6211"), (Standards.NAICS2017, "523910")),
+ ((Standards.SIC, "6211"), (Standards.NAICS2017, "523999")),
+ ((Standards.SIC, "6221"), (Standards.NAICS2017, "523130")),
+ ((Standards.SIC, "6221"), (Standards.NAICS2017, "523140")),
+ ((Standards.SIC, "6231"), (Standards.NAICS2017, "523210")),
+ ((Standards.SIC, "6282"), (Standards.NAICS2017, "523920")),
+ ((Standards.SIC, "6282"), (Standards.NAICS2017, "523930")),
+ ((Standards.SIC, "6289"), (Standards.NAICS2017, "523991")),
+ ((Standards.SIC, "6289"), (Standards.NAICS2017, "523999")),
+ ((Standards.SIC, "6311"), (Standards.NAICS2017, "524113")),
+ ((Standards.SIC, "6311"), (Standards.NAICS2017, "524128")),
+ ((Standards.SIC, "6311"), (Standards.NAICS2017, "524130")),
+ ((Standards.SIC, "6321"), (Standards.NAICS2017, "524113")),
+ ((Standards.SIC, "6321"), (Standards.NAICS2017, "524114")),
+ ((Standards.SIC, "6321"), (Standards.NAICS2017, "524130")),
+ ((Standards.SIC, "6321"), (Standards.NAICS2017, "525190")),
+ ((Standards.SIC, "6324"), (Standards.NAICS2017, "524114")),
+ ((Standards.SIC, "6324"), (Standards.NAICS2017, "524130")),
+ ((Standards.SIC, "6324"), (Standards.NAICS2017, "525190")),
+ ((Standards.SIC, "6331"), (Standards.NAICS2017, "524126")),
+ ((Standards.SIC, "6331"), (Standards.NAICS2017, "524128")),
+ ((Standards.SIC, "6331"), (Standards.NAICS2017, "524130")),
+ ((Standards.SIC, "6331"), (Standards.NAICS2017, "525190")),
+ ((Standards.SIC, "6351"), (Standards.NAICS2017, "524126")),
+ ((Standards.SIC, "6351"), (Standards.NAICS2017, "524128")),
+ ((Standards.SIC, "6351"), (Standards.NAICS2017, "524130")),
+ ((Standards.SIC, "6361"), (Standards.NAICS2017, "524127")),
+ ((Standards.SIC, "6361"), (Standards.NAICS2017, "524130")),
+ ((Standards.SIC, "6371"), (Standards.NAICS2017, "523920")),
+ ((Standards.SIC, "6371"), (Standards.NAICS2017, "524292")),
+ ((Standards.SIC, "6371"), (Standards.NAICS2017, "525110")),
+ ((Standards.SIC, "6371"), (Standards.NAICS2017, "525120")),
+ ((Standards.SIC, "6371"), (Standards.NAICS2017, "525990")),
+ ((Standards.SIC, "6399"), (Standards.NAICS2017, "524128")),
+ ((Standards.SIC, "6411"), (Standards.NAICS2017, "524210")),
+ ((Standards.SIC, "6411"), (Standards.NAICS2017, "524291")),
+ ((Standards.SIC, "6411"), (Standards.NAICS2017, "524292")),
+ ((Standards.SIC, "6411"), (Standards.NAICS2017, "524298")),
+ ((Standards.SIC, "6512"), (Standards.NAICS2017, "531120")),
+ ((Standards.SIC, "6512"), (Standards.NAICS2017, "711310")),
+ ((Standards.SIC, "6513"), (Standards.NAICS2017, "531110")),
+ ((Standards.SIC, "6514"), (Standards.NAICS2017, "531110")),
+ ((Standards.SIC, "6515"), (Standards.NAICS2017, "531190")),
+ ((Standards.SIC, "6517"), (Standards.NAICS2017, "531190")),
+ ((Standards.SIC, "6519"), (Standards.NAICS2017, "531190")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "531110")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "531210")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "531311")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "531312")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "531320")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "531390")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "812220")),
+ ((Standards.SIC, "6531"), (Standards.NAICS2017, "813990")),
+ ((Standards.SIC, "6541"), (Standards.NAICS2017, "541191")),
+ ((Standards.SIC, "6552"), (Standards.NAICS2017, "237210")),
+ ((Standards.SIC, "6553"), (Standards.NAICS2017, "812220")),
+ ((Standards.SIC, "6712"), (Standards.NAICS2017, "551111")),
+ ((Standards.SIC, "6719"), (Standards.NAICS2017, "551112")),
+ ((Standards.SIC, "6722"), (Standards.NAICS2017, "525910")),
+ ((Standards.SIC, "6726"), (Standards.NAICS2017, "525990")),
+ ((Standards.SIC, "6732"), (Standards.NAICS2017, "813211")),
+ ((Standards.SIC, "6733"), (Standards.NAICS2017, "523920")),
+ ((Standards.SIC, "6733"), (Standards.NAICS2017, "523991")),
+ ((Standards.SIC, "6733"), (Standards.NAICS2017, "525190")),
+ ((Standards.SIC, "6733"), (Standards.NAICS2017, "525920")),
+ ((Standards.SIC, "6792"), (Standards.NAICS2017, "523910")),
+ ((Standards.SIC, "6792"), (Standards.NAICS2017, "533110")),
+ ((Standards.SIC, "6794"), (Standards.NAICS2017, "533110")),
+ ((Standards.SIC, "6798"), (Standards.NAICS2017, "525990")),
+ ((Standards.SIC, "6798"), (Standards.NAICS2017, "531110")),
+ ((Standards.SIC, "6798"), (Standards.NAICS2017, "531120")),
+ ((Standards.SIC, "6798"), (Standards.NAICS2017, "531130")),
+ ((Standards.SIC, "6798"), (Standards.NAICS2017, "531190")),
+ ((Standards.SIC, "6799"), (Standards.NAICS2017, "523130")),
+ ((Standards.SIC, "6799"), (Standards.NAICS2017, "523910")),
+ ((Standards.SIC, "6799"), (Standards.NAICS2017, "523920")),
+ ((Standards.SIC, "7011"), (Standards.NAICS2017, "721110")),
+ ((Standards.SIC, "7011"), (Standards.NAICS2017, "721120")),
+ ((Standards.SIC, "7011"), (Standards.NAICS2017, "721191")),
+ ((Standards.SIC, "7011"), (Standards.NAICS2017, "721199")),
+ ((Standards.SIC, "7021"), (Standards.NAICS2017, "721310")),
+ ((Standards.SIC, "7032"), (Standards.NAICS2017, "721214")),
+ ((Standards.SIC, "7033"), (Standards.NAICS2017, "721211")),
+ ((Standards.SIC, "7041"), (Standards.NAICS2017, "721110")),
+ ((Standards.SIC, "7041"), (Standards.NAICS2017, "721310")),
+ ((Standards.SIC, "7211"), (Standards.NAICS2017, "812320")),
+ ((Standards.SIC, "7212"), (Standards.NAICS2017, "812320")),
+ ((Standards.SIC, "7213"), (Standards.NAICS2017, "812331")),
+ ((Standards.SIC, "7215"), (Standards.NAICS2017, "812310")),
+ ((Standards.SIC, "7216"), (Standards.NAICS2017, "812320")),
+ ((Standards.SIC, "7217"), (Standards.NAICS2017, "561740")),
+ ((Standards.SIC, "7218"), (Standards.NAICS2017, "812332")),
+ ((Standards.SIC, "7219"), (Standards.NAICS2017, "811490")),
+ ((Standards.SIC, "7219"), (Standards.NAICS2017, "812320")),
+ ((Standards.SIC, "7219"), (Standards.NAICS2017, "812331")),
+ ((Standards.SIC, "7221"), (Standards.NAICS2017, "541921")),
+ ((Standards.SIC, "7231"), (Standards.NAICS2017, "611511")),
+ ((Standards.SIC, "7231"), (Standards.NAICS2017, "812112")),
+ ((Standards.SIC, "7231"), (Standards.NAICS2017, "812113")),
+ ((Standards.SIC, "7241"), (Standards.NAICS2017, "611511")),
+ ((Standards.SIC, "7241"), (Standards.NAICS2017, "812111")),
+ ((Standards.SIC, "7251"), (Standards.NAICS2017, "811430")),
+ ((Standards.SIC, "7251"), (Standards.NAICS2017, "812320")),
+ ((Standards.SIC, "7251"), (Standards.NAICS2017, "812990")),
+ ((Standards.SIC, "7261"), (Standards.NAICS2017, "812210")),
+ ((Standards.SIC, "7261"), (Standards.NAICS2017, "812220")),
+ ((Standards.SIC, "7291"), (Standards.NAICS2017, "541213")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "532281")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "541990")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "561311")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "561990")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "812191")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "812199")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "812930")),
+ ((Standards.SIC, "7299"), (Standards.NAICS2017, "812990")),
+ ((Standards.SIC, "7311"), (Standards.NAICS2017, "541810")),
+ ((Standards.SIC, "7312"), (Standards.NAICS2017, "541850")),
+ ((Standards.SIC, "7313"), (Standards.NAICS2017, "541840")),
+ ((Standards.SIC, "7319"), (Standards.NAICS2017, "481219")),
+ ((Standards.SIC, "7319"), (Standards.NAICS2017, "541830")),
+ ((Standards.SIC, "7319"), (Standards.NAICS2017, "541850")),
+ ((Standards.SIC, "7319"), (Standards.NAICS2017, "541870")),
+ ((Standards.SIC, "7319"), (Standards.NAICS2017, "541890")),
+ ((Standards.SIC, "7322"), (Standards.NAICS2017, "561440")),
+ ((Standards.SIC, "7323"), (Standards.NAICS2017, "561450")),
+ ((Standards.SIC, "7331"), (Standards.NAICS2017, "511140")),
+ ((Standards.SIC, "7331"), (Standards.NAICS2017, "541860")),
+ ((Standards.SIC, "7334"), (Standards.NAICS2017, "323111")),
+ ((Standards.SIC, "7334"), (Standards.NAICS2017, "561439")),
+ ((Standards.SIC, "7335"), (Standards.NAICS2017, "481219")),
+ ((Standards.SIC, "7335"), (Standards.NAICS2017, "541922")),
+ ((Standards.SIC, "7336"), (Standards.NAICS2017, "541430")),
+ ((Standards.SIC, "7338"), (Standards.NAICS2017, "561410")),
+ ((Standards.SIC, "7338"), (Standards.NAICS2017, "561492")),
+ ((Standards.SIC, "7342"), (Standards.NAICS2017, "561710")),
+ ((Standards.SIC, "7342"), (Standards.NAICS2017, "561720")),
+ ((Standards.SIC, "7349"), (Standards.NAICS2017, "561720")),
+ ((Standards.SIC, "7349"), (Standards.NAICS2017, "561790")),
+ ((Standards.SIC, "7352"), (Standards.NAICS2017, "532283")),
+ ((Standards.SIC, "7352"), (Standards.NAICS2017, "532490")),
+ ((Standards.SIC, "7353"), (Standards.NAICS2017, "238910")),
+ ((Standards.SIC, "7353"), (Standards.NAICS2017, "238990")),
+ ((Standards.SIC, "7353"), (Standards.NAICS2017, "532412")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532210")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532289")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532310")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532411")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532412")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532420")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "532490")),
+ ((Standards.SIC, "7359"), (Standards.NAICS2017, "562991")),
+ ((Standards.SIC, "7361"), (Standards.NAICS2017, "541612")),
+ ((Standards.SIC, "7361"), (Standards.NAICS2017, "561311")),
+ ((Standards.SIC, "7361"), (Standards.NAICS2017, "561312")),
+ ((Standards.SIC, "7363"), (Standards.NAICS2017, "561320")),
+ ((Standards.SIC, "7363"), (Standards.NAICS2017, "561330")),
+ ((Standards.SIC, "7371"), (Standards.NAICS2017, "541511")),
+ ((Standards.SIC, "7372"), (Standards.NAICS2017, "334614")),
+ ((Standards.SIC, "7372"), (Standards.NAICS2017, "511210")),
+ ((Standards.SIC, "7373"), (Standards.NAICS2017, "541512")),
+ ((Standards.SIC, "7374"), (Standards.NAICS2017, "518210")),
+ ((Standards.SIC, "7375"), (Standards.NAICS2017, "517311")),
+ ((Standards.SIC, "7375"), (Standards.NAICS2017, "517919")),
+ ((Standards.SIC, "7376"), (Standards.NAICS2017, "541513")),
+ ((Standards.SIC, "7377"), (Standards.NAICS2017, "532420")),
+ ((Standards.SIC, "7378"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "7378"), (Standards.NAICS2017, "811212")),
+ ((Standards.SIC, "7379"), (Standards.NAICS2017, "518210")),
+ ((Standards.SIC, "7379"), (Standards.NAICS2017, "541512")),
+ ((Standards.SIC, "7379"), (Standards.NAICS2017, "541519")),
+ ((Standards.SIC, "7381"), (Standards.NAICS2017, "561611")),
+ ((Standards.SIC, "7381"), (Standards.NAICS2017, "561612")),
+ ((Standards.SIC, "7381"), (Standards.NAICS2017, "561613")),
+ ((Standards.SIC, "7382"), (Standards.NAICS2017, "561621")),
+ ((Standards.SIC, "7383"), (Standards.NAICS2017, "519110")),
+ ((Standards.SIC, "7383"), (Standards.NAICS2017, "711510")),
+ ((Standards.SIC, "7384"), (Standards.NAICS2017, "812921")),
+ ((Standards.SIC, "7384"), (Standards.NAICS2017, "812922")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "312230")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "313310")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "314999")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "325998")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "425120")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "488490")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "491110")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "512240")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "512290")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "518210")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "519190")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "522320")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541199")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541340")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541350")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541370")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541410")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541420")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541490")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541870")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541890")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541930")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "541990")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561410")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561421")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561422")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561431")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561439")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561440")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561491")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561499")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561591")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561599")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561790")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561910")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561920")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "561990")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "711310")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "711320")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "711410")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "812320")),
+ ((Standards.SIC, "7389"), (Standards.NAICS2017, "812990")),
+ ((Standards.SIC, "7513"), (Standards.NAICS2017, "532120")),
+ ((Standards.SIC, "7514"), (Standards.NAICS2017, "532111")),
+ ((Standards.SIC, "7515"), (Standards.NAICS2017, "532112")),
+ ((Standards.SIC, "7519"), (Standards.NAICS2017, "532120")),
+ ((Standards.SIC, "7521"), (Standards.NAICS2017, "812930")),
+ ((Standards.SIC, "7532"), (Standards.NAICS2017, "811121")),
+ ((Standards.SIC, "7533"), (Standards.NAICS2017, "811112")),
+ ((Standards.SIC, "7534"), (Standards.NAICS2017, "326212")),
+ ((Standards.SIC, "7534"), (Standards.NAICS2017, "811198")),
+ ((Standards.SIC, "7536"), (Standards.NAICS2017, "811122")),
+ ((Standards.SIC, "7537"), (Standards.NAICS2017, "811113")),
+ ((Standards.SIC, "7538"), (Standards.NAICS2017, "811111")),
+ ((Standards.SIC, "7539"), (Standards.NAICS2017, "811118")),
+ ((Standards.SIC, "7539"), (Standards.NAICS2017, "811198")),
+ ((Standards.SIC, "7542"), (Standards.NAICS2017, "811192")),
+ ((Standards.SIC, "7549"), (Standards.NAICS2017, "488410")),
+ ((Standards.SIC, "7549"), (Standards.NAICS2017, "811122")),
+ ((Standards.SIC, "7549"), (Standards.NAICS2017, "811191")),
+ ((Standards.SIC, "7549"), (Standards.NAICS2017, "811198")),
+ ((Standards.SIC, "7622"), (Standards.NAICS2017, "238290")),
+ ((Standards.SIC, "7622"), (Standards.NAICS2017, "443142")),
+ ((Standards.SIC, "7622"), (Standards.NAICS2017, "811211")),
+ ((Standards.SIC, "7622"), (Standards.NAICS2017, "811213")),
+ ((Standards.SIC, "7623"), (Standards.NAICS2017, "443141")),
+ ((Standards.SIC, "7623"), (Standards.NAICS2017, "811310")),
+ ((Standards.SIC, "7623"), (Standards.NAICS2017, "811412")),
+ ((Standards.SIC, "7629"), (Standards.NAICS2017, "443141")),
+ ((Standards.SIC, "7629"), (Standards.NAICS2017, "811211")),
+ ((Standards.SIC, "7629"), (Standards.NAICS2017, "811212")),
+ ((Standards.SIC, "7629"), (Standards.NAICS2017, "811213")),
+ ((Standards.SIC, "7629"), (Standards.NAICS2017, "811219")),
+ ((Standards.SIC, "7629"), (Standards.NAICS2017, "811412")),
+ ((Standards.SIC, "7631"), (Standards.NAICS2017, "448310")),
+ ((Standards.SIC, "7631"), (Standards.NAICS2017, "811490")),
+ ((Standards.SIC, "7641"), (Standards.NAICS2017, "811420")),
+ ((Standards.SIC, "7692"), (Standards.NAICS2017, "811310")),
+ ((Standards.SIC, "7694"), (Standards.NAICS2017, "335312")),
+ ((Standards.SIC, "7694"), (Standards.NAICS2017, "811310")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "115210")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "238220")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "442299")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "444130")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "444210")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "451110")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "488390")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "561622")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "561790")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "562991")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "562998")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "711510")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811211")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811212")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811219")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811310")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811411")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811412")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811430")),
+ ((Standards.SIC, "7699"), (Standards.NAICS2017, "811490")),
+ ((Standards.SIC, "7812"), (Standards.NAICS2017, "512110")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "334614")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "512191")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "512199")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "532281")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "532490")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "541214")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "541690")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "561311")),
+ ((Standards.SIC, "7819"), (Standards.NAICS2017, "711510")),
+ ((Standards.SIC, "7822"), (Standards.NAICS2017, "423990")),
+ ((Standards.SIC, "7822"), (Standards.NAICS2017, "512120")),
+ ((Standards.SIC, "7829"), (Standards.NAICS2017, "512120")),
+ ((Standards.SIC, "7829"), (Standards.NAICS2017, "512199")),
+ ((Standards.SIC, "7829"), (Standards.NAICS2017, "519120")),
+ ((Standards.SIC, "7832"), (Standards.NAICS2017, "512131")),
+ ((Standards.SIC, "7833"), (Standards.NAICS2017, "512132")),
+ ((Standards.SIC, "7841"), (Standards.NAICS2017, "532282")),
+ ((Standards.SIC, "7911"), (Standards.NAICS2017, "611610")),
+ ((Standards.SIC, "7911"), (Standards.NAICS2017, "713990")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "512290")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "532490")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "561311")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "561599")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "711110")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "711120")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "711310")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "711320")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "711410")),
+ ((Standards.SIC, "7922"), (Standards.NAICS2017, "711510")),
+ ((Standards.SIC, "7929"), (Standards.NAICS2017, "711130")),
+ ((Standards.SIC, "7929"), (Standards.NAICS2017, "711190")),
+ ((Standards.SIC, "7929"), (Standards.NAICS2017, "711510")),
+ ((Standards.SIC, "7933"), (Standards.NAICS2017, "713950")),
+ ((Standards.SIC, "7941"), (Standards.NAICS2017, "711211")),
+ ((Standards.SIC, "7941"), (Standards.NAICS2017, "711310")),
+ ((Standards.SIC, "7941"), (Standards.NAICS2017, "711320")),
+ ((Standards.SIC, "7941"), (Standards.NAICS2017, "711410")),
+ ((Standards.SIC, "7948"), (Standards.NAICS2017, "711212")),
+ ((Standards.SIC, "7948"), (Standards.NAICS2017, "711219")),
+ ((Standards.SIC, "7991"), (Standards.NAICS2017, "713940")),
+ ((Standards.SIC, "7992"), (Standards.NAICS2017, "713910")),
+ ((Standards.SIC, "7993"), (Standards.NAICS2017, "713120")),
+ ((Standards.SIC, "7993"), (Standards.NAICS2017, "713290")),
+ ((Standards.SIC, "7993"), (Standards.NAICS2017, "713990")),
+ ((Standards.SIC, "7996"), (Standards.NAICS2017, "713110")),
+ ((Standards.SIC, "7997"), (Standards.NAICS2017, "481219")),
+ ((Standards.SIC, "7997"), (Standards.NAICS2017, "488119")),
+ ((Standards.SIC, "7997"), (Standards.NAICS2017, "713910")),
+ ((Standards.SIC, "7997"), (Standards.NAICS2017, "713940")),
+ ((Standards.SIC, "7997"), (Standards.NAICS2017, "713990")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "487110")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "487210")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "487990")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "532284")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "561599")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "611620")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "611699")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "711190")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "711219")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "711310")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "711320")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "712190")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "713210")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "713290")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "713920")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "713940")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "713990")),
+ ((Standards.SIC, "7999"), (Standards.NAICS2017, "812990")),
+ ((Standards.SIC, "8011"), (Standards.NAICS2017, "621111")),
+ ((Standards.SIC, "8011"), (Standards.NAICS2017, "621112")),
+ ((Standards.SIC, "8011"), (Standards.NAICS2017, "621491")),
+ ((Standards.SIC, "8011"), (Standards.NAICS2017, "621493")),
+ ((Standards.SIC, "8021"), (Standards.NAICS2017, "621210")),
+ ((Standards.SIC, "8031"), (Standards.NAICS2017, "621111")),
+ ((Standards.SIC, "8031"), (Standards.NAICS2017, "621112")),
+ ((Standards.SIC, "8041"), (Standards.NAICS2017, "621310")),
+ ((Standards.SIC, "8042"), (Standards.NAICS2017, "621320")),
+ ((Standards.SIC, "8043"), (Standards.NAICS2017, "621391")),
+ ((Standards.SIC, "8049"), (Standards.NAICS2017, "621330")),
+ ((Standards.SIC, "8049"), (Standards.NAICS2017, "621340")),
+ ((Standards.SIC, "8049"), (Standards.NAICS2017, "621399")),
+ ((Standards.SIC, "8051"), (Standards.NAICS2017, "623110")),
+ ((Standards.SIC, "8051"), (Standards.NAICS2017, "623210")),
+ ((Standards.SIC, "8051"), (Standards.NAICS2017, "623311")),
+ ((Standards.SIC, "8052"), (Standards.NAICS2017, "623110")),
+ ((Standards.SIC, "8052"), (Standards.NAICS2017, "623210")),
+ ((Standards.SIC, "8052"), (Standards.NAICS2017, "623311")),
+ ((Standards.SIC, "8059"), (Standards.NAICS2017, "623110")),
+ ((Standards.SIC, "8059"), (Standards.NAICS2017, "623210")),
+ ((Standards.SIC, "8059"), (Standards.NAICS2017, "623311")),
+ ((Standards.SIC, "8062"), (Standards.NAICS2017, "622110")),
+ ((Standards.SIC, "8063"), (Standards.NAICS2017, "622210")),
+ ((Standards.SIC, "8069"), (Standards.NAICS2017, "622110")),
+ ((Standards.SIC, "8069"), (Standards.NAICS2017, "622210")),
+ ((Standards.SIC, "8069"), (Standards.NAICS2017, "622310")),
+ ((Standards.SIC, "8071"), (Standards.NAICS2017, "621511")),
+ ((Standards.SIC, "8071"), (Standards.NAICS2017, "621512")),
+ ((Standards.SIC, "8072"), (Standards.NAICS2017, "339116")),
+ ((Standards.SIC, "8082"), (Standards.NAICS2017, "621610")),
+ ((Standards.SIC, "8092"), (Standards.NAICS2017, "621492")),
+ ((Standards.SIC, "8093"), (Standards.NAICS2017, "621399")),
+ ((Standards.SIC, "8093"), (Standards.NAICS2017, "621410")),
+ ((Standards.SIC, "8093"), (Standards.NAICS2017, "621420")),
+ ((Standards.SIC, "8093"), (Standards.NAICS2017, "621498")),
+ ((Standards.SIC, "8099"), (Standards.NAICS2017, "541430")),
+ ((Standards.SIC, "8099"), (Standards.NAICS2017, "541922")),
+ ((Standards.SIC, "8099"), (Standards.NAICS2017, "621410")),
+ ((Standards.SIC, "8099"), (Standards.NAICS2017, "621991")),
+ ((Standards.SIC, "8099"), (Standards.NAICS2017, "621999")),
+ ((Standards.SIC, "8111"), (Standards.NAICS2017, "541110")),
+ ((Standards.SIC, "8211"), (Standards.NAICS2017, "611110")),
+ ((Standards.SIC, "8221"), (Standards.NAICS2017, "611310")),
+ ((Standards.SIC, "8222"), (Standards.NAICS2017, "611210")),
+ ((Standards.SIC, "8231"), (Standards.NAICS2017, "519120")),
+ ((Standards.SIC, "8243"), (Standards.NAICS2017, "611420")),
+ ((Standards.SIC, "8243"), (Standards.NAICS2017, "611519")),
+ ((Standards.SIC, "8244"), (Standards.NAICS2017, "611410")),
+ ((Standards.SIC, "8249"), (Standards.NAICS2017, "611512")),
+ ((Standards.SIC, "8249"), (Standards.NAICS2017, "611513")),
+ ((Standards.SIC, "8249"), (Standards.NAICS2017, "611519")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611430")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611512")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611519")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611610")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611620")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611630")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611691")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611692")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611699")),
+ ((Standards.SIC, "8299"), (Standards.NAICS2017, "611710")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624110")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624120")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624190")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624210")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624221")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624229")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "624230")),
+ ((Standards.SIC, "8322"), (Standards.NAICS2017, "922150")),
+ ((Standards.SIC, "8331"), (Standards.NAICS2017, "624310")),
+ ((Standards.SIC, "8351"), (Standards.NAICS2017, "624410")),
+ ((Standards.SIC, "8361"), (Standards.NAICS2017, "623210")),
+ ((Standards.SIC, "8361"), (Standards.NAICS2017, "623220")),
+ ((Standards.SIC, "8361"), (Standards.NAICS2017, "623312")),
+ ((Standards.SIC, "8361"), (Standards.NAICS2017, "623990")),
+ ((Standards.SIC, "8399"), (Standards.NAICS2017, "813212")),
+ ((Standards.SIC, "8399"), (Standards.NAICS2017, "813219")),
+ ((Standards.SIC, "8399"), (Standards.NAICS2017, "813311")),
+ ((Standards.SIC, "8399"), (Standards.NAICS2017, "813312")),
+ ((Standards.SIC, "8399"), (Standards.NAICS2017, "813319")),
+ ((Standards.SIC, "8412"), (Standards.NAICS2017, "712110")),
+ ((Standards.SIC, "8412"), (Standards.NAICS2017, "712120")),
+ ((Standards.SIC, "8422"), (Standards.NAICS2017, "712130")),
+ ((Standards.SIC, "8422"), (Standards.NAICS2017, "712190")),
+ ((Standards.SIC, "8611"), (Standards.NAICS2017, "813910")),
+ ((Standards.SIC, "8621"), (Standards.NAICS2017, "813920")),
+ ((Standards.SIC, "8631"), (Standards.NAICS2017, "813930")),
+ ((Standards.SIC, "8641"), (Standards.NAICS2017, "813319")),
+ ((Standards.SIC, "8641"), (Standards.NAICS2017, "813410")),
+ ((Standards.SIC, "8641"), (Standards.NAICS2017, "813990")),
+ ((Standards.SIC, "8641"), (Standards.NAICS2017, "921150")),
+ ((Standards.SIC, "8651"), (Standards.NAICS2017, "813940")),
+ ((Standards.SIC, "8661"), (Standards.NAICS2017, "813110")),
+ ((Standards.SIC, "8699"), (Standards.NAICS2017, "561599")),
+ ((Standards.SIC, "8699"), (Standards.NAICS2017, "813312")),
+ ((Standards.SIC, "8699"), (Standards.NAICS2017, "813410")),
+ ((Standards.SIC, "8699"), (Standards.NAICS2017, "813910")),
+ ((Standards.SIC, "8699"), (Standards.NAICS2017, "813990")),
+ ((Standards.SIC, "8711"), (Standards.NAICS2017, "541330")),
+ ((Standards.SIC, "8712"), (Standards.NAICS2017, "541310")),
+ ((Standards.SIC, "8713"), (Standards.NAICS2017, "541360")),
+ ((Standards.SIC, "8713"), (Standards.NAICS2017, "541370")),
+ ((Standards.SIC, "8721"), (Standards.NAICS2017, "541211")),
+ ((Standards.SIC, "8721"), (Standards.NAICS2017, "541214")),
+ ((Standards.SIC, "8721"), (Standards.NAICS2017, "541219")),
+ ((Standards.SIC, "8731"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "8731"), (Standards.NAICS2017, "541714")),
+ ((Standards.SIC, "8731"), (Standards.NAICS2017, "541715")),
+ ((Standards.SIC, "8732"), (Standards.NAICS2017, "541720")),
+ ((Standards.SIC, "8732"), (Standards.NAICS2017, "541910")),
+ ((Standards.SIC, "8733"), (Standards.NAICS2017, "541713")),
+ ((Standards.SIC, "8733"), (Standards.NAICS2017, "541714")),
+ ((Standards.SIC, "8733"), (Standards.NAICS2017, "541715")),
+ ((Standards.SIC, "8733"), (Standards.NAICS2017, "541720")),
+ ((Standards.SIC, "8734"), (Standards.NAICS2017, "541380")),
+ ((Standards.SIC, "8734"), (Standards.NAICS2017, "541940")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "236115")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "236116")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "236118")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "236210")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "236220")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "237110")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "237120")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "237130")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "237310")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "237990")),
+ ((Standards.SIC, "8741"), (Standards.NAICS2017, "561110")),
+ ((Standards.SIC, "8742"), (Standards.NAICS2017, "541611")),
+ ((Standards.SIC, "8742"), (Standards.NAICS2017, "541612")),
+ ((Standards.SIC, "8742"), (Standards.NAICS2017, "541613")),
+ ((Standards.SIC, "8742"), (Standards.NAICS2017, "541614")),
+ ((Standards.SIC, "8742"), (Standards.NAICS2017, "561312")),
+ ((Standards.SIC, "8743"), (Standards.NAICS2017, "541820")),
+ ((Standards.SIC, "8744"), (Standards.NAICS2017, "561210")),
+ ((Standards.SIC, "8748"), (Standards.NAICS2017, "541320")),
+ ((Standards.SIC, "8748"), (Standards.NAICS2017, "541330")),
+ ((Standards.SIC, "8748"), (Standards.NAICS2017, "541618")),
+ ((Standards.SIC, "8748"), (Standards.NAICS2017, "541690")),
+ ((Standards.SIC, "8748"), (Standards.NAICS2017, "611710")),
+ ((Standards.SIC, "8811"), (Standards.NAICS2017, "814110")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "512230")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "512250")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "519130")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "541612")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "541620")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "541690")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "541990")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "561312")),
+ ((Standards.SIC, "8999"), (Standards.NAICS2017, "711510")),
+ ((Standards.SIC, "9111"), (Standards.NAICS2017, "921110")),
+ ((Standards.SIC, "9121"), (Standards.NAICS2017, "921120")),
+ ((Standards.SIC, "9131"), (Standards.NAICS2017, "921140")),
+ ((Standards.SIC, "9199"), (Standards.NAICS2017, "921190")),
+ ((Standards.SIC, "9211"), (Standards.NAICS2017, "922110")),
+ ((Standards.SIC, "9221"), (Standards.NAICS2017, "922120")),
+ ((Standards.SIC, "9222"), (Standards.NAICS2017, "922130")),
+ ((Standards.SIC, "9223"), (Standards.NAICS2017, "922140")),
+ ((Standards.SIC, "9224"), (Standards.NAICS2017, "922160")),
+ ((Standards.SIC, "9229"), (Standards.NAICS2017, "922190")),
+ ((Standards.SIC, "9311"), (Standards.NAICS2017, "921130")),
+ ((Standards.SIC, "9411"), (Standards.NAICS2017, "923110")),
+ ((Standards.SIC, "9431"), (Standards.NAICS2017, "923120")),
+ ((Standards.SIC, "9441"), (Standards.NAICS2017, "923130")),
+ ((Standards.SIC, "9451"), (Standards.NAICS2017, "923140")),
+ ((Standards.SIC, "9511"), (Standards.NAICS2017, "924110")),
+ ((Standards.SIC, "9512"), (Standards.NAICS2017, "924120")),
+ ((Standards.SIC, "9531"), (Standards.NAICS2017, "925110")),
+ ((Standards.SIC, "9532"), (Standards.NAICS2017, "925120")),
+ ((Standards.SIC, "9611"), (Standards.NAICS2017, "926110")),
+ ((Standards.SIC, "9621"), (Standards.NAICS2017, "488111")),
+ ((Standards.SIC, "9621"), (Standards.NAICS2017, "926120")),
+ ((Standards.SIC, "9631"), (Standards.NAICS2017, "926130")),
+ ((Standards.SIC, "9641"), (Standards.NAICS2017, "926140")),
+ ((Standards.SIC, "9651"), (Standards.NAICS2017, "926150")),
+ ((Standards.SIC, "9661"), (Standards.NAICS2017, "927110")),
+ ((Standards.SIC, "9711"), (Standards.NAICS2017, "928110")),
+ ((Standards.SIC, "9721"), (Standards.NAICS2017, "928120")),
+ ],
+)
diff --git a/pyisic/types.py b/pyisic/types.py
index 28251f9..19c7287 100644
--- a/pyisic/types.py
+++ b/pyisic/types.py
@@ -44,6 +44,7 @@ class Standards(_Enum): # pragma: no cover
CAEM2005 = "CAEM2005"
CAEM2009 = "CAEM2009"
SBI2008 = "SBI2008"
+ SIC = "SIC"
@_dataclass
| feat: SIC lookup + SIC-NAICS conversion
https://www.naics.com/sic-naics-crosswalk-search-results/
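A rough, illustrative sketch of how crosswalk pairs like those added above can back a SIC-to-NAICS lookup. This is not pyisic's actual API: the plain-string keys and the `sic_to_naics` helper are hypothetical stand-ins for the `Standards` enum members used in the patch, and the three sample pairs are taken from the SIC 5411 entries above.

```python
from collections import defaultdict

# Each concordance entry is a pair of (standard, code) tuples; grouping on
# the source side yields a many-to-many lookup table.
pairs = [
    (("SIC", "5411"), ("NAICS2017", "445110")),
    (("SIC", "5411"), ("NAICS2017", "445120")),
    (("SIC", "5411"), ("NAICS2017", "447110")),
]

lookup = defaultdict(set)
for source, target in pairs:
    lookup[source].add(target)

def sic_to_naics(code: str) -> set:
    """Return all NAICS 2017 codes mapped to a given SIC code."""
    return {naics_code for _, naics_code in lookup[("SIC", code)]}

print(sic_to_naics("5411"))  # {'445110', '445120', '447110'} (set order varies)
```

Note that the mapping is deliberately many-to-many: a single SIC code such as 5411 fans out to several NAICS 2017 codes, which is why the patch stores flat pairs rather than a one-to-one dict.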
| 2022-11-15T22:24:40 | 0.0 | [] | [] |
|||
jazzband/django-redis | jazzband__django-redis-639 | 65a43495e4948aa6b7157a4784b063a6dccbe644 | diff --git a/changelog.d/638.bugfix b/changelog.d/638.bugfix
new file mode 100644
index 00000000..ade737ce
--- /dev/null
+++ b/changelog.d/638.bugfix
@@ -0,0 +1,1 @@
+Access `django_redis.cache.DJANGO_REDIS_SCAN_ITERSIZE` and `django_redis.client.herd.CACHE_HERD_TIMEOUT` at runtime so that Django settings are not read at import time.
\ No newline at end of file
diff --git a/django_redis/cache.py b/django_redis/cache.py
index ebf4d14b..0b4d5890 100644
--- a/django_redis/cache.py
+++ b/django_redis/cache.py
@@ -9,8 +9,6 @@
from .exceptions import ConnectionInterrupted
-DJANGO_REDIS_SCAN_ITERSIZE = getattr(settings, "DJANGO_REDIS_SCAN_ITERSIZE", 10)
-
CONNECTION_INTERRUPTED = object()
@@ -45,6 +43,9 @@ def __init__(self, server: str, params: Dict[str, Any]) -> None:
super().__init__(params)
self._server = server
self._params = params
+ self._default_scan_itersize = getattr(
+ settings, "DJANGO_REDIS_SCAN_ITERSIZE", 10
+ )
options = params.get("OPTIONS", {})
self._client_cls = options.get(
@@ -105,7 +106,7 @@ def delete(self, *args, **kwargs):
@omit_exception
def delete_pattern(self, *args, **kwargs):
- kwargs["itersize"] = kwargs.get("itersize", DJANGO_REDIS_SCAN_ITERSIZE)
+ kwargs.setdefault("itersize", self._default_scan_itersize)
return self.client.delete_pattern(*args, **kwargs)
@omit_exception
diff --git a/django_redis/client/herd.py b/django_redis/client/herd.py
index d5ab941d..0c52480f 100644
--- a/django_redis/client/herd.py
+++ b/django_redis/client/herd.py
@@ -21,15 +21,12 @@ class Marker:
pass
-CACHE_HERD_TIMEOUT = getattr(settings, "CACHE_HERD_TIMEOUT", 60)
-
-
-def _is_expired(x):
- if x >= CACHE_HERD_TIMEOUT:
+def _is_expired(x, herd_timeout: int) -> bool:
+ if x >= herd_timeout:
return True
- val = x + random.randint(1, CACHE_HERD_TIMEOUT)
+ val = x + random.randint(1, herd_timeout)
- if val >= CACHE_HERD_TIMEOUT:
+ if val >= herd_timeout:
return True
return False
@@ -37,6 +34,7 @@ def _is_expired(x):
class HerdClient(DefaultClient):
def __init__(self, *args, **kwargs):
self._marker = Marker()
+ self._herd_timeout = getattr(settings, "CACHE_HERD_TIMEOUT", 60)
super().__init__(*args, **kwargs)
def _pack(self, value, timeout):
@@ -55,7 +53,7 @@ def _unpack(self, value):
now = int(time.time())
if herd_timeout < now:
x = now - herd_timeout
- return unpacked, _is_expired(x)
+ return unpacked, _is_expired(x, self._herd_timeout)
return unpacked, False
@@ -84,7 +82,7 @@ def set(
)
packed = self._pack(value, timeout)
- real_timeout = timeout + CACHE_HERD_TIMEOUT
+ real_timeout = timeout + self._herd_timeout
return super().set(
key, packed, timeout=real_timeout, version=version, client=client, nx=nx
diff --git a/setup.cfg b/setup.cfg
index e0106810..3c76f354 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -100,6 +100,7 @@ REDIS =
passenv = CI GITHUB*
commands =
{envpython} -m pytest --cov-report= --ds=settings.sqlite {posargs}
+ {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_herd {posargs}
{envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_json {posargs}
{envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_lz4 {posargs}
{envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_msgpack {posargs}
| Importing django_redis.cache fails
**Describe the bug**
I can't import `django_redis.cache.RedisCache` to extend it, because the module accesses `django.conf.settings` at import time.
**To Reproduce**
```
$ python -c "import django_redis.cache"
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "./lib/python3.11/site-packages/django_redis/cache.py", line 12, in <module>
DJANGO_REDIS_SCAN_ITERSIZE = getattr(settings, "DJANGO_REDIS_SCAN_ITERSIZE", 10)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "./lib/python3.11/site-packages/django/conf/__init__.py", line 87, in __getattr__
self._setup(name)
File "./lib/python3.11/site-packages/django/conf/__init__.py", line 67, in _setup
raise ImproperlyConfigured(
django.core.exceptions.ImproperlyConfigured: Requested setting DJANGO_REDIS_SCAN_ITERSIZE, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings.
```
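For reference, a minimal sketch of the lazy-access pattern the patch adopts — reading the setting in `__init__` instead of at module level. The class name here is illustrative, not part of django-redis:

```python
from django.conf import settings


class LazySettingsCache:
    """Illustrative sketch: read the setting when instantiated, not when imported."""

    def __init__(self):
        # Safe here: by the time a cache instance is created, the Django
        # settings module has been configured.
        self._default_scan_itersize = getattr(
            settings, "DJANGO_REDIS_SCAN_ITERSIZE", 10
        )

    def delete_pattern(self, pattern, **kwargs):
        kwargs.setdefault("itersize", self._default_scan_itersize)
        return pattern, kwargs  # stand-in for delegating to the client
```

With this change, `python -c "import django_redis.cache"` succeeds even without `DJANGO_SETTINGS_MODULE` set.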
| 2023-01-06T23:28:32 | 0.0 | [] | [] |
|||
skorokithakis/catt | skorokithakis__catt-429 | 585b8d2252475d94b0e693f1e45290815a251047 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 41c39d0..250c225 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,13 @@
# Changelog
+## Unreleased
+
+### Features
+
+* Add volumemute command (#427) [neurodiv-eric]
+
+
## v0.12.10 (2023-01-30)
### Features
diff --git a/catt/controllers.py b/catt/controllers.py
index 0660ddb..86cf116 100644
--- a/catt/controllers.py
+++ b/catt/controllers.py
@@ -315,7 +315,10 @@ def media_info(self):
@property
def cast_info(self):
- cinfo = {"volume_level": str(int(round(self._cast.status.volume_level, 2) * 100))}
+ cinfo = {
+ "volume_level": str(int(round(self._cast.status.volume_level, 2) * 100)),
+ "volume_muted": self._cast.status.volume_muted
+ }
if self._is_idle:
return cinfo
diff --git a/catt/util.py b/catt/util.py
index c2731a7..25b2e37 100644
--- a/catt/util.py
+++ b/catt/util.py
@@ -35,11 +35,8 @@ def echo_status(status):
if status.get("player_state"):
click.echo("State: {}".format(status["player_state"]))
- if status.get("volume_level"):
- click.echo("Volume: {}".format(status["volume_level"]))
-
- if status.get("volume_muted"):
- click.echo("Volume muted: {}".format(status["is_volume_muted"]))
+ click.echo("Volume: {}".format(status["volume_level"]))
+ click.echo("Volume muted: {}".format(status["volume_muted"]))
def guess_mime(path):
diff --git a/pyproject.toml b/pyproject.toml
index e89896b..374af5a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,9 +28,9 @@ catt = "catt.cli:main"
python = ">=3.7"
click = ">=7.1.2"
ifaddr = ">=0.1.7"
-pychromecast = ">=12.1.4, <13"
+pychromecast = ">=13.0.7, <14"
requests = ">=2.23.0"
-yt-dlp = ">=2022.6.22.1"
+yt-dlp = ">=2023.3.4"
[tool.poetry.dev-dependencies]
coverage = "*"
| pychromecast v13
I've noticed that catt sometimes hangs and it has been happening more frequently.
I upgraded pychromecast to 13.0.4 and I haven't experienced it hanging yet.
Perhaps requirements.txt could be made more specific to exclude a bad version of pychromecast, if that's the reason for limiting it to v12.
| 2023-05-05T10:49:17 | 0.0 | [] | [] |
|||
skorokithakis/catt | skorokithakis__catt-401 | f1ae3c823c65b3bdd74342fae4beeb08b0b56c9c | diff --git a/catt/stream_info.py b/catt/stream_info.py
index 89f47e72..928d81a1 100644
--- a/catt/stream_info.py
+++ b/catt/stream_info.py
@@ -166,7 +166,7 @@ def set_playlist_entry(self, number):
def _get_stream_preinfo(self, video_url):
try:
- return self._ydl.sanitize_info(self._ydl.extract_info(video_url, process=False))
+ return self._ydl.extract_info(video_url, process=False)
except yt_dlp.utils.DownloadError:
# We sometimes get CI failures when testing with YouTube videos,
# as YouTube throttles our connections intermittently. We evaluated
diff --git a/pyproject.toml b/pyproject.toml
index e2a1586c..e80b0dc5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,7 +30,7 @@ click = ">=7.1.2"
ifaddr = ">=0.1.7"
pychromecast = ">=12.1.4, <13"
requests = ">=2.23.0"
-yt-dlp = ">=2021.12.1"
+yt-dlp = ">=2022.6.22.1"
[tool.poetry.dev-dependencies]
coverage = "*"
| Casting a YouTube playlist fails with an AttributeError
Thanks for releasing `catt`, it's a nice tool. I had an issue with its most recent release on PyPI.
Running this:
```bash
$ catt -d my_device cast "https://www.youtube.com/playlist?list=PLjHf9jaFs8XVAQpJLdNNyA8tzhXzhpZHu"
```
Fails with this exception:
```python
Traceback (most recent call last):
File "~/.pyenv/versions/3.10.1/bin/catt", line 8, in <module>
sys.exit(main())
<snip>
File "~/.pyenv/versions/3.10.1/lib/python3.10/site-packages/click/decorators.py", line 38, in new_func
return f(get_current_context().obj, *args, **kwargs)
File "~/.pyenv/versions/3.10.1/lib/python3.10/site-packages/catt/cli.py", line 243, in cast
if not random_play and cst.playlist_capability and stream.playlist_all_ids:
File "~/.pyenv/versions/3.10.1/lib/python3.10/site-packages/catt/stream_info.py", line 143, in playlist_all_ids
if self.is_playlist and self._entries and self._entries[0].get("id"):
AttributeError: 'str' object has no attribute 'get'. Did you mean: 'playlist_id'?
```
Versions:
```bash
$ python --version
Python 3.10.1
$ catt --version
catt v0.12.5, Zaniest Zapper.
```
This error might be related to https://github.com/skorokithakis/catt/issues/367.
Fix yt-dlp support
Actually, it seems fe9097565767e255b146b5ad35f5a7486c5fa922 needs a fixup to port `stream_info.py` (and also `test_catt.py`) to use `yt-dlp`.
Here are the docs for that: https://github.com/yt-dlp/yt-dlp#embedding-yt-dlp, and this is the most relevant bit:
> **Tip**: If you are porting your code from youtube-dl to yt-dlp, one important point to look out for is that we do not guarantee the return value of `YoutubeDL.extract_info` to be json serializable, or even be a dictionary. It will be dictionary-like, but if you want to ensure it is a serializable dictionary, pass it through `YoutubeDL.sanitize_info` as shown in the example above
_Originally posted by @FranciscoPombal in https://github.com/skorokithakis/catt/issues/354#issuecomment-991135771_
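A minimal, self-contained sketch of that pattern, following yt-dlp's own embedding example (the URL is yt-dlp's test video):

```python
import json

import yt_dlp

URL = "https://www.youtube.com/watch?v=BaW_jenozKc"  # yt-dlp's example URL

with yt_dlp.YoutubeDL() as ydl:
    info = ydl.extract_info(URL, download=False)
    # sanitize_info makes the result JSON-serializable; the raw return
    # value is only guaranteed to be dictionary-like.
    print(json.dumps(ydl.sanitize_info(info)))
```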
| Hm, yes, unfortunately I think this is related to #367, and so will take a bit of time to fix. I will see if I have some time to work on this, but PRs here are definitely appreciated.
I will close this as a duplicate for now, please discuss in #367.
The latest Docker build fails with the error below:
FROM python:3.7
RUN pip install catt
Running catt results in:
```
Traceback (most recent call last):
File "/usr/local/bin/catt", line 5, in <module>
from catt.cli import main
File "/usr/local/lib/python3.7/site-packages/catt/cli.py", line 14, in <module>
from .controllers import CastState
File "/usr/local/lib/python3.7/site-packages/catt/controllers.py", line 22, in <module>
from .stream_info import StreamInfo
File "/usr/local/lib/python3.7/site-packages/catt/stream_info.py", line 4, in <module>
import youtube_dl
ModuleNotFoundError: No module named 'youtube_dl'
Traceback (most recent call last):
File "/usr/local/bin/catt", line 5, in <module>
from catt.cli import main
File "/usr/local/lib/python3.7/site-packages/catt/cli.py", line 14, in <module>
from .controllers import CastState
File "/usr/local/lib/python3.7/site-packages/catt/controllers.py", line 22, in <module>
from .stream_info import StreamInfo
File "/usr/local/lib/python3.7/site-packages/catt/stream_info.py", line 4, in <module>
import youtube_dl
ModuleNotFoundError: No module named 'youtube_dl'
```
@skorokithakis I believe you did not change catt/stream_info.py, tests/test_catt.py, and README.rst where youtube_dl is referenced.
@skorokithakis a new "pipx install catt" also fails with the same error when trying to run catt cast.
Ah, damn it. I didn't realize we had changed the dependency. Can you perhaps make a PR, @anthonyrocom? I'll cut a new release afterwards.
Hey @skorokithakis I didn't mean to leave you hanging. I haven't had time to analyze the changes needed. For anyone who needs it, you can RUN pip install youtube-dl before RUN pip install catt if you're building your Dockerfile.
I'll see if I can get a PR done at the end of this week.
Oh, no problem. However, youtube-dl is discontinued and probably doesn't work with many sites. We're switching over to yt-dlp, so I don't know if the workaround you mention in your comment would actually work.
@skorokithakis
> youtube-dl is discontinued
Is this official?
@theychx I don't know about official, it hasn't had any activity since July so I'd say that's as official as it's going to get...
My workaround makes catt run again but it doesn't mean YouTube or other links will cast 😅 I am streaming a local source so I wasn't thinking of it.
It's a good call to treat youtube-dl as inactive, since they previously had a monthly or faster release cadence.
@skorokithakis
The main `youtube-dl` maintainer just resurfaced from his hiatus, and made a fresh release. So I suggest we switch back to `youtube-dl`.
Hmm, that's very interesting, has he said if he's going to be maintaining it? I don't know if he's just going to go away again...
#369 relates to this.
I just finished the pull request too :D
Haha, yes, excellent timing :P By the way, as per the comment above, I think `extract_info` needs to be passed through `sanitize_info` first. I'm not entirely sure if we serialize it (I think not), but it can't hurt...
I also noticed you just updated catt/cli.py so I'm out of date too.
Maybe we just need to choose youtube-dl or yt-dlp, one or the other for now, because right now the build is broken if you don't have youtube-dl installed. Up to you, make the call! :)
I'm not sure what extract_info or sanitize_info does.
> I also noticed you just updated catt/cli.py so I'm out of date too.
That shouldn't be an issue, your code has no conflicts.
> Maybe we just need to choose youtube-dl or yt-dlp, one or the other for now, because right now the build is broken if you don't have youtube-dl installed. Up to you, make the call! :)
Yeah, we do need to choose one. On one hand, I'm inclined to go with the original, on the other, we know that yt-dlp is maintained, but YouTube-dl's status is uncertain.
@theychx I had a look and it seems that ytdl just released the 6-month-old code. There doesn't seem to be any new code merged or any work done, so I don't know how active the maintainer is. Given that catt is currently breaking, I think we should merge @anthonyrocom's PR and release, and we can think about switching back later if ytdl catches up to yt-dlp at some point. The latter seems to be much better maintained nowadays anyway.
I agree with going to yt-dlp right now because you have the agility to easily switch. Right now that's the well maintained project.
Looks like ytdl is officially dead, at least for now: https://github.com/ytdl-org/youtube-dl/commit/21b759057502c6e70d51011cfb3fb86d84055182
The tests are failing. I did some work on this in a branch, but yt-dlp seems to have changed a few things related to finding the best format and I need to figure out how to do that before we can release a fixed version...
Hey @skorokithakis sorry, I've been offline due to holidays. Anything I can help with? I admit I did a simple test but nothing more extensive. Is test_catt.py not passing?
Ah, no problem. Yes, the tests are failing, but I haven't managed to figure out why. It seems to be because they changed the format specifiers, but I didn't look into it more. | 2022-06-22T13:28:10 | 0.0 | [] | [] |
||
littlepea/django-docs | littlepea__django-docs-34 | 08912dac5d589ef57812a6cfdccb8398a57f0da7 | diff --git a/docs/urls.py b/docs/urls.py
index 50e9bb0..9623db3 100644
--- a/docs/urls.py
+++ b/docs/urls.py
@@ -1,13 +1,13 @@
-from django.conf.urls import url
+from django.conf.urls import re_path
from docs.views import DocsRootView, serve_docs, DOCS_DIRHTML
urlpatterns = []
if not DOCS_DIRHTML:
urlpatterns += [
- url(r'^$', DocsRootView.as_view(permanent=True), name='docs_root'),
+ re_path(r'^$', DocsRootView.as_view(permanent=True), name='docs_root'),
]
urlpatterns += [
- url(r"^(?P<path>.*)$", serve_docs, name="docs_files"),
+ re_path(r"^(?P<path>.*)$", serve_docs, name="docs_files"),
]
| django.conf.urls.url() is deprecated in favor of django.urls.re_path()
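A minimal before/after sketch of the migration; `my_view` is a placeholder view, not part of django-docs:

```python
from django.http import HttpResponse
from django.urls import re_path  # replaces django.conf.urls.url


def my_view(request):
    return HttpResponse("ok")


# Old, removed in Django 4.0:
#   from django.conf.urls import url
#   urlpatterns = [url(r"^$", my_view)]
urlpatterns = [re_path(r"^$", my_view)]
```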
| 2021-07-27T14:27:52 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-1380 | cc8753cd93789a42545a1ce78ab15502b081c2a3 | diff --git a/dlrover/trainer/torch/flash_checkpoint/ddp.py b/dlrover/trainer/torch/flash_checkpoint/ddp.py
index 6124f450d..e00aea062 100644
--- a/dlrover/trainer/torch/flash_checkpoint/ddp.py
+++ b/dlrover/trainer/torch/flash_checkpoint/ddp.py
@@ -115,3 +115,11 @@ def save_checkpoint(
def load_checkpoint(self, resume_path=""):
return self._engine.load(resume_path)
+
+ def wait_latest_checkpoint(self, timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+            timeout (seconds): The timeout to wait.
+ """
+ self._engine.wait_latest_checkpoint(timeout)
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
index 2d47edff6..c8a826f4f 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
@@ -254,3 +254,11 @@ def load_checkpoint(
)
torch.load = torch_native_load
return load_path, client_states
+
+ def wait_latest_checkpoint(self, timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+            timeout (seconds): The timeout to wait.
+ """
+ self._async_save_engine.wait_latest_checkpoint(timeout)
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
index 059c26e00..0bf96bf7d 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
@@ -120,6 +120,8 @@ def save_to_storage(self, step, state_dict, paths):
if self._local_rank == 0 and success:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
+ if success:
+ self.latest_step = step
return success
def get_local_shard_num(self):
diff --git a/dlrover/trainer/torch/flash_checkpoint/engine.py b/dlrover/trainer/torch/flash_checkpoint/engine.py
index b14b3981c..9acf2fe9a 100644
--- a/dlrover/trainer/torch/flash_checkpoint/engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/engine.py
@@ -191,6 +191,7 @@ def __init__(
self.checkpoint_dir = checkpoint_dir
self.storage = storage
+ self.latest_step = 0
self._save_timeout = save_timeout
self._local_rank = env_utils.get_local_rank()
self._cached_step = -1
@@ -411,6 +412,27 @@ def _restore_memory_from_replica(self):
)
dist.barrier()
+ def wait_latest_checkpoint(self, timeout=1800):
+ """
+        Wait for the saver to finish persisting the latest step's checkpoint.
+ """
+ start = time.time()
+ while True:
+ tracker_file = os.path.join(
+ self.checkpoint_dir, CheckpointConstant.TRACER_FILE_NAME
+ )
+ with open(tracker_file, "r") as f:
+ step = int(f.read())
+ if step == self.latest_step:
+ break
+ if time.time() - start > timeout:
+ logger.info(
+                    f"Timed out after {timeout}s waiting for "
+                    "the latest step's checkpoint."
+ )
+ break
+ time.sleep(3)
+
@abstractmethod
def get_saving_ranks(self):
pass
diff --git a/dlrover/trainer/torch/flash_checkpoint/fsdp.py b/dlrover/trainer/torch/flash_checkpoint/fsdp.py
index 84c0b99d1..db763d114 100644
--- a/dlrover/trainer/torch/flash_checkpoint/fsdp.py
+++ b/dlrover/trainer/torch/flash_checkpoint/fsdp.py
@@ -148,6 +148,14 @@ def load_checkpoint(self, model, optimizer, resume_path=""):
optimizer.load_state_dict(flattened_osd)
return state_dict
+ def wait_latest_checkpoint(self, timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+            timeout (seconds): The timeout to wait.
+ """
+ self._engine.wait_latest_checkpoint(timeout)
+
class FsdpFullCheckpointer(Checkpointer):
"""
@@ -282,3 +290,11 @@ def load_checkpoint(self, model, optimizer, resume_path=""):
model.load_state_dict(model_state_dict)
optimizer.load_state_dict(optim_state_dict)
return state_dict
+
+ def wait_latest_checkpoint(self, timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+            timeout (seconds): The timeout to wait.
+ """
+ self._engine.wait_latest_checkpoint(timeout)
diff --git a/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py b/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
index 0946b34a5..8254af79d 100644
--- a/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
@@ -534,20 +534,22 @@ def save_to_storage(self, step, state_dict, paths: Dict[str, str]):
state_dict (dict): the state dict of model and optimizer to save.
paths (dict): the storage path to save the state dict.
"""
- succeed = True
+ success = True
if step > self._cached_step:
- succeed = self.save_to_memory(step, state_dict, paths)
+ success = self.save_to_memory(step, state_dict, paths)
if dist.is_initialized():
dist.barrier()
# Only local rank 0 on each node notifies the event to save.
- if self._local_rank == 0 and succeed:
+ if self._local_rank == 0 and success:
logger.info(
"Put a save event to notify the agent persists checkpoint."
)
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
+ if success:
+ self.latest_step = step
def get_saver_class(self):
"""
diff --git a/dlrover/trainer/torch/flash_checkpoint/full_ckpt_engine.py b/dlrover/trainer/torch/flash_checkpoint/full_ckpt_engine.py
index 298ce3730..7ff9bafe3 100644
--- a/dlrover/trainer/torch/flash_checkpoint/full_ckpt_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/full_ckpt_engine.py
@@ -140,6 +140,8 @@ def save_to_storage(self, step, state_dict, paths):
if success and self._rank == 0:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
+ if success:
+ self.latest_step = step
return success
def load(self, resume_path=""):
diff --git a/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py b/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py
index f1fdbab39..da56aea47 100644
--- a/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py
+++ b/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py
@@ -381,3 +381,13 @@ def _get_last_checkpoint_step(self):
with open(tracer_file, "r") as f:
step = int(f.read())
return step
+
+ def wait_latest_checkpoint(self, timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+            timeout (seconds): The timeout to wait.
+ """
+ self.flash_checkpointer.async_save_engine.wait_latest_checkpoint(
+ timeout
+ )
diff --git a/dlrover/trainer/torch/flash_checkpoint/megatron.py b/dlrover/trainer/torch/flash_checkpoint/megatron.py
index 76c90ab34..eb073e762 100644
--- a/dlrover/trainer/torch/flash_checkpoint/megatron.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron.py
@@ -245,3 +245,16 @@ def load_checkpoint(
)
torch.load = torch_native_load
return iteration
+
+
+def wait_latest_checkpoint(timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+        timeout (seconds): The timeout to wait.
+ """
+ args = get_args()
+ checkpointer = MegatronCheckpointer.singleton_instance(
+ checkpoint_dir=args.save
+ )
+ checkpointer.engine.wait_latest_checkpoint(timeout)
diff --git a/dlrover/trainer/torch/flash_checkpoint/megatron_dist_ckpt.py b/dlrover/trainer/torch/flash_checkpoint/megatron_dist_ckpt.py
index c66fc0c17..ed94a771d 100644
--- a/dlrover/trainer/torch/flash_checkpoint/megatron_dist_ckpt.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron_dist_ckpt.py
@@ -708,3 +708,16 @@ def get_checkpoint_storage(deletion_strategy=None):
else:
storage = PosixDiskStorage()
return storage
+
+
+def wait_latest_checkpoint(timeout=1800):
+ """
+ Wait for the latest checkpoint.
+ Args:
+        timeout (seconds): The timeout to wait.
+ """
+ args = get_args()
+ checkpointer = MegatronDistCheckpointer.singleton_instance(
+ checkpoint_dir=args.save
+ )
+ checkpointer.engine.wait_latest_checkpoint(timeout)
diff --git a/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py b/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
index d2e6a7ed8..f7d2d5290 100644
--- a/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
@@ -237,17 +237,20 @@ def save_to_storage(self, step, state_dict, paths):
["model_states", "optim_states"] of the state dict and
the value is the path of storage to save.
"""
- succeed = True
+ success = True
if step > self._cached_step:
- succeed = self.save_to_memory(step, state_dict, paths)
+ success = self.save_to_memory(step, state_dict, paths)
if dist.is_initialized():
dist.barrier()
- if succeed and self._local_rank == 0:
+ if success and self._local_rank == 0:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
+ if success:
+ self.latest_step = step
+
def get_local_shard_num(self):
local_world_size = env_utils.get_local_world_size()
global_shard_num = self.get_global_shard_num()
diff --git a/docs/blogs/flash_checkpoint_cn.md b/docs/blogs/flash_checkpoint_cn.md
index 5c45a6cbe..0036233ff 100644
--- a/docs/blogs/flash_checkpoint_cn.md
+++ b/docs/blogs/flash_checkpoint_cn.md
@@ -107,6 +107,11 @@ if iter_num % save_storage_interval == 0:
ckpt_dict = checkpointer.load_checkpoint()
model.load_state_dict(ckpt_dict["model"])
optimizer.load_state_dict(ckpt_dict["optimizer"]
+
+# Wait for the latest checkpoint before exiting. The process that
+# asynchronously persists the checkpoint from memory to storage will exit
+# if the main process exits.
+checkpointer.wait_latest_checkpoint(timeout=1800)
```
#### FSDP
@@ -141,6 +146,11 @@ with FSDP.state_dict_type(model, StateDictType.SHARDED_STATE_DICT):
checkpointer.save_checkpoint(
step, state_dict, ckpt_dir, storage_type=StorageType.DISK
)
+
+# Wait for the latest checkpoint before exiting. The process that
+# asynchronously persists the checkpoint from memory to storage will exit
+# if the main process exits.
+checkpointer.wait_latest_checkpoint(timeout=1800)
```
加载 Checkpoint 的 API 与 PyTorch 的 Distributed Checkpoint API 保持一致,只需要将
| Flash Checkpoint incomplete saving
**Describe the bug**
First of all, thank you for your work. While verifying and using Flash Checkpoint, I ran into an issue: when the last checkpoint is being saved asynchronously during training, the save ends up incomplete.
I have done an initial investigation, and it appears that the training has already completed and the main process exits, which in turn causes the child process responsible for saving the checkpoint to exit as well, producing the incomplete save. Do you have any similar findings?
**APP Info (please complete the following information):**
- DLRover: 0.3.7
**HARDWARE Info (please complete the following information):**
- Device: GPU A100
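The patch above addresses this by adding a blocking wait. A minimal usage sketch, assuming the `DdpCheckpointer` wrapper defined in `dlrover/trainer/torch/flash_checkpoint/ddp.py` (the checkpoint directory is illustrative):

```python
from dlrover.trainer.torch.flash_checkpoint.ddp import DdpCheckpointer

checkpointer = DdpCheckpointer(checkpoint_dir="/tmp/flash_ckpt")

# ... training loop that calls checkpointer.save_checkpoint(...) ...

# Block until the async saver has persisted the latest checkpoint, so the
# saver subprocess is not killed when the main process exits.
checkpointer.wait_latest_checkpoint(timeout=1800)
```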
| Yes. We will implement a method to wait for the latest checkpoint to be persisted. For now, you can use `time.sleep` to wait for the persistence process as a workaround. | 2024-12-08T13:47:24 | 0.0 | [] | []
||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-1092 | 918166358ea1edd48a16684ad9b1054fde633342 | diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index 912957643..9cb16b3a8 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -234,6 +234,7 @@ def __init__(self, local_rank, host=True):
)
self.shared_memory: Optional[SharedMemory] = None
self.metadata = SharedDict(name=meta_name, create=host)
+ self._need_creation = True
def close(self):
if self.shared_memory:
@@ -249,7 +250,7 @@ def unlink(self):
self.metadata.unlink()
def reset(self):
- self.shared_memory = None
+ self._need_creation = True
def _create_tensor_meta(self, value: torch.Tensor):
"""
@@ -301,7 +302,7 @@ def load_state_dict(self):
config = meta_dict.get(DLROVER_CKPT_CONFIG_KEY, default_config)
if not meta_dict or config.writing_shm:
return {}
- if self.shared_memory is None:
+ if self.shared_memory is None or self._need_creation:
self.init_shared_memory(create=False)
if not self.shared_memory:
return {}
@@ -325,6 +326,7 @@ def init_shared_memory(self, create=False, size=0):
self.shared_memory = _create_shared_memory(
self._shm_name, create=create, size=size
)
+ self._need_creation = False
def get_checkpoint_config(self, default_config):
"""
@@ -489,6 +491,13 @@ def _save_shm_before_exiting(signum, frame):
signal.signal(signal.SIGINT, _clean_shm_handler)
signal.signal(signal.SIGTERM, _save_shm_before_exiting)
+ def wait_saving_checkpoint(self):
+ """
+        Check whether the saver is still writing the
+        latest checkpoint to the storage.
+ """
+ return self._writing_storage
+
def close(self):
"""Clear the resource of the shared objects."""
event = CheckpointEvent(type=CheckpointEventType.EXIT)
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 290121bbd..90450c7b7 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -83,6 +83,9 @@
__all__ = ["launch_agent"]
+_DEFAULT_INTERVAL = 5
+
+
def _set_paral_config():
"""
Set up the directory and path for the parallelism configuration.
@@ -266,7 +269,7 @@ def next_rendezvous(self):
)
if start_pending == 0:
start_pending = time.time()
- time.sleep(5)
+ time.sleep(_DEFAULT_INTERVAL)
start_join = time.time()
if start_join - start_pending > self.pend_timeout:
raise TimeoutError(
@@ -582,6 +585,7 @@ def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
"for other agents to finish."
)
self._exit_barrier()
+ self._wait_async_saver()
return run_result
elif state in {WorkerState.UNHEALTHY, WorkerState.FAILED}:
logger.error(f"The worker fails with {run_result.failures}")
@@ -607,6 +611,24 @@ def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
else:
raise Exception(f"[{role}] Worker group in {state.name} state")
+ def _wait_async_saver(self):
+ """
+        The agent waits for the saver to finish writing the checkpoint
+        from the shared memory before exiting.
+ """
+ saver = AsyncCheckpointSaver.get_ckpt_saver()
+ if saver:
+            # Wait until the saver finishes writing the checkpoint from
+            # the shared memory to the storage.
+ start_wait_time = time.time()
+ while saver.wait_saving_checkpoint():
+ time.sleep(_DEFAULT_INTERVAL)
+ wait_time = round(time.time() - start_wait_time, 2)
+ logger.info(
+                "Waited for the checkpoint to be saved; "
+                f"the waiting time was {wait_time}s."
+ )
+
def _save_ckpt_to_storage(self):
"""
The agent can save the checkpointing state dict in the shared
| Fatal Python error: Segmentation fault when killing the training process.
```bash
Fatal Python error: Segmentation fault
Thread 0x00007fcf1abb2700 (most recent call first):
File "/opt/conda/lib/python3.8/site-packages/torch/serialization.py", line 853 in _save
File "/opt/conda/lib/python3.8/site-packages/torch/serialization.py", line 619 in save
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/storage.py", line 140 in write_state_dict
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 912 in persist_to_storage
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 569 in _save_shard
File "/opt/conda/lib/python3.8/concurrent/futures/thread.py", line 57 in run
File "/opt/conda/lib/python3.8/concurrent/futures/thread.py", line 80 in _worker
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Current thread 0x00007fcf1b3b3700 (most recent call first):
File "/opt/conda/lib/python3.8/site-packages/torch/serialization.py", line 853 in _save
File "/opt/conda/lib/python3.8/site-packages/torch/serialization.py", line 619 in save
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/storage.py", line 140 in write_state_dict
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 912 in persist_to_storage
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 569 in _save_shard
File "/opt/conda/lib/python3.8/concurrent/futures/thread.py", line 57 in run
File "/opt/conda/lib/python3.8/concurrent/futures/thread.py", line 80 in _worker
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfd45c9700 (most recent call first):
File "/opt/conda/lib/python3.8/selectors.py", line 415 in select
File "/opt/conda/lib/python3.8/multiprocessing/connection.py", line 931 in wait
File "/opt/conda/lib/python3.8/concurrent/futures/process.py", line 362 in _queue_management_worker
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfc05c1700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 247 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfc2dc2700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 368 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfc55c3700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 476 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfc7dc4700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 247 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfca5c5700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 368 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfccdc6700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 476 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfcf5c7700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 368 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfd1dc8700 (most recent call first):
File "/opt/conda/lib/python3.8/threading.py", line 302 in wait
File "/opt/conda/lib/python3.8/concurrent/futures/_base.py", line 439 in result
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 829 in save_step_checkpoint
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 531 in _sync_shm_to_storage
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 425 in _saver
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfd6dca700 (most recent call first):
File "/opt/conda/lib/python3.8/threading.py", line 302 in wait
File "/opt/conda/lib/python3.8/queue.py", line 170 in get
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 406 in get
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 434 in _factory
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fcfd95cb700 (most recent call first):
File "/opt/conda/lib/python3.8/socket.py", line 292 in accept
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/common/multi_process.py", line 368 in _sync
File "/opt/conda/lib/python3.8/threading.py", line 870 in run
File "/opt/conda/lib/python3.8/threading.py", line 932 in _bootstrap_inner
File "/opt/conda/lib/python3.8/threading.py", line 890 in _bootstrap
Thread 0x00007fd0dd005740 (most recent call first):
File "/opt/conda/lib/python3.8/multiprocessing/shared_memory.py", line 226 in close
File "/opt/conda/lib/python3.8/multiprocessing/shared_memory.py", line 183 in __del__
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 252 in reset
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 538 in reset_shared_memory
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/ckpt_saver.py", line 717 in reset
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/training.py", line 659 in _restart_workers
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/training.py", line 605 in _invoke_run
File "/opt/conda/lib/python3.8/site-packages/torch/distributed/elastic/agent/server/api.py", line 736 in run
File "/opt/conda/lib/python3.8/site-packages/torch/distributed/elastic/metrics/api.py", line 124 in wrapper
File "/opt/conda/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/training.py", line 757 in launch_agent
File "/opt/conda/lib/python3.8/site-packages/dlrover/trainer/torch/elastic_run.py", line 223 in __call__
File "/opt/conda/lib/python3.8/site-packages/dlrover/trainer/torch/elastic_run.py", line 360 in run
File "/opt/conda/lib/python3.8/site-packages/dlrover/trainer/torch/elastic_run.py", line 373 in main
File "/opt/conda/lib/python3.8/site-packages/torch/distributed/elastic/multiprocessing/errors/__init__.py", line 346 in wrapper
File "/opt/conda/bin/dlrover-run", line 8 in <module>
examples/pretrain_gpt_distributed.sh: line 46: 6291 Segmentation fault (core dumped) dlrover-run --max_restarts=3 --auto-config pretrain_gpt.py --tensor-model-parallel-size 1 --pipeline-model-parallel-size 2 --num-layers 24 --hidden-size 1024 --num-attention-heads 16 --seq-length 1024 --max-position-embeddings 1024 --micro-batch-size 1 --global-batch-size 4 --train-iters 5000 --lr-decay-iters 320000 --save $CHECKPOINT_PATH --load $CHECKPOINT_PATH --data-path $DATA_PATH --vocab-file $VOCAB_FILE --merge-file $MERGE_FILE --split 900,50,50 --distributed-backend nccl --lr 0.00015 --min-lr 1.0e-5 --lr-decay-style cosine --weight-decay 1e-2 --clip-grad 1.0 --lr-warmup-fraction .01 --log-interval 10 --save-interval 500 --eval-interval 1000 --eval-iters 10 --bf16
```
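The traceback shows the main thread closing the shared memory (`SharedMemory.close` inside `reset`) while saver threads are still serializing from it. The patch keeps the segment open and only marks it stale so the next reader reattaches; a minimal sketch of that pattern, with an illustrative class:

```python
from multiprocessing.shared_memory import SharedMemory


class ShmHandler:
    """Sketch: reset() defers re-creation instead of closing the segment."""

    def __init__(self, name: str):
        self._name = name
        self.shared_memory = None
        self._need_creation = True

    def reset(self):
        # Do not call self.shared_memory.close() here: a saver thread may
        # still hold buffers into the segment, and closing it underneath
        # that thread can segfault the process.
        self._need_creation = True

    def buf(self):
        if self.shared_memory is None or self._need_creation:
            self.shared_memory = SharedMemory(name=self._name)
            self._need_creation = False
        return self.shared_memory.buf
```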
| 2024-04-22T02:52:21 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-1087 | 171c4b683fd1b1532c39c16c359b1e9060b62b2a | diff --git a/dlrover/python/master/node/dist_job_manager.py b/dlrover/python/master/node/dist_job_manager.py
index c940d9e4a..e2111d92c 100644
--- a/dlrover/python/master/node/dist_job_manager.py
+++ b/dlrover/python/master/node/dist_job_manager.py
@@ -816,7 +816,7 @@ def remove_not_joined_rdzv_workers(self, worker_ranks):
def pend_without_workers(self):
"""Check whether to wait for evicted workers."""
- if self._worker_manager.has_failed_worker():
+ if self._worker_manager.has_exited_worker():
return False
elif self._worker_manager.wait_worker_restart():
return True
diff --git a/dlrover/python/master/node/worker.py b/dlrover/python/master/node/worker.py
index fa3f9b082..64d4e380f 100644
--- a/dlrover/python/master/node/worker.py
+++ b/dlrover/python/master/node/worker.py
@@ -264,10 +264,13 @@ def remove_not_joined_rdzv_workers(self, worker_ranks: List[int]):
plan.merge(p)
return plan
- def has_failed_worker(self):
- """Check whether there is failed worker except evicted workers."""
+ def has_exited_worker(self):
+        """Check whether there is an exited worker besides evicted workers."""
for worker in self._nodes.values():
- if worker.exit_reason == NodeExitReason.FATAL_ERROR:
+ if (
+ worker.exit_reason == NodeExitReason.FATAL_ERROR
+ or worker.status == NodeStatus.SUCCEEDED
+ ):
return True
return False
| The job master hangs when there is only one worker and the worker is preempted.
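For context, an illustrative reduction of the patched check: the master stops pending for worker restarts once any worker has exited terminally, whether with a fatal error or with success:

```python
# Enum values shown as strings for brevity; the real code compares
# against NodeExitReason.FATAL_ERROR and NodeStatus.SUCCEEDED.
def has_exited_worker(nodes):
    return any(
        node.exit_reason == "FATAL_ERROR" or node.status == "SUCCEEDED"
        for node in nodes.values()
    )
```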
| 2024-04-18T12:21:53 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-1072 | 50a1ef68fa18975520ee01d890f436d4d5b74e0d | diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index 106cd9cd7..9ac51c5ac 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -103,7 +103,7 @@ def find_free_port(self):
_, port = sock.getsockname()
return port
- # @retry_grpc_request
+ @retry_grpc_request
def _report(self, message: grpc.Message):
request = elastic_training_pb2.Message()
request.node_id = self._node_id
diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index d9469a8ea..912957643 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -513,7 +513,7 @@ def _sync_shm_to_storage(self):
event: CheckpointEvent = self._event_queue.get()
if event.type == CheckpointEventType.UPDATE_SHARD:
logger.info(
- "Reset the shared memory after the training starts."
+ "Reset the shared memory after the training starts. "
f"The number of global shards is {event.global_shard_num}."
)
self.global_shard_num = event.global_shard_num
@@ -527,8 +527,6 @@ def _sync_shm_to_storage(self):
def reset_shared_memory(self):
self._stop_commit = True
- for lock in self._shm_locks:
- lock.release()
for shm_handler in self._shm_handlers:
shm_handler.reset()
diff --git a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
index 16e1cced7..2ccf2af29 100644
--- a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
@@ -123,15 +123,16 @@ def save_to_storage(self, step, state_dict, paths):
["model_states", "optim_states"] of the state dict and
the value is the path of storage to save.
"""
- succeed = True
+ success = True
if step > self._cached_step:
- succeed = self.save_to_memory(step, state_dict, paths)
+ success = self.save_to_memory(step, state_dict, paths)
# Only rank 0 persist the checkpoint to the storage.
if dist.is_initialized():
dist.barrier()
- if succeed and self._rank == 0:
+ if success and self._rank == 0:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
+ return success
def load(self, resume_path=""):
"""
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
index aa6e4adf5..657d03201 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
@@ -68,12 +68,10 @@ def save(self, state_dict, path: str):
elif path.endswith(_DS_OPTIM_SD_FILE_SUFFIX):
sd_name = CheckpointConstant.OPTIM_STATES_NAME
else:
- raise ValueError(
- f"The suffix of {path} is not "
- f"{_DS_MODEL_SD_FILE_SUFFIX} and {_DS_OPTIM_SD_FILE_SUFFIX}. "
- )
- self.state_dict[sd_name] = state_dict
- self.paths[sd_name] = path
+ sd_name = path.split("/")[-1]
+ if sd_name:
+ self.state_dict[sd_name] = state_dict
+ self.paths[sd_name] = path
def load(self, path: str, map_location=None):
def load_func(path):
@@ -144,7 +142,6 @@ def __init__(
self._ckpt_agent = AsyncCheckpointAgent(
self._async_save_engine.storage
)
- self.engine.checkpoint_engine = self._ckpt_agent
self._local_rank = env_utils.get_local_rank()
self._ds_tracer_file = os.path.join(
self.checkpoint_dir, DeepSpeedCheckpointSaver.TRACER_FILE
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
index 258901e5f..2f9bd3d60 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
@@ -107,17 +107,18 @@ def save_to_storage(self, step, state_dict, paths):
["model_states", "optim_states"] of the state dict and
the value is the path of storage to save.
"""
- succeed = True
+ success = True
if step > self._cached_step:
- succeed = self.save_to_memory(step, state_dict, paths)
+ success = self.save_to_memory(step, state_dict, paths)
if dist.is_initialized():
dist.barrier()
# Only local rank 0 to notify the saving event to the agent.
- if self._local_rank == 0 and succeed:
+ if self._local_rank == 0 and success:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
+ return success
def get_local_shard_num(self):
local_world_size = env_utils.get_local_world_size()
diff --git a/dlrover/trainer/torch/flash_checkpoint/engine.py b/dlrover/trainer/torch/flash_checkpoint/engine.py
index 207ef130a..c1bf6c0ec 100644
--- a/dlrover/trainer/torch/flash_checkpoint/engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/engine.py
@@ -103,7 +103,7 @@ def wrapper(*args, **kwargs):
def start_async_save():
AsyncCheckpointSaver.start_async_saving_ckpt()
while True:
- time.sleep(36000)
+ time.sleep(60)
def start_saver_process():
diff --git a/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py b/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py
new file mode 100644
index 000000000..cbeffac05
--- /dev/null
+++ b/dlrover/trainer/torch/flash_checkpoint/hf_trainer.py
@@ -0,0 +1,380 @@
+# Copyright 2024 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import random
+import re
+import shutil
+import warnings
+from typing import Optional
+
+import numpy as np
+import torch
+import torch.distributed as dist
+from deepspeed.runtime.engine import DeepSpeedEngine
+from transformers import Trainer
+from transformers.trainer import (
+ OPTIMIZER_NAME,
+ PREFIX_CHECKPOINT_DIR,
+ SCHEDULER_NAME,
+ TRAINER_STATE_NAME,
+ TRAINING_ARGS_NAME,
+ WEIGHTS_NAME,
+ DeepSpeedSchedulerWrapper,
+ ParallelMode,
+ PeftModel,
+ PreTrainedModel,
+ is_peft_available,
+ logger,
+ reissue_pt_warnings,
+ unwrap_model,
+)
+
+from dlrover.python.common.storage import PosixDiskStorage
+from dlrover.trainer.torch.flash_checkpoint.ddp_engine import (
+ DdpCheckpointEngine,
+)
+from dlrover.trainer.torch.flash_checkpoint.deepspeed import (
+ AsyncCheckpointAgent,
+)
+from dlrover.trainer.torch.flash_checkpoint.deepspeed_engine import (
+ DeepSpeedCheckpointEngine,
+)
+from dlrover.trainer.torch.flash_checkpoint.engine import CheckpointEngine
+
+torch_native_save = torch.save
+torch_native_load = torch.load
+
+
+class HfFlashCheckpointer(object):
+ def __init__(self, checkpoint_dir, storage=None):
+ self.checkpoint_dir = checkpoint_dir
+ self.storage = PosixDiskStorage() if not storage else storage
+ self.ckpt_agent = AsyncCheckpointAgent(self.storage)
+ self.async_save_engine: Optional[CheckpointEngine] = None
+
+ def save_checkpoint_to_memory(self, step):
+ success = self.async_save_engine.save_to_memory(
+ step,
+ self.ckpt_agent.state_dict,
+ self.ckpt_agent.paths,
+ )
+ return success
+
+ def save_checkpoint_to_storage(self, step):
+ success = self.async_save_engine.save_to_storage(
+ step,
+ self.ckpt_agent.state_dict,
+ self.ckpt_agent.paths,
+ )
+ return success
+
+
+class HfDeepSpeedCheckpointer(HfFlashCheckpointer):
+ def __init__(
+ self,
+ engine: DeepSpeedEngine,
+ checkpoint_dir,
+ storage=None,
+ comm_backend="",
+ ):
+ super().__init__(checkpoint_dir, storage)
+ self.engine = engine
+ global_shard_num = 1
+ if self.engine.zero_optimization():
+ global_shard_num = dist.get_world_size(
+ self.engine.optimizer.dp_process_group
+ )
+ zero_stage = self.engine.zero_optimization_stage()
+ self.async_save_engine = DeepSpeedCheckpointEngine(
+ checkpoint_dir,
+ storage=self.storage,
+ global_shard_num=global_shard_num,
+ zero_stage=zero_stage,
+ comm_backend=comm_backend,
+ )
+
+
+class HfDdpCheckpointer(HfFlashCheckpointer):
+ def __init__(
+ self,
+ checkpoint_dir,
+ storage=None,
+ comm_backend="",
+ ):
+ super().__init__(checkpoint_dir, storage)
+ self.async_save_engine = DdpCheckpointEngine(
+ checkpoint_dir,
+ storage=self.storage,
+ comm_backend=comm_backend,
+ )
+
+
+class FlashCkptTrainer(Trainer):
+ """
+    The flash checkpoint trainer synchronously saves the model weights
+    and optimizer states of a checkpoint into memory and asynchronously
+    saves the checkpoint from memory to storage. The training is not
+    blocked while saving the checkpoint to storage.
+
+    Note:: The trainer creates a directory and saves json files of the
+    training configuration, like `config.json`, `trainer_state.json` and
+    `generation_config.json`, to the directory when saving a checkpoint.
+    There might not be model weights and optimizer states in the
+    checkpoint directory because the trainer asynchronously saves them
+    into the directory. We can get the last step with a complete
+    checkpoint directory from the step in the file `dlrover_latest.txt`
+    in the `OUTPUT_DIR` of `TrainingArguments`.
+ """
+
+ def _save_checkpoint(self, model, trial, metrics=None):
+ run_dir = self._get_output_dir(trial=trial)
+ # Save model checkpoint
+ checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}"
+ output_dir = os.path.join(run_dir, checkpoint_folder)
+
+ if not hasattr(self, "flash_checkpointer"):
+ if self.is_deepspeed_enabled:
+ self.flash_checkpointer = HfDeepSpeedCheckpointer(
+ self.model_wrapped, run_dir
+ )
+ elif not self.is_deepspeed_enabled and not self.is_fsdp_enabled:
+ self.flash_checkpointer = HfDdpCheckpointer(run_dir)
+ else:
+ raise ValueError(
+ "Flash Checkpoint only supports DeepSpeed or DDP."
+ )
+
+ if self.hp_search_backend is None and trial is None:
+ self.store_flos()
+
+ torch.save = self.flash_checkpointer.ckpt_agent.save
+ self.save_model(output_dir, _internal_call=True)
+ if self.is_deepspeed_enabled:
+ self.model_wrapped.save_checkpoint(output_dir)
+
+ elif (
+ self.args.should_save
+ and not self.is_deepspeed_enabled
+ and not self.is_fsdp_enabled
+ ):
+ # deepspeed.save_checkpoint above saves model/optim/sched
+ torch.save(
+ self.optimizer.state_dict(),
+ os.path.join(output_dir, OPTIMIZER_NAME),
+ )
+
+ # Save SCHEDULER & SCALER
+ is_deepspeed_custom_scheduler = (
+ self.is_deepspeed_enabled
+ and not isinstance(self.lr_scheduler, DeepSpeedSchedulerWrapper)
+ )
+ if self.args.should_save and (
+ not self.is_deepspeed_enabled or is_deepspeed_custom_scheduler
+ ):
+ with warnings.catch_warnings(record=True) as caught_warnings:
+ torch.save(
+ self.lr_scheduler.state_dict(),
+ os.path.join(output_dir, SCHEDULER_NAME),
+ )
+ reissue_pt_warnings(caught_warnings)
+
+ # Determine the new best metric / best model checkpoint
+ if metrics is not None and self.args.metric_for_best_model is not None:
+ metric_to_check = self.args.metric_for_best_model
+ if not metric_to_check.startswith("eval_"):
+ metric_to_check = f"eval_{metric_to_check}"
+ metric_value = metrics[metric_to_check]
+
+ operator = np.greater if self.args.greater_is_better else np.less
+ if (
+ self.state.best_metric is None
+ or self.state.best_model_checkpoint is None
+ or operator(metric_value, self.state.best_metric)
+ ):
+ self.state.best_metric = metric_value
+ self.state.best_model_checkpoint = output_dir
+
+ # Save the Trainer state
+ if self.args.should_save:
+ self.state.save_to_json(
+ os.path.join(output_dir, TRAINER_STATE_NAME)
+ )
+
+ # Save RNG state in non-distributed training
+ rng_states = {
+ "python": random.getstate(),
+ "numpy": np.random.get_state(),
+ "cpu": torch.random.get_rng_state(),
+ }
+ if torch.cuda.is_available():
+ if self.args.parallel_mode == ParallelMode.DISTRIBUTED:
+ # In non distributed, we save the global
+ # CUDA RNG state (will take care of DataParallel)
+ rng_states["cuda"] = torch.cuda.random.get_rng_state_all()
+ else:
+ rng_states["cuda"] = torch.cuda.random.get_rng_state()
+
+ # A process can arrive here before the process 0 has a chance to
+ # save the model, in which case output_dir may not yet exist.
+ os.makedirs(output_dir, exist_ok=True)
+
+ if self.args.world_size <= 1:
+ torch.save(rng_states, os.path.join(output_dir, "rng_state.pth"))
+ else:
+ torch.save(
+ rng_states,
+ os.path.join(
+ output_dir, f"rng_state_{self.args.process_index}.pth"
+ ),
+ )
+ torch.save = torch_native_save
+ success = self.flash_checkpointer.save_checkpoint_to_storage(
+ self.state.global_step
+ )
+ if not success:
+ logger.info(
+ f"Skip saving the checkpoint of step {self.state.global_step} "
+ "because the latest checkpoint is not finished."
+ )
+ shutil.rmtree(output_dir, ignore_errors=True)
+
+ if self.args.push_to_hub:
+ self._push_from_checkpoint(output_dir)
+
+ # Maybe delete some older checkpoints.
+ if self.args.should_save:
+ self._rotate_checkpoints(use_mtime=True, output_dir=run_dir)
+
+ def _save(self, output_dir: Optional[str] = None, state_dict=None):
+ # If we are executing this function, we are the process zero
+ # so we don't check for that.
+ output_dir = (
+ output_dir if output_dir is not None else self.args.output_dir
+ )
+ os.makedirs(output_dir, exist_ok=True)
+ logger.info(f"Saving model checkpoint to {output_dir}")
+
+ supported_classes = (
+ (PreTrainedModel,)
+ if not is_peft_available()
+ else (PreTrainedModel, PeftModel)
+ )
+ # Save a trained model and configuration using `save_pretrained()`.
+ # They can then be reloaded using `from_pretrained()`
+ if self.args.save_safetensors:
+ logger.warn(
+                "Flash checkpoint does not support safetensors, "
+                "so torch.save is used instead."
+ )
+ if not isinstance(self.model, supported_classes):
+ if state_dict is None:
+ state_dict = self.model.state_dict()
+
+ if isinstance(unwrap_model(self.model), supported_classes):
+ unwrap_model(self.model).save_pretrained(
+ output_dir,
+ state_dict=state_dict,
+ safe_serialization=False,
+ save_function=self.flash_checkpointer.ckpt_agent.save,
+ )
+ else:
+ logger.info(
+ "Trainer.model is not a `PreTrainedModel`, "
+ "only saving its state dict."
+ )
+ torch.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
+ else:
+ self.model.save_pretrained(
+ output_dir,
+ state_dict=state_dict,
+ safe_serialization=False,
+ save_function=self.flash_checkpointer.ckpt_agent.save,
+ )
+
+ if self.tokenizer is not None:
+ self.tokenizer.save_pretrained(output_dir)
+
+ args_path = os.path.join(output_dir, TRAINING_ARGS_NAME)
+ self.flash_checkpointer.ckpt_agent.save(self.args, args_path)
+
+ def _rotate_checkpoints(self, use_mtime=False, output_dir=None) -> None:
+ if (
+ self.args.save_total_limit is None
+ or self.args.save_total_limit <= 0
+ ):
+ return
+
+ last_step = self._get_last_checkpoint_step()
+
+ # Check if we should delete older checkpoint(s)
+ checkpoints_sorted = self._sorted_checkpoints(
+ use_mtime=use_mtime, output_dir=output_dir
+ )
+
+ valid_checkpoints = []
+ for path in checkpoints_sorted:
+ regex_match = re.match(f".*{PREFIX_CHECKPOINT_DIR}-([0-9]+)", path)
+ if regex_match is not None and regex_match.groups() is not None:
+ step = int(regex_match.groups()[0])
+ if step <= last_step:
+ valid_checkpoints.append(path)
+
+ if len(valid_checkpoints) <= self.args.save_total_limit:
+ return
+
+ # If save_total_limit=1 with load_best_model_at_end=True,
+ # we could end up deleting the last checkpoint, which
+ # should be avoided and allow resuming
+ save_total_limit = self.args.save_total_limit
+ if (
+ self.state.best_model_checkpoint is not None
+ and self.args.save_total_limit == 1
+ and valid_checkpoints[-1] != self.state.best_model_checkpoint
+ ):
+ save_total_limit = 2
+
+ number_of_checkpoints_to_delete = max(
+ 0, len(valid_checkpoints) - save_total_limit
+ )
+ checkpoints_to_be_deleted = valid_checkpoints[
+ :number_of_checkpoints_to_delete
+ ]
+ for checkpoint in checkpoints_to_be_deleted:
+ logger.info(
+ f"Deleting older checkpoint [{checkpoint}] "
+ f"due to save_total_limit = {self.args.save_total_limit}."
+ )
+ shutil.rmtree(checkpoint, ignore_errors=True)
+
+ def get_last_checkpoint(self):
+ """
+        Get the path of the last complete checkpoint. Some later directories
+        may not hold a complete checkpoint because the asynchronous
+        persistence may not have finished. The step in `dlrover_latest.txt` is
+        the last step with a complete checkpoint, so we can derive the path
+        from that step.
+ """
+ step = self._get_last_checkpoint_step()
+ if step == 0:
+ return False
+ checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{step}"
+ ckpt_dir = os.path.join(self.args.output_dir, checkpoint_folder)
+ return ckpt_dir
+
+ def _get_last_checkpoint_step(self):
+ tracer_file = os.path.join(self.args.output_dir, "dlrover_latest.txt")
+ if not os.path.exists(tracer_file):
+ return 0
+ with open(tracer_file, "r") as f:
+ step = int(f.read())
+ return step
diff --git a/examples/pytorch/llama2/ascend_utils.py b/examples/pytorch/llama2/ascend_utils.py
new file mode 100644
index 000000000..15eb4a1d5
--- /dev/null
+++ b/examples/pytorch/llama2/ascend_utils.py
@@ -0,0 +1,34 @@
+# Copyright 2024 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+ from transformers.utils import is_torch_npu_available
+except (ImportError, ModuleNotFoundError):
+
+ def is_torch_npu_available():
+ "Checks if `torch_npu` is installed and potentially"
+ " if a NPU is in the environment"
+ import importlib
+
+ if importlib.util.find_spec("torch_npu") is None:
+ return False
+
+ import torch
+ import torch_npu # noqa: F401,F811
+
+ return hasattr(torch, "npu") and torch.npu.is_available()
+
+
+if is_torch_npu_available():
+ import torch_npu # noqa: F401,F811
+ from torch_npu.contrib import transfer_to_npu # noqa: F401
diff --git a/examples/pytorch/llama2/deepspeed_config.json b/examples/pytorch/llama2/deepspeed_config.json
new file mode 100644
index 000000000..2b1694abb
--- /dev/null
+++ b/examples/pytorch/llama2/deepspeed_config.json
@@ -0,0 +1,18 @@
+{
+ "zero_optimization": {
+ "stage": 1,
+ "allgather_partitions": true,
+ "allgather_bucket_size": 5e8,
+ "overlap_comm": true,
+ "reduce_scatter": true,
+ "reduce_bucket_size": 5e8,
+ "contiguous_gradients": true,
+ "round_robin_gradients": true
+ },
+ "gradient_accumulation_steps": "auto",
+ "gradient_clipping": "auto",
+ "steps_per_print": 100,
+ "train_batch_size": "auto",
+ "train_micro_batch_size_per_gpu": "auto",
+ "wall_clock_breakdown": false
+}
\ No newline at end of file
diff --git a/examples/pytorch/llama2/fine_tuning.py b/examples/pytorch/llama2/fine_tuning.py
index 9d725ad18..53385c74b 100644
--- a/examples/pytorch/llama2/fine_tuning.py
+++ b/examples/pytorch/llama2/fine_tuning.py
@@ -23,28 +23,9 @@
TrainerCallback,
)
-try:
- from transformers.utils import is_torch_npu_available
-except (ImportError, ModuleNotFoundError):
-
- def is_torch_npu_available():
- "Checks if `torch_npu` is installed and potentially"
- " if a NPU is in the environment"
- import importlib
-
- if importlib.util.find_spec("torch_npu") is None:
- return False
-
- import torch
- import torch_npu # noqa: F401,F811
-
- return hasattr(torch, "npu") and torch.npu.is_available()
-
-
-if is_torch_npu_available():
- import torch_npu # noqa: F401,F811
- from torch_npu.contrib import transfer_to_npu # noqa: F401
+from dlrover.trainer.torch.flash_checkpoint.hf_trainer import FlashCkptTrainer
+from .ascend_utils import is_torch_npu_available
CUTOFF_LEN = 512
@@ -134,12 +115,7 @@ def train(data_path, model_name_or_path="meta-llama/Llama-2-7b-hf"):
LORA_DROPOUT = 0.05
LORA_TARGET_MODULES = ["q_proj", "v_proj"]
- BATCH_SIZE = 16
- if not is_torch_npu_available():
- MICRO_BATCH_SIZE = 16
- else:
- MICRO_BATCH_SIZE = 8
- GRADIENT_ACCUMULATION_STEPS = BATCH_SIZE // MICRO_BATCH_SIZE
+ MICRO_BATCH_SIZE = 8
LEARNING_RATE = 3e-4
TRAIN_STEPS = 3000
OUTPUT_DIR = "experiments"
@@ -157,7 +133,6 @@ def train(data_path, model_name_or_path="meta-llama/Llama-2-7b-hf"):
training_arguments = transformers.TrainingArguments(
per_device_train_batch_size=MICRO_BATCH_SIZE,
- gradient_accumulation_steps=GRADIENT_ACCUMULATION_STEPS,
warmup_steps=100,
max_steps=TRAIN_STEPS,
learning_rate=LEARNING_RATE,
@@ -178,7 +153,7 @@ def train(data_path, model_name_or_path="meta-llama/Llama-2-7b-hf"):
tokenizer, pad_to_multiple_of=8, return_tensors="pt", padding=True
)
- trainer = transformers.Trainer(
+ trainer = FlashCkptTrainer(
model=model,
train_dataset=train_data,
eval_dataset=val_data,
@@ -197,7 +172,8 @@ def train(data_path, model_name_or_path="meta-llama/Llama-2-7b-hf"):
if not is_torch_npu_available():
model = torch.compile(model)
- trainer.train()
+ last_ckpt_path = trainer.get_last_checkpoint()
+ trainer.train(resume_from_checkpoint=last_ckpt_path)
model.save_pretrained(OUTPUT_DIR)
diff --git a/examples/pytorch/llama2/pretrain.py b/examples/pytorch/llama2/pretrain.py
new file mode 100644
index 000000000..c23cdbf41
--- /dev/null
+++ b/examples/pytorch/llama2/pretrain.py
@@ -0,0 +1,145 @@
+# Copyright 2024 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fire
+import torch
+import transformers
+from datasets import load_dataset
+from transformers import (
+ LlamaConfig,
+ LlamaForCausalLM,
+ LlamaTokenizerFast,
+ TrainerCallback,
+)
+
+from dlrover.trainer.torch.flash_checkpoint.hf_trainer import FlashCkptTrainer
+
+from .ascend_utils import is_torch_npu_available
+
+CUTOFF_LEN = 512
+
+
+class PrintCudaMemCallback(TrainerCallback):
+ def on_log(self, args, state, control, logs=None, **kwargs):
+ cuda_mem = torch.cuda.max_memory_allocated() / 1e9
+ print(f"cuda memory {cuda_mem:.3f}G")
+
+
+def generate_prompt(data_point):
+ return f"""Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request. # noqa: E501
+### Instruction:
+{data_point["instruction"]}
+### Input:
+{data_point["input"]}
+### Response:
+{data_point["output"]}"""
+
+
+def tokenize(tokenizer, prompt, add_eos_token=True):
+ result = tokenizer(
+ prompt,
+ truncation=True,
+ max_length=CUTOFF_LEN,
+ padding=False,
+ return_tensors=None,
+ )
+ if (
+ result["input_ids"][-1] != tokenizer.eos_token_id
+ and len(result["input_ids"]) < CUTOFF_LEN
+ and add_eos_token
+ ):
+ result["input_ids"].append(tokenizer.eos_token_id)
+ result["attention_mask"].append(1)
+
+ result["labels"] = result["input_ids"].copy()
+ return result
+
+
+def generate_and_tokenize_prompt(tokenizer, data_point):
+ full_prompt = generate_prompt(data_point)
+ tokenized_full_prompt = tokenize(tokenizer, full_prompt)
+ return tokenized_full_prompt
+
+
+def train(data_path):
+ # The default model is llama-7B and we can set the model size by
+ # setting hidden_size, num_attention_heads, num_hidden_layers.
+ config = LlamaConfig() # llama-7B
+ model = LlamaForCausalLM(config)
+ tokenizer = LlamaTokenizerFast.from_pretrained(
+ "hf-internal-testing/llama-tokenizer"
+ )
+
+ tokenizer.pad_token_id = (
+ 0 # unk. we want this to be different from the eos token
+ )
+ tokenizer.padding_side = "left"
+ data = load_dataset("json", data_files=data_path)
+
+ train_val = data["train"].train_test_split(
+ test_size=200, shuffle=True, seed=42
+ )
+ train_data = train_val["train"].map(
+ lambda x: generate_and_tokenize_prompt(tokenizer, x)
+ )
+
+ val_data = train_val["test"].map(
+ lambda x: generate_and_tokenize_prompt(tokenizer, x)
+ )
+
+ MICRO_BATCH_SIZE = 8
+ LEARNING_RATE = 3e-4
+ TRAIN_STEPS = 10000
+ OUTPUT_DIR = "experiments"
+
+ training_arguments = transformers.TrainingArguments(
+ per_device_train_batch_size=MICRO_BATCH_SIZE,
+ warmup_steps=100,
+ max_steps=TRAIN_STEPS,
+ learning_rate=LEARNING_RATE,
+ fp16=True,
+ logging_steps=10,
+ optim="adamw_torch",
+ evaluation_strategy="steps",
+ save_strategy="steps",
+ eval_steps=10,
+ save_steps=10,
+ output_dir=OUTPUT_DIR,
+ save_total_limit=1,
+ load_best_model_at_end=True,
+ report_to="tensorboard",
+ deepspeed="deepspeed_config.json",
+ save_safetensors=False,
+ )
+
+ data_collator = transformers.DataCollatorForSeq2Seq(
+ tokenizer, pad_to_multiple_of=8, return_tensors="pt", padding=True
+ )
+
+ trainer = FlashCkptTrainer(
+ model=model,
+ train_dataset=train_data,
+ eval_dataset=val_data,
+ args=training_arguments,
+ data_collator=data_collator,
+ callbacks=[PrintCudaMemCallback()],
+ )
+ model.config.use_cache = False
+ if not is_torch_npu_available():
+ model = torch.compile(model)
+ last_ckpt_path = trainer.get_last_checkpoint()
+ trainer.train(resume_from_checkpoint=last_ckpt_path)
+
+
+if __name__ == "__main__":
+ fire.Fire(train)
diff --git a/examples/pytorch/llama2/requirements.txt b/examples/pytorch/llama2/requirements.txt
index 1bab331b3..3c1bd6a45 100644
--- a/examples/pytorch/llama2/requirements.txt
+++ b/examples/pytorch/llama2/requirements.txt
@@ -1,10 +1,10 @@
accelerate==0.24.1
appdirs==1.4.4
bitsandbytes==0.37.2
-datasets==2.10.1
+datasets==2.18.0
fire==0.5.0
peft
-transformers==4.31.0
+transformers==4.37.2
sentencepiece==0.1.97
tensorboardX==2.6
gradio==3.23.0
\ No newline at end of file
diff --git a/scripts/ci_install.sh b/scripts/ci_install.sh
new file mode 100644
index 000000000..3216f2d1a
--- /dev/null
+++ b/scripts/ci_install.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Copyright 2024 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+pip install kubernetes
+pip install grpcio-tools
+pip install psutil
+pip install deprecated
+pip install 'ray[default]'
+pip install pyhocon
+pip install pytest-cov
+pip install tensorflow==2.13.0
+pip install deepspeed==0.12.6
+pip install accelerate==0.29.2
+pip install transformers==4.37.2
+pip install torch==2.0.1+cpu -f https://download.pytorch.org/whl/torch_stable.html
+pip install peft==0.10.0
| How to use Flash Checkpoint for a Hugging Face Trainer job
I cannot find `flash_checkpoint` in the example https://github.com/intelligent-machine-learning/dlrover/blob/master/examples/pytorch/llama2/fine_tuning.py.
Is it enabled automatically when the training job is started with `dlrover-run`?
Need an example of Llama with flash checkpoint
Hello, we hope for an example of Llama with flash checkpoint so we can measure how long training pauses while exporting checkpoints. Thanks!
|
Megatron-LM provides an example of running llama2 in [llama2.md](https://github.com/NVIDIA/Megatron-LM/blob/main/docs/llama2.md). If you can run that example, you can use flash checkpoint in Megatron-LM by modifying `megatron/training.py`:
```Python
# from megatron.checkpointing import load_checkpoint
# from megatron.checkpointing import save_checkpoint
from dlrover.trainer.torch.flash_checkpoint.megatron_dist_ckpt import save_checkpoint
from dlrover.trainer.torch.flash_checkpoint.megatron_dist_ckpt import load_checkpoint
```
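With the imports swapped, the training loop can mix frequent in-memory snapshots with less frequent disk persistence. A hedged sketch, assuming `megatron_dist_ckpt` mirrors the `save_checkpoint(iteration, model, optimizer, opt_param_scheduler, storage_type=...)` signature of `flash_checkpoint/megatron.py` shown later in this document, and that `StorageType` is importable as in the other flash-checkpoint modules:

```Python
from dlrover.trainer.torch.flash_checkpoint.megatron_dist_ckpt import save_checkpoint
from dlrover.trainer.torch.flash_checkpoint.ddp import StorageType  # assumed import path

# Inside the Megatron training loop, after each optimizer step:
if args.save and iteration % save_memory_interval == 0:
    # Cheap snapshot into shared memory; the agent persists it later.
    save_checkpoint(iteration, model, optimizer, opt_param_scheduler,
                    storage_type=StorageType.MEMORY)
if args.save and iteration % args.save_interval == 0:
    # Durable checkpoint written to disk.
    save_checkpoint(iteration, model, optimizer, opt_param_scheduler,
                    storage_type=StorageType.DISK)
```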
In addition, we will add a llama2 example that uses flash checkpoint to save an FSDP checkpoint in the [llama2 example](https://github.com/intelligent-machine-learning/dlrover/blob/master/atorch/examples/llama2/README.md) of ATorch. | 2024-04-11T06:51:33 | 0.0 | [] | [] |
||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-1050 | cb7f1a575fcf0971a1b47ecd723c5897463c37fe | diff --git a/docs/blogs/stabilize_llm_training_cn.md b/docs/blogs/stabilize_llm_training_cn.md
index 712023fc7..ad7f847ff 100644
--- a/docs/blogs/stabilize_llm_training_cn.md
+++ b/docs/blogs/stabilize_llm_training_cn.md
@@ -101,7 +101,7 @@ A worker is a node that runs AI training; it mainly contains a TorchElastic-based
agent that obtains networking information from the master, starts and monitors the training process, and reports heartbeats, as shown in the figure below:
<div align="center">
-<img src="../figures/ft_llm_training/dlrover_ft_arch.png" alt="Editor" width="600">
+<img src="../figures/ft_llm_training/dlrover_ft_arch.jpg" alt="Editor" width="600">
<text>Figure 3: DLRover elastic training architecture</text>
</div>
diff --git a/docs/design/straggler-detection.md b/docs/design/straggler-detection.md
index e50a79e58..b0eda3f6a 100644
--- a/docs/design/straggler-detection.md
+++ b/docs/design/straggler-detection.md
@@ -115,7 +115,7 @@ spec:
containers:
- name: main
# yamllint disable-line rule:line-length
- image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch201-mnist
+ image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:pytorch-example
imagePullPolicy: Always
command:
- /bin/bash
diff --git a/docs/figures/dlrover_ft_arch.jpg b/docs/figures/dlrover_ft_arch.jpg
new file mode 100644
index 000000000..29ce208de
Binary files /dev/null and b/docs/figures/dlrover_ft_arch.jpg differ
diff --git a/docs/figures/ft_llm_training/dlrover_ft_arch.png b/docs/figures/ft_llm_training/dlrover_ft_arch.png
deleted file mode 100644
index 561770b30..000000000
Binary files a/docs/figures/ft_llm_training/dlrover_ft_arch.png and /dev/null differ
diff --git a/docs/tech_report/fault_tolerance_exps.md b/docs/tech_report/fault_tolerance_exps.md
index 0db4a309a..86c512291 100644
--- a/docs/tech_report/fault_tolerance_exps.md
+++ b/docs/tech_report/fault_tolerance_exps.md
@@ -88,7 +88,7 @@ use chaosblade to perform a CPU full load 90% on the `worker-1` with the command
chaosblade-1.7.2/blade create cpu load --cpu-percent 90
```
-If you use the image `registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch201-mnist`,
+If you use the image `registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:pytorch-example`,
you can use chaosblade to create a chaos experiment by
```bash
| Explanation of Figure 3 in the case introduction
In https://github.com/intelligent-machine-learning/dlrover/blob/master/docs/blogs/stabilize_llm_training_cn.md, Figure 3 should read "ElasticJob Operator"; the original is missing a "c"?
| 2024-03-26T07:06:34 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-1004 | c07b6efd27eb1c46542de453fe579c816a1c3e7c | diff --git a/dlrover/go/operator/pkg/controllers/master/master.go b/dlrover/go/operator/pkg/controllers/master/master.go
index 4b9661441..3c722b011 100644
--- a/dlrover/go/operator/pkg/controllers/master/master.go
+++ b/dlrover/go/operator/pkg/controllers/master/master.go
@@ -38,6 +38,7 @@ const (
defaultImagePullPolicy = "Always"
envMasterAddrKey = "DLROVER_MASTER_ADDR"
envBrainServiceAddrKey = "DLROVER_BRAIN_SERVICE_ADDR"
+ envPodIP = "POD_IP"
// ReplicaTypeTrainerMaster is the type for DLRover Master replica.
ReplicaTypeTrainerMaster commonv1.ReplicaType = "dlrover-master"
@@ -266,6 +267,16 @@ func NewMasterTemplateToJob(job *elasticv1alpha1.ElasticJob, masterImage string)
)
}
}
+ podIPEnv := corev1.EnvVar{
+ Name: envPodIP,
+ ValueFrom: &corev1.EnvVarSource{
+ FieldRef: &corev1.ObjectFieldSelector{
+ APIVersion: "v1",
+ FieldPath: "status.podIP",
+ },
+ },
+ }
+ podTemplate.Spec.Containers[0].Env = append(podTemplate.Spec.Containers[0].Env, podIPEnv)
job.Spec.ReplicaSpecs[ReplicaTypeTrainerMaster] = &elasticv1alpha1.ReplicaSpec{
ReplicaSpec: commonv1.ReplicaSpec{
Template: *podTemplate,
diff --git a/dlrover/python/common/global_context.py b/dlrover/python/common/global_context.py
index 5c5a0ef05..75eee7e2e 100644
--- a/dlrover/python/common/global_context.py
+++ b/dlrover/python/common/global_context.py
@@ -121,12 +121,10 @@ def set_params_from_brain(self):
self.factor_to_cut_pending_cpu = self.get_param_value_from_brain(
ConfigKeys.FACTOR_TO_CUT_PENDING_CPU,
DefaultValues.FACTOR_TO_CUT_PENDING_CPU,
- float,
)
self.factor_to_cut_pending_mem = self.get_param_value_from_brain(
ConfigKeys.FACTOR_TO_CUT_PENDING_MEM,
DefaultValues.FACTOR_TO_CUT_PENDING_MEM,
- float,
)
self.seconds_to_wait_pending_pod = self.get_param_value_from_brain(
ConfigKeys.SECONDS_TO_WAIT_PENDING_POD,
@@ -169,7 +167,7 @@ def config_master_port(self, port=0):
if self.master_port is None:
self.master_port = grpc.find_free_port_in_range(20000, 30000)
- def get_param_value_from_brain(self, key_name, default_value, dtype=int):
+ def get_param_value_from_brain(self, key_name, default_value, dtype=float):
"""TODO: Get the configured value from Brain service."""
value = default_value
return dtype(value)
diff --git a/dlrover/python/master/scaler/pod_scaler.py b/dlrover/python/master/scaler/pod_scaler.py
index be263d92f..bec445705 100644
--- a/dlrover/python/master/scaler/pod_scaler.py
+++ b/dlrover/python/master/scaler/pod_scaler.py
@@ -13,6 +13,8 @@
import copy
import json
+import os
+import telnetlib
import threading
import time
from typing import Dict, List, Optional
@@ -434,11 +436,19 @@ def _create_pod(self, node: Node, pod_stats: Dict[str, int], ps_addrs):
env.append(
V1EnvVar(name=NodeEnv.WORKER_RANK, value=str(node.rank_index))
)
- master_service = "elasticjob-{}-dlrover-master:{}".format(
+ master_addr = "elasticjob-{}-dlrover-master:{}".format(
self._job_name, _dlrover_context.master_port
)
+        if not self._check_master_service_available(master_addr):
+ logger.info(
+ "The service of master is not available and use the master IP."
+ )
+ master_ip = os.getenv("POD_IP", "")
+ if not master_ip:
+ raise ValueError("The master Pod must have the POD_IP env.")
+ master_addr = f"{master_ip}:{_dlrover_context.master_port}"
env.append(
- V1EnvVar(name=NodeEnv.DLROVER_MASTER_ADDR, value=master_service)
+ V1EnvVar(name=NodeEnv.DLROVER_MASTER_ADDR, value=master_addr)
)
env.append(
@@ -490,6 +500,16 @@ def _create_pod(self, node: Node, pod_stats: Dict[str, int], ps_addrs):
self._patch_tf_config_into_env(pod, node, pod_stats, ps_addrs)
return pod
+    def _check_master_service_available(self, addr):
+ """Verify that the master grpc servicer is available."""
+ host = addr.split(":")[0]
+ port = int(addr.split(":")[1])
+ try:
+ telnetlib.Telnet(host=host, port=port, timeout=3)
+ return True
+ except Exception:
+ return False
+
def _patch_tf_config_into_env(self, pod, node: Node, pod_stats, ps_addrs):
if self._distribution_strategy == DistributionStrategy.PS and ps_addrs:
tf_config = new_tf_config(
| Failed to connect to the master Pod.

| 2024-02-21T11:47:30 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-976 | 38588a64d1a35f206e75d75c3ebf9bb7c43ff27b | diff --git a/dlrover/python/common/constants.py b/dlrover/python/common/constants.py
index 35af148f6..b7c8ad38c 100644
--- a/dlrover/python/common/constants.py
+++ b/dlrover/python/common/constants.py
@@ -278,7 +278,6 @@ class ConfigPath(object):
class CheckpointConstant(object):
- CKPT_NAME_PREFIX = "checkpoint-"
TRACER_FILE_NAME = "dlrover_latest.txt"
MODEL_STATES_NAME = "model_states"
OPTIM_STATES_NAME = "optim_states"
diff --git a/dlrover/trainer/torch/flash_checkpoint/ddp.py b/dlrover/trainer/torch/flash_checkpoint/ddp.py
index 51061615d..4e0efbdbd 100644
--- a/dlrover/trainer/torch/flash_checkpoint/ddp.py
+++ b/dlrover/trainer/torch/flash_checkpoint/ddp.py
@@ -13,6 +13,8 @@
import os
+import torch.distributed as dist
+
from dlrover.python.common.constants import CheckpointConstant
from dlrover.python.common.storage import PosixDiskStorage
@@ -26,6 +28,17 @@ class DdpCheckpointer(Checkpointer):
Args:
checkpoint_dir: the directory to save the checkpoint.
+ storage: A CheckpointStorage instance. The checkpointer will
+ use a PosixStorage instance if the storage is not defined.
+        local_shard_num (int): the number of shards on a node.
+            The default is 1. If the model is partitioned across all ranks,
+            set local_shard_num to the number of ranks on a node.
+        global_shard_num (int): the number of shards across all ranks.
+            The default is 1. If the model is partitioned across all ranks,
+            set global_shard_num to the number of all ranks.
+        comm_backend (str): the communication backend used to create a
+            process group. The default is the backend of the general main
+            process group.
Examples::
>>> checkpointer = DdpCheckpointer(
@@ -46,16 +59,33 @@ class DdpCheckpointer(Checkpointer):
        >>> state_dict = checkpointer.load_checkpoint()
"""
- def __init__(self, checkpoint_dir: str, storage=None):
+ def __init__(
+ self,
+ checkpoint_dir: str,
+ storage=None,
+ local_shard_num=1,
+ global_shard_num=1,
+ comm_backend="",
+ ):
self.checkpoint_dir = checkpoint_dir
+ if dist.is_initialized():
+ self._rank = dist.get_rank()
+ else:
+ self._rank = 0
self.storage = PosixDiskStorage() if not storage else storage
- self._engine = DdpCheckpointEngine(checkpoint_dir, self.storage)
+ self._engine = DdpCheckpointEngine(
+ checkpoint_dir=checkpoint_dir,
+ storage=self.storage,
+ local_shard_num=local_shard_num,
+ global_shard_num=global_shard_num,
+ comm_backend=comm_backend,
+ )
def save_checkpoint(
self, step, state_dict, path="", storage_type=StorageType.DISK
):
if path == "":
- ckpt_name = f"{CheckpointConstant.CKPT_NAME_PREFIX}{step}.pt"
+ ckpt_name = f"{step}/rank_{self._rank}.pt"
path = os.path.join(self.checkpoint_dir, ckpt_name)
state_dict = {CheckpointConstant.MODEL_STATES_NAME: state_dict}
paths = {CheckpointConstant.MODEL_STATES_NAME: path}
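A hedged usage sketch of the new sharding parameters, e.g. for a model partitioned across the 8 ranks of one node; the directory and step values are illustrative:

```Python
from dlrover.trainer.torch.flash_checkpoint.ddp import DdpCheckpointer

# Every rank holds one shard, so both shard counts equal the rank count.
checkpointer = DdpCheckpointer(
    checkpoint_dir="/tmp/flash_ckpt",
    local_shard_num=8,
    global_shard_num=8,
)
# Each rank writes its shard to "{step}/rank_{rank}.pt" under checkpoint_dir.
checkpointer.save_checkpoint(step, model.state_dict())
```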
diff --git a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
index 311551bea..2a40e5f74 100644
--- a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
@@ -34,10 +34,6 @@ class DdpCheckpointEngine(CheckpointEngine):
"""
Save the checkpoint state dict of DDP model into the memory or storage.
- Attributes:
- checkpoint_dir (str): the directory to save the temp checkpoint
- if the training process fails.
-
Examples::
>>> engine = DdpCheckpointEngine(
>>> checkpoint_dir="/tmp/checkpoint/"
@@ -53,8 +49,20 @@ class DdpCheckpointEngine(CheckpointEngine):
>>> sate_dict = engine.load()
"""
- def __init__(self, checkpoint_dir, storage):
- super().__init__(checkpoint_dir, storage)
+ def __init__(
+ self,
+ checkpoint_dir,
+ storage,
+ local_shard_num=1,
+ global_shard_num=1,
+ comm_backend="",
+ ):
+ if global_shard_num < local_shard_num:
+ global_shard_num = local_shard_num
+ logger.info(f"Set global_shard_num to {local_shard_num}.")
+ self._local_shard_num = local_shard_num
+ self._global_shard_num = global_shard_num
+ super().__init__(checkpoint_dir, storage, comm_backend)
def get_saving_ranks(self):
"""
@@ -65,16 +73,17 @@ def get_saving_ranks(self):
local_world_size = env_utils.get_local_world_size()
save_ranks = []
for i in range(group_size):
- saver_rank = i * local_world_size
- save_ranks.append(saver_rank)
+ for j in range(self._local_shard_num):
+ saver_rank = i * local_world_size + j
+ save_ranks.append(saver_rank)
logger.info(f"The ranks to save checkpoint are {save_ranks}.")
return save_ranks
def get_local_shard_num(self):
- return 1
+ return self._local_shard_num
def get_global_shard_num(self):
- return 1
+ return self._global_shard_num
def get_saver_class(self):
return DdpCheckpointSaver
@@ -114,8 +123,6 @@ def save_to_storage(self, step, state_dict, paths):
["model_states", "optim_states"] of the state dict and
the value is the path of storage to save.
"""
- if self._local_rank != 0:
- return
succeed = True
if step > self._cached_step:
succeed = self.save_to_memory(step, state_dict, paths)
@@ -180,7 +187,11 @@ def _load_from_storage(self, resume_path=""):
if not content:
return state_dict
iteration = int(content.strip())
- name = f"{CheckpointConstant.CKPT_NAME_PREFIX}{iteration}.pt"
+            if self._global_shard_num == 1:
+                # Without sharding, every rank loads the full checkpoint
+                # saved by rank 0.
+                name = f"{iteration}/rank_0.pt"
+            else:
+                # With sharding, each rank loads its own shard.
+                name = f"{iteration}/rank_{self._rank}.pt"
path = os.path.join(self.checkpoint_dir, name)
logger.info(f"Load the state dict from {path}")
state_dict = self.storage.read_state_dict(
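A small worked example of the saver-rank computation in `get_saving_ranks` above, assuming `group_size` counts the nodes, with 2 nodes of 8 ranks each and `local_shard_num=2`:

```Python
group_size = 2          # number of nodes (assumed meaning of group_size)
local_world_size = 8    # ranks per node
local_shard_num = 2     # shards saved per node

save_ranks = [
    i * local_world_size + j
    for i in range(group_size)
    for j in range(local_shard_num)
]
print(save_ranks)  # [0, 1, 8, 9]
```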
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
index 3ab55d172..3fbadf123 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
@@ -94,6 +94,10 @@ class DeepSpeedCheckpointer(Checkpointer):
Args:
checkpoint_dir: the directory to save the checkpoint.
+ storage: A CheckpointStorage instance. The checkpointer will
+ use a PosixStorage instance if the storage is not defined.
+        comm_backend (str): the communication backend used to synchronize
+            ranks when saving the checkpoint to memory.
Examples::
>>> engine = deepspeed.initialize(...)
@@ -108,7 +112,13 @@ class DeepSpeedCheckpointer(Checkpointer):
>>> )
"""
- def __init__(self, engine: DeepSpeedEngine, checkpoint_dir, storage=None):
+ def __init__(
+ self,
+ engine: DeepSpeedEngine,
+ checkpoint_dir,
+ storage=None,
+ comm_backend="",
+ ):
self.engine = engine
self.checkpoint_dir = checkpoint_dir
global_shard_num = 1
@@ -123,6 +133,7 @@ def __init__(self, engine: DeepSpeedEngine, checkpoint_dir, storage=None):
storage=self.storage,
global_shard_num=global_shard_num,
zero_stage=zero_stage,
+ comm_backend=comm_backend,
)
self._ckpt_engine = AsyncSaveEngine(self._async_save_engine.storage)
self.engine.checkpoint_engine = self._ckpt_engine
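A hedged sketch of the DeepSpeed path; `save_checkpoint` is assumed to accept the same `(save_dir, tag)` arguments as `DeepSpeedEngine.save_checkpoint` plus the `storage_type` keyword shown in the class docstring:

```Python
import deepspeed
from dlrover.trainer.torch.flash_checkpoint.deepspeed import (
    DeepSpeedCheckpointer,
    StorageType,
)

model_engine, optimizer, _, _ = deepspeed.initialize(
    model=model, model_parameters=model.parameters(), config=ds_config
)
checkpointer = DeepSpeedCheckpointer(model_engine, save_dir)
if step % 10 == 0:
    # Assumed signature: mirrors engine.save_checkpoint with storage_type added.
    checkpointer.save_checkpoint(save_dir, tag=step, storage_type=StorageType.MEMORY)
```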
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
index f200f1bb5..258901e5f 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
@@ -39,14 +39,23 @@ class DeepSpeedCheckpointEngine(CheckpointEngine):
checkpoint_dir (str): the directory to save the temp checkpoint
if the training process fails.
dp_size (int): the world size of data parallelism.
+ global_shard_num (int): the number of shards across all ranks.
+ zero_stage (int): the DeepSpeed ZERO Stage number.
+        comm_backend (str): the communication backend used to synchronize
+            ranks when saving the checkpoint to memory.
"""
def __init__(
- self, checkpoint_dir, storage, global_shard_num=1, zero_stage=0
+ self,
+ checkpoint_dir,
+ storage,
+ global_shard_num=1,
+ zero_stage=0,
+ comm_backend="",
):
self.global_shard_num = global_shard_num
self.zero_stage = zero_stage
- super().__init__(checkpoint_dir, storage)
+ super().__init__(checkpoint_dir, storage, comm_backend)
def get_saving_ranks(self):
"""
diff --git a/dlrover/trainer/torch/flash_checkpoint/engine.py b/dlrover/trainer/torch/flash_checkpoint/engine.py
index 31b9d5ca4..526c9c730 100644
--- a/dlrover/trainer/torch/flash_checkpoint/engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/engine.py
@@ -75,7 +75,7 @@ def verify_all_rank_step_consistent(group: dist.ProcessGroup, step):
world_size = group.size()
else:
world_size = dist.get_world_size()
- outputs = [torch.tensor([0.0]) for _ in range(world_size)]
+ outputs = [torch.tensor([0.0]).to(device) for _ in range(world_size)]
dist.all_gather(outputs, t, group=group)
succeed = True
for step in outputs:
@@ -140,8 +140,8 @@ class CheckpointEngine(metaclass=ABCMeta):
Args:
checkpoint_dir (str): the directory to save checkpoint.
storage: a CheckpointStorage instance to write/read the storage.
- comm_backend (str): the backend to create a communcation group,
- default is gloo.
+        comm_backend (str): the communication backend used to create a
+            process group. The default is the backend of the general main
+            process group.
"""
saver_proc = None
@@ -150,7 +150,7 @@ def __init__(
self,
checkpoint_dir: str,
storage: CheckpointStorage,
- comm_backend: str = "gloo",
+ comm_backend: str = "",
):
if not self.saver_proc:
self.saver_proc = start_saver_process()
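The backend-dependent device choice in `check_all_rank_ready` and `verify_all_rank_step_consistent` exists because NCCL collectives only operate on CUDA tensors while gloo works with CPU tensors; a standalone sketch of the pattern, where `readiness_flag` is a hypothetical helper name (the engines inline this logic):

```Python
import torch
import torch.distributed as dist


def readiness_flag(ready: bool, local_rank: int) -> torch.Tensor:
    # NCCL can only all-reduce CUDA tensors; gloo expects CPU tensors.
    backend = dist.get_backend()
    device = "cpu" if backend == "gloo" else f"cuda:{local_rank}"
    return torch.tensor([0 if ready else 1], dtype=torch.int32).to(device)
```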
diff --git a/dlrover/trainer/torch/flash_checkpoint/fsdp.py b/dlrover/trainer/torch/flash_checkpoint/fsdp.py
index a77d8d253..117cdbf97 100644
--- a/dlrover/trainer/torch/flash_checkpoint/fsdp.py
+++ b/dlrover/trainer/torch/flash_checkpoint/fsdp.py
@@ -24,6 +24,10 @@ class FsdpCheckpointer(Checkpointer):
Args:
checkpoint_dir: the directory to save the checkpoint.
+ storage: A CheckpointStorage instance. The checkpointer will
+ use a PosixStorage instance if the storage is not defined.
+        comm_backend (str): the communication backend used to synchronize
+            ranks when saving the checkpoint to memory.
Examples::
>>> checkpointer = FsdpCheckpointer(checkpoint_dir)
@@ -64,9 +68,11 @@ class FsdpCheckpointer(Checkpointer):
>>> optimizer.load_state_dict(flattened_osd)
"""
- def __init__(self, checkpoint_dir: str, storage=None):
+ def __init__(self, checkpoint_dir: str, storage=None, comm_backend=""):
self.storage = PosixDiskStorage() if not storage else storage
- self._engine = FsdpCheckpointEngine(checkpoint_dir, self.storage)
+ self._engine = FsdpCheckpointEngine(
+ checkpoint_dir, self.storage, comm_backend
+ )
def save_checkpoint(
self, step, state_dict, path, storage_type=StorageType.DISK
diff --git a/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py b/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
index 8be6af27a..7c66c5481 100644
--- a/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
@@ -419,8 +419,8 @@ class FsdpCheckpointEngine(CheckpointEngine):
and storage.
"""
- def __init__(self, checkpoint_dir: str, storage):
- super().__init__(checkpoint_dir, storage)
+ def __init__(self, checkpoint_dir: str, storage, comm_backend=""):
+ super().__init__(checkpoint_dir, storage, comm_backend)
self._shm_writer = SharedMemoryWriter(shm_handler=self._shm_handler)
self._shm_reader = SharedMemoryReader(self._shm_handler)
@@ -486,8 +486,6 @@ def save_to_memory(self, step, state_dict, paths: Dict[str, str]):
if acquired:
self._shm_lock.release()
self._cached_step = conf.step
- if dist.is_initialized():
- dist.barrier(group=self._saver_group)
return True
def save_to_storage(self, step, state_dict, paths: Dict[str, str]):
@@ -502,6 +500,9 @@ def save_to_storage(self, step, state_dict, paths: Dict[str, str]):
if step > self._cached_step:
succeed = self.save_to_memory(step, state_dict, paths)
+ if dist.is_initialized():
+ dist.barrier()
+
# Only local rank 0 on each node notifies the event to save.
if self._local_rank == 0 and succeed:
logger.info(
diff --git a/dlrover/trainer/torch/flash_checkpoint/megatron.py b/dlrover/trainer/torch/flash_checkpoint/megatron.py
index 2cf69c081..be7ed334d 100644
--- a/dlrover/trainer/torch/flash_checkpoint/megatron.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron.py
@@ -49,12 +49,16 @@ def _get_rank():
@singleton
class MegatronCheckpointManager(object):
- def __init__(self, checkpoint_dir, storage=None):
+ def __init__(self, checkpoint_dir, storage=None, comm_backend=""):
self.state_dict = {}
self.paths = {}
self.checkpoint_dir = checkpoint_dir
self.storage = PosixDiskStorage() if not storage else storage
- self.engine = MegatronCheckpointEngine(checkpoint_dir, self.storage)
+ self.engine = MegatronCheckpointEngine(
+ checkpoint_dir=checkpoint_dir,
+ storage=self.storage,
+ comm_backend=comm_backend,
+ )
def save(self, state_dict, path: str):
if path.endswith(_MODEL_SD_NAME):
@@ -120,16 +124,25 @@ def save_checkpoint(
optimizer,
opt_param_scheduler,
storage_type=StorageType.DISK,
+ storage=None,
+ comm_backend="",
):
"""
    Synchronously save the checkpointing state dict into the CPU memory.
    Args:
        same as the `megatron.checkpointing.save_checkpoint`
+        storage: A CheckpointStorage instance. The checkpointer will
+            use a PosixStorage instance if the storage is not defined.
+        comm_backend (str): the communication backend used to synchronize
+            ranks when saving the checkpoint to memory.
"""
+ args = get_args()
+ saver = MegatronCheckpointManager(
+ args.save, storage=storage, comm_backend=comm_backend
+ )
if storage_type == StorageType.MEMORY:
- args = get_args()
- saver = MegatronCheckpointManager(args.save)
+
torch_save_func = torch.save
torch.save = saver.save
megatron_save(iteration, model, optimizer, opt_param_scheduler)
@@ -143,8 +156,6 @@ def save_checkpoint(
if _get_rank() == 0:
saver.update_tracer_file(iteration)
elif storage_type == StorageType.DISK:
- args = get_args()
- saver = MegatronCheckpointManager(args.save)
torch_save_func = torch.save
torch.save = saver.save
megatron_save(iteration, model, optimizer, opt_param_scheduler)
@@ -155,7 +166,12 @@ def save_checkpoint(
def load_checkpoint(
- model, optimizer, opt_param_scheduler, load_arg="load", strict=True
+ model,
+ optimizer,
+ opt_param_scheduler,
+ load_arg="load",
+ strict=True,
+ comm_backend="",
):
"""Load the checkpointing state dict. The method firstly
load the state dict from the CPU memory and then from the storage.
@@ -163,7 +179,7 @@ def load_checkpoint(
same as the `megatron.checkpointing.load_checkpoint`
"""
args = get_args()
- saver = MegatronCheckpointManager(args.save)
+ saver = MegatronCheckpointManager(args.save, comm_backend=comm_backend)
torch_load_func = torch.load
torch.load = saver.load
iteration = megatron_load(
diff --git a/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py b/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
index 0b598f43a..ef4a17436 100644
--- a/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
@@ -31,13 +31,9 @@ class MegatronCheckpointEngine(CheckpointEngine):
the shared memory and notify the agent in main process to
asynchronously save the state dict from the shared memory into
the storage.
-
- Attributes:
- checkpoint_dir (str): the directory to save the temp checkpoint
- if the training process fails.
"""
- def __init__(self, checkpoint_dir, storage):
+ def __init__(self, checkpoint_dir, storage, comm_backend=""):
if dist.is_initialized():
try:
from megatron.core import mpu
@@ -57,7 +53,7 @@ def __init__(self, checkpoint_dir, storage):
self._pp_world_size = 1
self._tp_world_size = 1
- super().__init__(checkpoint_dir, storage)
+ super().__init__(checkpoint_dir, storage, comm_backend)
def get_saving_ranks(self):
"""
| DdpCheckpointer does not support Huawei AscendSpeed
I used Huawei AscendSpeed to partition the model across 8 NPUs and then used DdpCheckpointer to save the checkpoint, but the file was only saved on NPU 0.
I look forward to support for AscendSpeed + DDP.
| 2024-01-26T06:35:41 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-959 | ffbc33aa8365011133acb793880e4a5ca73e1308 | diff --git a/dlrover/python/common/multi_process.py b/dlrover/python/common/multi_process.py
index 81379fcf7..c18570a5e 100644
--- a/dlrover/python/common/multi_process.py
+++ b/dlrover/python/common/multi_process.py
@@ -34,6 +34,23 @@
ERROR_CODE = "ERROR"
+def retry_socket(func):
+    """Retry a socket call until the server-side socket file is ready.
+
+    The wrapped method is retried once per second, 30 times by default
+    (or ``retry`` times if passed as a keyword argument), on
+    FileNotFoundError/ConnectionRefusedError. A final attempt is made
+    without catching the exception so the caller sees the real error.
+    """
+
+    def wrapper(self, *args, **kwargs):
+        retry = kwargs.get("retry", 30)
+        succeed = False
+        for _ in range(retry):
+            try:
+                result = func(self, *args, **kwargs)
+                succeed = True
+                return result
+            except (FileNotFoundError, ConnectionRefusedError):
+                time.sleep(1)
+        if not succeed:
+            # Let the final attempt raise if the socket is still unavailable.
+            return func(self, *args, **kwargs)
+
+    return wrapper
+
+
def _create_socket_server(path):
"""
Create a socket server.
@@ -197,6 +214,7 @@ def _sync(self):
"""Synchronize the obj between processes."""
pass
+ @retry_socket
def _request(self, request: SocketRequest):
"""Create a socket client to requet the shared object."""
client = _create_socket_client(self._socket_file)
diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index 40a42c632..5a896877c 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -918,11 +918,9 @@ def commit_checkpoint( # type: ignore
# commit checkpoint
shutil.move(tmp_path, target_path)
-
- self.update_tracker_file(step)
-
# clean stage dir
shutil.rmtree(step_done_dir)
+ self.update_tracker_file(step)
logger.info(
f"Commit checkpoint tmp_path: {tmp_path}, "
f"path: {target_path}"
diff --git a/dlrover/python/master/scaler/pod_scaler.py b/dlrover/python/master/scaler/pod_scaler.py
index 55aae093e..f934622f0 100644
--- a/dlrover/python/master/scaler/pod_scaler.py
+++ b/dlrover/python/master/scaler/pod_scaler.py
@@ -410,8 +410,11 @@ def _create_pod(self, node: Node, pod_stats: Dict[str, int], ps_addrs):
# The two env vars is compatible with kubeflow/PytorchJob because
# users may use the scripts of kubeflow/PytorchJob in the ElasticJob.
- env.append(V1EnvVar(name=NodeEnv.WORLD_SIZE, value=str(worker_num)))
- env.append(V1EnvVar(name=NodeEnv.RANK, value=str(node.rank_index)))
+ if self._distribution_strategy == DistributionStrategy.ALLREDUCE:
+ env.append(
+ V1EnvVar(name=NodeEnv.WORLD_SIZE, value=str(worker_num))
+ )
+ env.append(V1EnvVar(name=NodeEnv.RANK, value=str(node.rank_index)))
# Deprecated env vars
env.append(V1EnvVar(name=NodeEnv.WORKER_TYPE, value=node.type))
diff --git a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
index 7d2c6fc80..a13134818 100644
--- a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
@@ -104,7 +104,7 @@ def save_to_memory(self, step, state_dict, paths: Dict[str, str]):
the value is the path of storage to save.
"""
conf = CheckpointConfig(step=step, paths=paths)
- self.save_state_dict_to_memory(state_dict, conf)
+ return self.save_state_dict_to_memory(state_dict, conf)
@timer
def save_to_storage(self, step, state_dict, paths):
@@ -124,12 +124,13 @@ def save_to_storage(self, step, state_dict, paths):
"""
if self._local_rank != 0:
return
+ succeed = True
if step > self._cached_step:
- self.save_to_memory(step, state_dict, paths)
- event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
-
+ succeed = self.save_to_memory(step, state_dict, paths)
# Only rank 0 persist the checkpoint to the storage.
- self._event_queue.put(event)
+ if succeed and self._rank == 0:
+ event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
+ self._event_queue.put(event)
def load(self, resume_path=""):
"""
diff --git a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
index f843257b9..e9e83b54e 100644
--- a/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed_engine.py
@@ -90,7 +90,7 @@ def save_to_memory(self, step, state_dict, paths):
the value is the path of storage to save.
"""
conf = CheckpointConfig(step=step, paths=paths)
- self.save_state_dict_to_memory(state_dict, conf)
+ return self.save_state_dict_to_memory(state_dict, conf)
@timer
def save_to_storage(self, step, state_dict, paths):
@@ -108,13 +108,12 @@ def save_to_storage(self, step, state_dict, paths):
["model_states", "optim_states"] of the state dict and
the value is the path of storage to save.
"""
+ succeed = True
if step > self._cached_step:
- self.save_to_memory(step, state_dict, paths)
+ succeed = self.save_to_memory(step, state_dict, paths)
# Only local rank 0 to notify the saving event to the agent.
- if self._local_rank != 0:
- return
- if state_dict:
+ if self._local_rank == 0 and succeed:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
diff --git a/dlrover/trainer/torch/flash_checkpoint/engine.py b/dlrover/trainer/torch/flash_checkpoint/engine.py
index b3709ea60..74e51cc9c 100644
--- a/dlrover/trainer/torch/flash_checkpoint/engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/engine.py
@@ -14,6 +14,7 @@
import os
import time
from abc import ABCMeta, abstractmethod
+from datetime import timedelta
from multiprocessing import Process
from typing import Dict
@@ -42,8 +43,11 @@ def check_all_rank_ready(group: dist.ProcessGroup, ready):
"""
if not group:
return ready
+ backend = dist.get_backend()
+ local_rank = env_utils.get_local_rank()
+ device = "cpu" if backend == "gloo" else f"cuda:{local_rank}"
value = 0 if ready else 1
- t = torch.tensor([value], dtype=torch.int64)
+ t = torch.tensor([value], dtype=torch.int32).to(device)
dist.all_reduce(t, group=group)
return t == 0
@@ -54,9 +58,12 @@ def verify_all_rank_step_consistent(group: dist.ProcessGroup, step):
"""
if not group:
return True
- t = torch.Tensor([float(step)])
+ backend = dist.get_backend()
+ local_rank = env_utils.get_local_rank()
+ device = "cpu" if backend == "gloo" else f"cuda:{local_rank}"
+ t = torch.tensor([float(step)]).to(device)
world_size = group.size()
- outputs = [torch.Tensor([0.0]) for _ in range(world_size)]
+ outputs = [torch.tensor([0.0]) for _ in range(world_size)]
dist.all_gather(outputs, t, group=group)
for step in outputs:
if not torch.equal(step, outputs[0]):
@@ -129,7 +136,11 @@ def __init__(self, checkpoint_dir: str, storage: CheckpointStorage):
self.storage = storage
if dist.is_initialized():
self._rank = dist.get_rank()
- self._loader_group = dist.new_group(backend="gloo")
+ backend = dist.get_backend()
+ self._loader_group = dist.new_group(
+ backend=backend,
+ timeout=timedelta(seconds=30),
+ )
else:
self._rank = 0
self._loader_group = None
@@ -191,16 +202,7 @@ def _notify_agent_to_create_saver(self):
},
)
- succeed = False
- for _ in range(3):
- try:
- queue.put(class_meta)
- succeed = True
- break
- except FileNotFoundError:
- time.sleep(3)
- if not succeed:
- queue.put(class_meta)
+ queue.put(class_meta)
queue.unlink()
def _update_saver_config(self):
@@ -215,17 +217,11 @@ def _update_saver_config(self):
raise ValueError(
"The event queue cannot be None on local rank 0."
)
- for _ in range(3):
- try:
- self._event_queue.put(event)
- return
- except FileNotFoundError:
- time.sleep(3)
self._event_queue.put(event)
def save_state_dict_to_memory(self, state_dict, conf: CheckpointConfig):
if self._local_rank != self.local_shard_id:
- return
+ return False
acquired = self._shm_lock.acquire(blocking=False)
all_rank_ready = check_all_rank_ready(self._saver_group, acquired)
@@ -237,7 +233,7 @@ def save_state_dict_to_memory(self, state_dict, conf: CheckpointConfig):
)
if acquired:
self._shm_lock.release()
- return
+ return False
state_dict[DLROVER_CKPT_CONFIG_KEY] = conf
self._shm_handler.save_state_dict(state_dict)
@@ -246,6 +242,7 @@ def save_state_dict_to_memory(self, state_dict, conf: CheckpointConfig):
self._cached_step = conf.step
if dist.is_initialized():
dist.barrier(group=self._saver_group)
+ return True
def get_state_dict_from_memory(self):
state_dict = {}
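`save_state_dict_to_memory` uses an all-or-nothing handshake: each saving rank tries a non-blocking lock on its shared memory, and a single all-reduce decides whether every rank may write. A minimal sketch of the pattern; `shm_lock`, `saver_group`, and `write_state_dict_to_shm` are illustrative stand-ins for the engine's shared lock, saver process group, and shared-memory handler:

```Python
import torch
import torch.distributed as dist

acquired = shm_lock.acquire(blocking=False)
flag = torch.tensor([0 if acquired else 1], dtype=torch.int32)
dist.all_reduce(flag, group=saver_group)

if flag.item() != 0:
    # Some rank's shared memory is still being persisted by the saver,
    # so every rank skips this snapshot to keep the shards step-consistent.
    if acquired:
        shm_lock.release()
else:
    write_state_dict_to_shm(state_dict)  # hypothetical helper
    shm_lock.release()
```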
diff --git a/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py b/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
index 223e83627..0166328f4 100644
--- a/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/fsdp_engine.py
@@ -441,7 +441,7 @@ def save_to_memory(self, step, state_dict, paths: Dict[str, str]):
only if the training process fails.
"""
if self._local_rank != self.local_shard_id:
- return
+ return False
acquired = self._shm_lock.acquire(blocking=False)
all_rank_ready = check_all_rank_ready(self._saver_group, acquired)
@@ -453,7 +453,7 @@ def save_to_memory(self, step, state_dict, paths: Dict[str, str]):
)
if acquired:
self._shm_lock.release()
- return
+ return False
conf = CheckpointConfig(step=step)
conf.writing_shm = True
@@ -481,6 +481,7 @@ def save_to_memory(self, step, state_dict, paths: Dict[str, str]):
self._cached_step = conf.step
if dist.is_initialized():
dist.barrier(group=self._saver_group)
+ return True
def save_to_storage(self, step, state_dict, paths: Dict[str, str]):
"""
@@ -490,13 +491,12 @@ def save_to_storage(self, step, state_dict, paths: Dict[str, str]):
step (int): the iteration step.
state_dict (dict): the state dict of model and optimizer to save.
"""
+ succeed = True
if step > self._cached_step:
- self.save_to_memory(step, state_dict, paths)
+ succeed = self.save_to_memory(step, state_dict, paths)
# Only local rank 0 on each node notifies the event to save.
- if self._local_rank != 0:
- return
- if paths:
+ if self._local_rank == 0 and succeed:
logger.info(
"Put a save event to notify the agent persists checkpoint."
)
diff --git a/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py b/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
index 16c763a1c..7f7e8c7f9 100644
--- a/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron_engine.py
@@ -99,7 +99,7 @@ def save_to_memory(self, step, state_dict, paths):
state_dict (dict): the state dict of model and optimizer to save.
"""
conf = CheckpointConfig(step=step, paths=paths)
- self.save_state_dict_to_memory(state_dict, conf)
+ return self.save_state_dict_to_memory(state_dict, conf)
@timer
def save_to_storage(self, step, state_dict, paths):
@@ -114,13 +114,14 @@ def save_to_storage(self, step, state_dict, paths):
step (int): the iteration step.
state_dict (dict): the state dict of model and optimizer to save.
"""
+ succeed = True
if step > self._cached_step:
- self.save_to_memory(step, state_dict, paths)
+ succeed = self.save_to_memory(step, state_dict, paths)
# Only local rank 0 to notify the saving event to the agent.
if self._dp_rank != 0 or self._local_rank != 0:
return
- if state_dict:
+ if succeed:
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
self._event_queue.put(event)
| Calling `save_to_storage` takes too long when the saver is still persisting the latest checkpoint.
```text
[2024-01-17 19:16:54,785] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,785] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,785] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,785] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,785] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 0 save checkpoint failed for step 15740
[2024-01-17 19:16:54,785] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:508:_save_shard] The step 15740 in event is no equal to step 15730 in memory.
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 1 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 2 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 3 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 4 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 5 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 6 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:814:save_step_checkpoint] Rank 7 save checkpoint failed for step 15740
[2024-01-17 19:16:54,786] [ERROR] [ckpt_saver.py:822:save_step_checkpoint] Rank 0 save checkpoint failed for step 15740
```
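The patch above addresses this by making the in-memory save return a success flag and only enqueueing the persistence event when it is true, so the event step always matches the step cached in shared memory; schematically (variable names abbreviated from the engine code):

```Python
succeed = True
if step > cached_step:
    succeed = engine.save_to_memory(step, state_dict, paths)
if local_rank == 0 and succeed:
    event_queue.put(CheckpointEvent(type=CheckpointEventType.SAVE, step=step))
```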
| 2024-01-17T12:24:02 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-927 | 78e7866eb7959e23f75ecc4f66e139019cf08999 | diff --git a/README.md b/README.md
index 758cbf793..381467e03 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ training job. The actions to restore training in DLRover are:
3. Restart the failed nodes due to hardward errors.
For detail, we can see [experiments](docs/tech_report/fault_tolerance_exps.md)
-of fault-tolerance and elasticity. With fault tolerance, the goodput of GLM-65B training
+of fault-tolerance and elasticity. **With fault tolerance, the goodput of GLM-65B training
on thousands of GPUs increased from 69% to 95%**. The goodput is the time spent computing
useful new steps over the elapsed time of the training job.
The downtime details are shown:
diff --git a/dlrover/trainer/torch/elastic_run.py b/dlrover/trainer/torch/elastic_run.py
index fa0902d3a..275dc477f 100644
--- a/dlrover/trainer/torch/elastic_run.py
+++ b/dlrover/trainer/torch/elastic_run.py
@@ -278,6 +278,7 @@ def run(args):
config, cmd, cmd_args = _elastic_config_from_args(args)
config.run_id = job_name
+ config.role = "dlrover-trainer"
try:
elastic_launch(
config=config,
diff --git a/dlrover/trainer/torch/flash_checkpoint/engine.py b/dlrover/trainer/torch/flash_checkpoint/engine.py
index 9a3326a9b..96550c0c4 100644
--- a/dlrover/trainer/torch/flash_checkpoint/engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/engine.py
@@ -14,6 +14,7 @@
import os
import time
from abc import ABCMeta, abstractmethod
+from multiprocessing import Process
import torch
import torch.distributed as dist
@@ -23,6 +24,7 @@
from dlrover.python.common.multi_process import SharedLock, SharedQueue
from dlrover.python.elastic_agent.torch.ckpt_saver import (
DLROVER_CKPT_CONFIG_KEY,
+ AsyncCheckpointSaver,
CheckpointEvent,
CheckpointEventType,
CheckpointShardConfig,
@@ -72,6 +74,34 @@ def wrapper(*args, **kwargs):
return wrapper
+def start_async_save():
+ AsyncCheckpointSaver.start_async_saving_ckpt()
+ while True:
+ time.sleep(36000)
+
+
+def start_saver_process():
+ """
+    Start a process to asynchronously save the checkpoint if the training
+    process is not launched by `dlrover-run`. This process will
+    exit and cannot save the checkpoint after the training process exits.
+    It is better to use `dlrover-run` to start the training process:
+    `dlrover-run` can save the checkpoint once the training process fails
+    and relaunch new training processes which restore the checkpoint
+    from memory rather than from storage.
+ """
+ local_rank = env_utils.get_local_rank()
+ role_name = os.getenv("ROLE_NAME", "")
+ # Only start the process on local rank 0
+ # if the training process is not launched by dlrover-run.
+ if role_name != "dlrover-trainer" and local_rank == 0:
+ p = Process(target=start_async_save, daemon=True)
+ p.start()
+ logger.info("Start a process to asynchronously save checkpoint.")
+ return p
+ return None
+
+
class CheckpointEngine(metaclass=ABCMeta):
"""
The checkpoint engine synchronously writes the state dict into
@@ -89,7 +119,11 @@ class CheckpointEngine(metaclass=ABCMeta):
checkpoint_dir (str): the directory to save checkpoint.
"""
+ saver_proc = None
+
def __init__(self, checkpoint_dir: str):
+ if not self.saver_proc:
+ self.saver_proc = start_saver_process()
self.checkpoint_dir = checkpoint_dir
if dist.is_initialized():
self._rank = dist.get_rank()
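In practice, a script launched with plain `torchrun` therefore still gets asynchronous persistence: constructing the first checkpoint engine on local rank 0 spawns the daemon saver. A hedged sketch using the DDP checkpointer shown elsewhere in this document:

```Python
from dlrover.trainer.torch.flash_checkpoint.ddp import DdpCheckpointer

# Launched via `torchrun ...` (no dlrover-run): building the checkpointer
# starts the daemon saver process on local rank 0.
checkpointer = DdpCheckpointer(checkpoint_dir="/tmp/flash_ckpt")
checkpointer.save_checkpoint(step, model.state_dict())  # persisted asynchronously
```

In-memory hot loading across process restarts still requires `dlrover-run`, since the daemon saver dies together with the training process.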
diff --git a/docs/blogs/flash_checkpoint.md b/docs/blogs/flash_checkpoint.md
index 794aabc9e..e122c7b0c 100644
--- a/docs/blogs/flash_checkpoint.md
+++ b/docs/blogs/flash_checkpoint.md
@@ -272,8 +272,9 @@ if args.save and iteration % save_memory_interval == 0:
opt_param_scheduler, storage_type=StorageType.MEMORY,)
```
-Note: To use the Flash Checkpoint APIs, the training script needs to be launched with dlrover-run.
-The usage of dlrover-run is consistent with torchrun. The following example demonstrates
+**Note**: To use Flash Checkpoint, the training script needs to be launched with dlrover-run.
+If we launch the training process with another command like `torchrun`, the training can only
+use asynchronous persistence. The usage of dlrover-run is consistent with torchrun. The following example demonstrates
how to start single-node multi-GPU training:
```bash
diff --git a/docs/blogs/flash_checkpoint_cn.md b/docs/blogs/flash_checkpoint_cn.md
index 06e4bdca2..864d2e42d 100644
--- a/docs/blogs/flash_checkpoint_cn.md
+++ b/docs/blogs/flash_checkpoint_cn.md
@@ -237,8 +237,8 @@ if args.save and iteration % save_memory_interval == 0:
opt_param_scheduler, storage_type=StorageType.MEMORY,)
```
-**Note**: The Flash Checkpoint APIs require launching the training script with dlrover-run.
-The usage of dlrover-run is consistent with torchrun; start single-node multi-GPU training as follows:
+**Note**: Flash Checkpoint's resumable persistence and in-memory hot loading require launching the training script with `dlrover-run`. If the job is launched another way, e.g. with `torchrun`,
+only asynchronous persistence is available. The usage of `dlrover-run` is consistent with `torchrun`; start single-node multi-GPU training as follows:
```bash
dlrover-run --nnodes=1 --max_restarts=2 --nproc_per_node=2 train.py
diff --git a/setup.py b/setup.py
index 2e8de101a..578ca9db2 100644
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@
setup(
name="dlrover",
- version="0.3.0rc1",
+ version="0.3.1rc0",
description="An Automatic Distributed Deep Learning Framework",
long_description="DLRover helps model developers focus on model algorithm"
" itself, without taking care of any engineering stuff,"
| Flash Checkpoint should asynchronously save checkpoints without dlrover-run.
Users may use `deepspeed` or `torchrun` to launch the training process; Flash Checkpoint should support them as well.
| 2024-01-04T10:46:11 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-919 | eaebfb2b9c2cbc118dab138c03d5d41ccc109807 | diff --git a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
index ef5652dc5..d1c4836dd 100644
--- a/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/ddp_engine.py
@@ -86,7 +86,7 @@ def get_saver_class(self):
return CommonDirCheckpointSaver
@timer
- def save_to_storage(self, step, state_dict, path):
+ def save_to_storage(self, step, state_dict, path=""):
"""
Asynchronously saves the state dict into the storage. It synchronously
saves the state dict into the shared memory and put the path
@@ -95,14 +95,17 @@ def save_to_storage(self, step, state_dict, path):
Only rank 0 saves the state dict into the storage.
Args:
+ step (int): the iteration step.
state_dict (dict): the state dict of model and optimizer to save.
- ckpt_name (str): the storage path to save the state dict.
+ path (str): the storage path to save the state dict.
            Note, the path is used to save the state dict to storage
only if the training process fails.
- path (int): the iteration step.
"""
if self._rank != 0:
return
+ if not path:
+ name = f"{CheckpointConstant.CKPT_NAME_PREFIX}{step}.pt"
+ path = os.path.join(self.checkpoint_dir, name)
if step > self._cached_step:
self.save_to_memory(step, state_dict, path)
event = CheckpointEvent(type=CheckpointEventType.SAVE, step=step)
@@ -142,6 +145,7 @@ def _load_from_storage(self, resume_path=""):
a dictionary containing a whole state of the modules in the
checkpointing file.
"""
+ state_dict = {}
if resume_path:
state_dict = torch.load(resume_path, map_location="cpu")
return state_dict
@@ -150,12 +154,15 @@ def _load_from_storage(self, resume_path=""):
self.checkpoint_dir, CheckpointConstant.TRACER_FILE_NAME
)
if not os.path.exists(tracker_filename):
- return {}
+ return state_dict
with open(tracker_filename, "r") as f:
metastring = f.read().strip()
iteration = int(metastring)
name = f"{CheckpointConstant.CKPT_NAME_PREFIX}{iteration}.pt"
path = os.path.join(self.checkpoint_dir, name)
+ if not os.path.exists(path):
+ logger.warning(f"Checkpoint path {path} is not exist.")
+ return state_dict
logger.info(f"Load the state dict from {path}")
state_dict = torch.load(path, map_location="cpu")
return state_dict
diff --git a/dlrover/trainer/torch/flash_checkpoint/engine.py b/dlrover/trainer/torch/flash_checkpoint/engine.py
index cfe8e34cc..35714e222 100644
--- a/dlrover/trainer/torch/flash_checkpoint/engine.py
+++ b/dlrover/trainer/torch/flash_checkpoint/engine.py
@@ -93,8 +93,10 @@ def __init__(self, checkpoint_dir: str):
self.checkpoint_dir = checkpoint_dir
if dist.is_initialized():
self._rank = dist.get_rank()
+ self._loader_group = dist.new_group(backend="gloo")
else:
self._rank = 0
+ self._loader_group = None
self._local_rank = int(os.getenv("LOCAL_RANK", 0))
self._saver_group = None
self._cached_step = 0
@@ -222,12 +224,10 @@ def get_state_dict_from_memory(self):
state_dict = {}
default_config = CheckpointShardConfig()
config = self._shm_handler.get_checkpoint_config(default_config)
- if config.step == 0:
- return state_dict
passed = verify_all_rank_step_consistent(
- self._saver_group, config.step
+ self._loader_group, config.step
)
- if passed:
+ if passed and config.step > 0:
state_dict = self._shm_handler.load_state_dict()
logger.info(
f"Load step {config.step} checkpoint from the shared memory."
diff --git a/examples/pytorch/nanogpt/ds_config.json b/examples/pytorch/nanogpt/ds_config.json
new file mode 100644
index 000000000..09df4686a
--- /dev/null
+++ b/examples/pytorch/nanogpt/ds_config.json
@@ -0,0 +1,21 @@
+{
+ "zero_optimization": {
+ "stage": 1,
+ "overlap_comm": true,
+ "contiguous_gradients": true,
+ "sub_group_size": 1e9,
+ "reduce_bucket_size": "auto",
+ "stage3_prefetch_bucket_size": "auto",
+ "stage3_param_persistence_threshold": "auto",
+ "stage3_max_live_parameters": 1e9,
+ "stage3_max_reuse_distance": 1e9,
+ "stage3_gather_fp16_weights_on_model_save": true
+ },
+
+ "gradient_accumulation_steps": 1,
+ "gradient_clipping": 0.1,
+ "steps_per_print": 100,
+ "train_batch_size": 32,
+ "train_micro_batch_size_per_gpu": 16,
+ "wall_clock_breakdown": false
+ }
\ No newline at end of file
diff --git a/examples/pytorch/nanogpt/ds_train.py b/examples/pytorch/nanogpt/ds_train.py
new file mode 100644
index 000000000..3b37bbfc8
--- /dev/null
+++ b/examples/pytorch/nanogpt/ds_train.py
@@ -0,0 +1,296 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""
+The start command on a local node:
+
+dlrover-run --nnodes=1 --max_restarts=2 --nproc_per_node=2 \
+ ds_train.py --n_layer 36 --n_head 20 --n_embd 1280 \
+ --data_dir './' \
+ --device cuda --ds_config ./ds_config.json \
+ --epochs 50 --checkpoint_step 50
+"""
+
+import argparse
+import contextlib
+import os
+import time
+
+import deepspeed
+import torch
+from lora import apply_lora
+from train_utils import (
+ add_train_args,
+ cleanup,
+ create_lora_config,
+ get_data_loaders,
+ get_lr,
+ gpt_init,
+ log_rank0,
+ setup,
+)
+
+from dlrover.trainer.torch.flash_checkpoint.deepspeed import (
+ DeepSpeedCheckpointer,
+ StorageType,
+)
+
+os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+
+# We should use a shared storage to persist the checkpoint.
+checkpoint_dir = "/nas/nanogpt-ckpt-ds/"
+
+
+def train():
+ args = arg_parser()
+ setup()
+ os.makedirs(checkpoint_dir, exist_ok=True)
+ world_size = int(os.getenv("WORLD_SIZE", 1))
+ local_rank = int(os.getenv("LOCAL_RANK", 0))
+ gradient_accumulation_steps = args.gradient_accumulation_steps
+ batch_size = args.batch_size
+ if gradient_accumulation_steps == 0:
+ gradient_accumulation_steps = world_size
+ assert gradient_accumulation_steps % world_size == 0
+ block_size = args.block_size
+ gradient_accumulation_steps //= world_size
+ tokens_per_iter = (
+ gradient_accumulation_steps * world_size * batch_size * block_size
+ ) # noqa: E501
+ log_rank0(f"tokens per iteration will be: {tokens_per_iter:,}")
+ device = f"cuda:{local_rank}" if torch.cuda.is_available() else "cpu"
+ device_type = (
+ "cuda" if "cuda" in device else "cpu"
+ ) # For later use in torch.autocast
+ if device_type == "cuda":
+ torch.cuda.set_device(device)
+ # Note: float16 data type will automatically use a GradScaler
+ dtype = (
+ "bfloat16"
+ if torch.cuda.is_available() and torch.cuda.is_bf16_supported()
+ else "float16"
+ )
+    # Map the dtype string to the torch dtype used by autocast
+ ptdtype = {
+ "float32": torch.float32,
+ "bfloat16": torch.bfloat16,
+ "float16": torch.float16,
+ }[dtype]
+ ctx = (
+ contextlib.nullcontext()
+ if device_type == "cpu"
+ else torch.amp.autocast(device_type=device_type, dtype=ptdtype)
+ )
+ train_loader, val_loader, meta_vocab_size = get_data_loaders(
+ data_dir=args.data_dir,
+ batch_size=batch_size,
+ block_size=block_size,
+ )
+ model = gpt_init(meta_vocab_size, args=args)
+ # Optimizer
+ log_rank0(f"creating optimizer...{model.parameters()}")
+ optimizer = torch.optim.AdamW(
+ model.parameters(),
+ weight_decay=args.weight_decay,
+ lr=args.learning_rate,
+ betas=(args.beta1, args.beta2),
+ )
+
+ lora_config = create_lora_config(args)
+ if lora_config is not None:
+ log_rank0(f"apply lora config {lora_config}")
+ apply_lora(model, **lora_config)
+ if torch.cuda.is_available() and device_type == "cuda":
+ # Create model and move it to GPU with id rank
+ model = model.to(local_rank)
+ model, _, _, _ = deepspeed.initialize(
+ model=model,
+ optimizer=optimizer,
+ model_parameters=model.parameters(),
+ config=args.ds_config,
+ )
+ else:
+ raise ValueError("Deepspeed must run with cuda devices.")
+
+ # Compile the model
+    if args.compile == "True":
+ log_rank0("compiling the model... (takes a ~minute)")
+ model = torch.compile(model) # requires PyTorch 2.0
+
+ # Training loop
+ total_time = 0.0
+ local_iter_num = 0 # Number of iterations in the lifetime of this process
+ raw_model = model.module # Unwrap DDP/FSDP container if needed
+ running_mfu = -1.0
+ iter_num = 0
+ decay_lr = args.decay_lr
+ max_iters = args.max_iters
+ log_interval = args.log_interval
+ learning_rate = args.learning_rate
+
+ # Forward backward update, with optional gradient accumulation
+ # to simulate larger batch size and using the GradScaler
+ # if data type is float16
+ t0 = time.time()
+ if args.use_native_ckpt:
+ model.load_checkpoint(checkpoint_dir)
+ else:
+ checkpointer = DeepSpeedCheckpointer(model, checkpoint_dir)
+ checkpointer.load_checkpoint(checkpoint_dir)
+ load_time = round(time.time() - t0, 2)
+ print(f"Load checkpoint time {load_time}s")
+ iter_num = model.global_steps
+ print(f"The restored iteration step is {iter_num}")
+
+ for epoch in range(args.epochs):
+ # Note: set the epoch into the sampler.
+ train_loader.sampler.set_epoch(epoch)
+ for X, Y in train_loader:
+ # Determine and set the learning rate for this iteration
+ lr = get_lr(iter_num, args) if decay_lr else learning_rate
+ for param_group in optimizer.param_groups:
+ param_group["lr"] = lr
+ t0 = time.time()
+ X, Y = X.to(device), Y.to(device)
+ with ctx:
+ logits, loss = model(X, Y)
+ # Scale the loss to account for gradient accumulation
+ loss = loss / gradient_accumulation_steps
+ # immediately async prefetch next batch while model
+ # is doing the forward pass on the GPU
+ # Backward pass, with gradient scaling if training in fp16
+ model.backward(loss)
+ model.step()
+
+ # Timing and logging
+ t1 = time.time()
+ dt = t1 - t0
+ total_time += dt
+
+ if iter_num % log_interval == 0:
+ # Get loss as float. note: this is a CPU-GPU sync point
+ # scale up to undo the division above, approximating
+ # the true total loss (exact would have been a sum)
+ lossf = loss.item() * gradient_accumulation_steps
+ if local_iter_num >= 5: # Let the training loop settle a bit
+ mfu = raw_model.estimate_mfu(
+ batch_size * gradient_accumulation_steps, dt
+ )
+ running_mfu = (
+ mfu
+ if running_mfu == -1.0
+ else 0.9 * running_mfu + 0.1 * mfu
+ )
+ cuda_mem = torch.cuda.max_memory_allocated() / 1e9
+ log_rank0(
+ f"iter {iter_num}: loss {lossf:.4f},"
+ f" time {dt * 1000:.2f}ms, "
+ f"mfu {running_mfu * 100:.2f}%,"
+ f" cuda memory {cuda_mem:.3f}G, "
+ f"lr {lr:.2e}, total time {total_time:.2f}s"
+ )
+ iter_num += 1
+ local_iter_num += 1
+ start_save_t = time.time()
+ if args.use_native_ckpt:
+ saved = native_save_checkpoint(
+ model, checkpoint_dir, iter_num, args.save_storage_interval
+ )
+ else:
+ saved = flash_save_checkpoint(
+ checkpointer,
+ iter_num,
+ args.save_memory_interval,
+ args.save_storage_interval,
+ )
+ if saved:
+ save_time = round(time.time() - start_save_t, 2)
+ print(f"Save checkpoint time {save_time}s")
+
+ # Termination conditions
+ if iter_num > max_iters:
+ break
+
+ if iter_num > max_iters:
+ break
+
+
+def native_save_checkpoint(
+ model: deepspeed.DeepSpeedEngine,
+ checkpoint_dir,
+ iter_num,
+ save_storage_interval,
+):
+ saved = False
+ if iter_num % save_storage_interval == 0:
+ model.save_checkpoint(checkpoint_dir, tag=iter_num)
+ saved = True
+ return saved
+
+
+def flash_save_checkpoint(
+ checkpointer: DeepSpeedCheckpointer,
+ iter_num,
+ save_memory_interval,
+ save_storage_interval,
+):
+ saved = False
+ if iter_num % save_memory_interval == 0:
+ saved = True
+ checkpointer.save_checkpoint(
+ checkpoint_dir,
+ tag=iter_num,
+ storage_type=StorageType.MEMORY,
+ )
+ if iter_num % save_storage_interval == 0:
+ saved = True
+ checkpointer.save_checkpoint(
+ checkpoint_dir, tag=iter_num, storage_type=StorageType.DISK
+ )
+ return saved
+
+
+# Determine the device type based on the input string.
+def device_type(string):
+ lower_string = string.lower()
+ if "gpu" in lower_string or "cuda" in lower_string:
+ if lower_string != "cuda":
+ log_rank0(
+ "It seems you are trying to use a cuda device."
+ 'The correct argument should be "cuda".'
+ "Automatically using the cuda device."
+ )
+ return "cuda"
+ else:
+ if lower_string != "cpu":
+ log_rank0(
+ f'Unrecognized device type argument "{lower_string}".'
+ "Defaulting to use the cpu device."
+ )
+ return "cpu"
+
+
+def arg_parser():
+ parser = argparse.ArgumentParser(description="Process training parameters")
+
+ add_train_args(parser)
+ parser.add_argument("--ds_config", type=str, default="", required=False)
+ args = parser.parse_args()
+
+ return args
+
+
+if __name__ == "__main__":
+ train()
+ cleanup()
diff --git a/examples/pytorch/nanogpt/fsdp_train.py b/examples/pytorch/nanogpt/fsdp_train.py
new file mode 100644
index 000000000..9ab662ad6
--- /dev/null
+++ b/examples/pytorch/nanogpt/fsdp_train.py
@@ -0,0 +1,376 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+import contextlib
+import functools
+import os
+import time
+
+import torch
+import torch.distributed.checkpoint as dist_cp
+from model import Block
+from torch.distributed.checkpoint.optimizer import (
+ load_sharded_optimizer_state_dict,
+)
+from torch.distributed.fsdp import CPUOffload
+from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
+from torch.distributed.fsdp import StateDictType
+from torch.distributed.fsdp.wrap import transformer_auto_wrap_policy
+from train_utils import (
+ add_train_args,
+ cleanup,
+ get_data_loaders,
+ get_lr,
+ gpt_init,
+ log_rank0,
+ setup,
+)
+
+from dlrover.trainer.torch.elastic.trainer import ElasticTrainer
+from dlrover.trainer.torch.flash_checkpoint.fsdp import (
+ FsdpCheckpointer,
+ StorageType,
+)
+
+os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+
+# We should use a shared storage to persist the checkpoint.
+checkpoint_dir = "/nas/nanogpt-ckpt-fsdp/"
+
+
+def train():
+ args = arg_parser()
+ setup()
+ os.makedirs(checkpoint_dir, exist_ok=True)
+ world_size = int(os.getenv("WORLD_SIZE", 1))
+ local_rank = int(os.getenv("LOCAL_RANK", 0))
+ gradient_accumulation_steps = args.gradient_accumulation_steps
+ batch_size = args.batch_size
+ if gradient_accumulation_steps == 0:
+ gradient_accumulation_steps = world_size
+ assert gradient_accumulation_steps % world_size == 0
+ block_size = args.block_size
+ gradient_accumulation_steps //= world_size
+ tokens_per_iter = (
+ gradient_accumulation_steps * world_size * batch_size * block_size
+ ) # noqa: E501
+ log_rank0(f"tokens per iteration will be: {tokens_per_iter:,}")
+ device = f"cuda:{local_rank}" if torch.cuda.is_available() else "cpu"
+ device_type = (
+ "cuda" if "cuda" in device else "cpu"
+ ) # For later use in torch.autocast
+ if device_type == "cuda":
+ torch.cuda.set_device(device)
+ # Note: float16 data type will automatically use a GradScaler
+ dtype = (
+ "bfloat16"
+ if torch.cuda.is_available() and torch.cuda.is_bf16_supported()
+ else "float16"
+ )
+    # Map the dtype string to the torch dtype used by autocast
+ ptdtype = {
+ "float32": torch.float32,
+ "bfloat16": torch.bfloat16,
+ "float16": torch.float16,
+ }[dtype]
+ ctx = (
+ contextlib.nullcontext()
+ if device_type == "cpu"
+ else torch.amp.autocast(device_type=device_type, dtype=ptdtype)
+ )
+ train_loader, val_loader, meta_vocab_size = get_data_loaders(
+ data_dir=args.data_dir,
+ batch_size=batch_size,
+ block_size=block_size,
+ )
+ model = gpt_init(meta_vocab_size, args=args)
+ scaler = torch.cuda.amp.GradScaler(enabled=(dtype == "float16"))
+ if torch.cuda.is_available() and device_type == "cuda":
+ print(f"Running basic FSDP example on local rank {local_rank}.")
+ my_auto_wrap_policy = functools.partial(
+ transformer_auto_wrap_policy,
+ transformer_layer_cls={Block},
+ )
+ cpu_offload = (
+ CPUOffload(offload_params=True) if args.cpu_offload else None
+ )
+ model = FSDP(
+ model,
+ device_id=local_rank,
+ auto_wrap_policy=my_auto_wrap_policy,
+ cpu_offload=cpu_offload,
+ )
+
+ else:
+ raise ValueError("FSDP can only runs on CUDA.")
+
+ # Optimizer
+ log_rank0(f"creating optimizer...{model.parameters()}")
+ optimizer = torch.optim.AdamW(
+ model.parameters(),
+ weight_decay=args.weight_decay,
+ lr=args.learning_rate,
+ betas=(args.beta1, args.beta2),
+ )
+
+ # Compile the model
+    if args.compile == "True":
+ log_rank0("compiling the model... (takes a ~minute)")
+ model = torch.compile(model) # requires PyTorch 2.0
+
+ # Training loop
+ total_time = 0.0
+ local_iter_num = 0 # Number of iterations in the lifetime of this process
+ raw_model = model.module # Unwrap DDP/FSDP container if needed
+ running_mfu = -1.0
+ iter_num = 0
+ decay_lr = args.decay_lr
+ max_iters = args.max_iters
+ log_interval = args.log_interval
+ grad_clip = args.grad_clip
+ learning_rate = args.learning_rate
+ elastic_trainer = ElasticTrainer(
+ model=model,
+ dataloader=train_loader,
+ )
+ optimizer = elastic_trainer.prepare(optimizer)
+
+ # Forward backward update, with optional gradient accumulation
+ # to simulate larger batch size and using the GradScaler
+ # if data type is float16
+
+ start_load_t = time.time()
+ if args.use_native_ckpt:
+ iter_num = native_load_checkpoint(0, model, optimizer)
+ else:
+ checkpointer = FsdpCheckpointer(checkpoint_dir)
+ iter_num = flash_load_checkpoint(checkpointer, model, optimizer)
+ load_time = round(time.time() - start_load_t, 2)
+ print(f"Load checkpoint time : {load_time}s")
+ iter_num = 0 if not iter_num else iter_num
+
+ for epoch in range(args.epochs):
+ # Note: set the epoch into the sampler.
+ train_loader.sampler.set_epoch(epoch)
+ for X, Y in train_loader:
+ # Determine and set the learning rate for this iteration
+ lr = get_lr(iter_num, args) if decay_lr else learning_rate
+ for param_group in optimizer.param_groups:
+ param_group["lr"] = lr
+ t0 = time.time()
+ X, Y = X.to(device), Y.to(device)
+ with ctx:
+ logits, loss = model(X, Y)
+ # Scale the loss to account for gradient accumulation
+ loss = loss / gradient_accumulation_steps
+ # immediately async prefetch next batch while model
+ # is doing the forward pass on the GPU
+ # Backward pass, with gradient scaling if training in fp16
+ scaler.scale(loss).backward()
+ # Clip the gradient
+ if grad_clip != 0.0:
+ scaler.unscale_(optimizer)
+ torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
+ # Step the optimizer and scaler if training in fp16
+ scaler.step(optimizer)
+ scaler.update()
+ # Flush the gradients as soon as we can,
+ # no need for this memory anymore
+ optimizer.zero_grad(set_to_none=True)
+
+ # Timing and logging
+ t1 = time.time()
+ dt = t1 - t0
+ total_time += dt
+
+ if iter_num % log_interval == 0:
+ # Get loss as float. note: this is a CPU-GPU sync point
+ # scale up to undo the division above, approximating
+ # the true total loss (exact would have been a sum)
+ lossf = loss.item() * gradient_accumulation_steps
+ if local_iter_num >= 5: # Let the training loop settle a bit
+ mfu = raw_model.estimate_mfu(
+ batch_size * gradient_accumulation_steps, dt
+ )
+ running_mfu = (
+ mfu
+ if running_mfu == -1.0
+ else 0.9 * running_mfu + 0.1 * mfu
+ )
+ cuda_mem = torch.cuda.max_memory_allocated() / 1e9
+ log_rank0(
+ f"iter {iter_num}: loss {lossf:.4f},"
+ f" time {dt * 1000:.2f}ms, "
+ f"mfu {running_mfu * 100:.2f}%,"
+ f" cuda memory {cuda_mem:.3f}G, "
+ f"lr {lr:.2e}, total time {total_time:.2f}s"
+ )
+ iter_num += 1
+ local_iter_num += 1
+ start_save_t = time.time()
+ if args.use_native_ckpt:
+ saved = native_save_checkpoint(
+ iter_num, model, optimizer, args.save_storage_interval
+ )
+ else:
+ saved = flash_save_checkpoint(
+ checkpointer,
+ iter_num,
+ model,
+ optimizer,
+ args.save_memory_interval,
+ args.save_storage_interval,
+ )
+ if saved:
+ save_time = round(time.time() - start_save_t, 2)
+ print(f"Save checkpoint time {save_time}s")
+
+ # Termination conditions
+ if iter_num > max_iters:
+ break
+ if iter_num > max_iters:
+ break
+
+
+def native_load_checkpoint(step, model, optimizer):
+ with FSDP.state_dict_type(model, StateDictType.SHARDED_STATE_DICT):
+ state_dict = {
+ "model": model.state_dict(),
+ "step": 0,
+ # cannot load the optimizer state_dict
+ # together with the model state_dict
+ }
+ ckpt_dir = os.path.join(checkpoint_dir, str(step))
+ if not os.path.exists(ckpt_dir):
+ return
+ storage_reader = dist_cp.FileSystemReader(ckpt_dir)
+ dist_cp.load_state_dict(
+ state_dict=state_dict,
+ storage_reader=storage_reader,
+ )
+ model.load_state_dict(state_dict["model"])
+
+ optim_state = load_sharded_optimizer_state_dict(
+ model_state_dict=state_dict["model"],
+ optimizer_key="optim",
+ storage_reader=storage_reader,
+ )
+
+ flattened_osd = FSDP.optim_state_dict_to_load(
+ model, optimizer, optim_state["optim"]
+ )
+ optimizer.load_state_dict(flattened_osd)
+ return state_dict["step"]
+
+
+def native_save_checkpoint(step, model, optimizer, save_storage_interval):
+ saved = False
+ if step % save_storage_interval != 0:
+ return saved
+ ckpt_dir = os.path.join(checkpoint_dir, str(step))
+ os.makedirs(ckpt_dir, exist_ok=True)
+ with FSDP.state_dict_type(model, StateDictType.SHARDED_STATE_DICT):
+ state_dict = {
+ "model": model.state_dict(),
+ "optim": FSDP.optim_state_dict(model, optimizer),
+ "step": step,
+ }
+ print(f"save checkpoint to {ckpt_dir}")
+ if step % save_storage_interval == 0:
+ dist_cp.save_state_dict(
+ state_dict=state_dict,
+ storage_writer=dist_cp.FileSystemWriter(ckpt_dir),
+ )
+ saved = True
+ return saved
+
+
+def flash_load_checkpoint(checkpointer, model, optimizer):
+ with FSDP.state_dict_type(model, StateDictType.SHARDED_STATE_DICT):
+ state_dict = {
+ "model": model.state_dict(),
+ "step": 0,
+ # cannot load the optimizer state_dict together
+ # with the model state_dict
+ }
+ storage_reader = checkpointer.get_storage_reader()
+ if not storage_reader:
+ return
+ dist_cp.load_state_dict(
+ state_dict=state_dict,
+ storage_reader=storage_reader,
+ )
+ model.load_state_dict(state_dict["model"])
+
+ optim_state = load_sharded_optimizer_state_dict(
+ model_state_dict=state_dict["model"],
+ optimizer_key="optim",
+ storage_reader=storage_reader,
+ )
+
+ flattened_osd = FSDP.optim_state_dict_to_load(
+ model, optimizer, optim_state["optim"]
+ )
+ optimizer.load_state_dict(flattened_osd)
+ return state_dict["step"]
+
+
+def flash_save_checkpoint(
+ checkpointer,
+ step,
+ model,
+ optimizer,
+ save_memory_interval,
+ save_storage_interval,
+):
+ saved = False
+ if step % save_memory_interval != 0 and step % save_storage_interval != 0:
+ return saved
+ with FSDP.state_dict_type(model, StateDictType.SHARDED_STATE_DICT):
+ state_dict = {
+ "model": model.state_dict(),
+ "optim": FSDP.optim_state_dict(model, optimizer),
+ "step": step,
+ }
+ ckpt_dir = os.path.join(checkpoint_dir, str(step))
+ print(f"save checkpoint to {ckpt_dir}")
+ if step % save_memory_interval == 0:
+ checkpointer.save_checkpoint(
+ step, state_dict, ckpt_dir, storage_type=StorageType.MEMORY
+ )
+ saved = True
+ if step % save_storage_interval == 0:
+ checkpointer.save_checkpoint(
+ step, state_dict, ckpt_dir, storage_type=StorageType.DISK
+ )
+ saved = True
+ return saved
+
+
+def arg_parser():
+ parser = argparse.ArgumentParser(description="Process training parameters")
+ add_train_args(parser)
+
+ parser.add_argument("--cpu_offload", action="store_true", required=False)
+ args = parser.parse_args()
+
+ return args
+
+
+if __name__ == "__main__":
+ train()
+ cleanup()
diff --git a/examples/pytorch/nanogpt/train.py b/examples/pytorch/nanogpt/train.py
index f3e2561f5..73b4dffa6 100644
--- a/examples/pytorch/nanogpt/train.py
+++ b/examples/pytorch/nanogpt/train.py
@@ -14,184 +14,42 @@
import argparse
import contextlib
-import functools
-import math
import os
-import pickle
import time
-from datetime import datetime, timedelta
-import numpy as np
import torch
-import torch.distributed as dist
from lora import apply_lora
-from model import GPT, Block, GPTConfig
-from torch.distributed.fsdp import CPUOffload, FullStateDictConfig
-from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
-from torch.distributed.fsdp import StateDictType
-from torch.distributed.fsdp.wrap import transformer_auto_wrap_policy
from torch.nn.parallel import DistributedDataParallel as DDP
-from torch.utils.data import Dataset
+from train_utils import (
+ add_train_args,
+ cleanup,
+ create_lora_config,
+ get_data_loaders,
+ get_lr,
+ gpt_init,
+ log_rank0,
+ setup,
+)
-from dlrover.trainer.torch.elastic.checkpoint import CheckpointManger
-from dlrover.trainer.torch.elastic.dataloader import ElasticDataLoader
from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
from dlrover.trainer.torch.elastic.trainer import ElasticTrainer
+from dlrover.trainer.torch.flash_checkpoint.ddp import (
+ DdpCheckpointer,
+ StorageType,
+)
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
# We should use a shared storage to persist the checkpoint.
checkpoint_dir = "/nas/nanogpt-ckpt/"
-local_rank = None
-master_process = False
-
-
-class GPTDataset(Dataset):
- def __init__(self, data_path, block_size=128):
- self.data = np.memmap(data_path, dtype=np.uint16, mode="r")
- self.block_size = block_size
-
- def __len__(self):
- return len(self.data) - self.block_size
-
- def __getitem__(self, idx):
- x = torch.from_numpy(
- self.data[idx : idx + self.block_size].astype( # noqa E203
- np.int64
- ) # noqa E203
- ) # noqa
- y = torch.from_numpy(
- self.data[idx + 1 : idx + 1 + self.block_size].astype( # noqa E203
- np.int64
- ) # noqa
- ) # noqa
- return x, y
-
-
-def get_data_loaders(
- data_dir,
- batch_size=32,
- block_size=128,
-):
- train_dataset = GPTDataset(os.path.join(data_dir, "train.bin"), block_size)
- val_dataset = GPTDataset(os.path.join(data_dir, "val.bin"), block_size)
- with open(os.path.join(data_dir, "meta.pkl"), "rb") as f:
- meta = pickle.load(f)
- sampler = ElasticDistributedSampler(dataset=train_dataset)
- train_loader = ElasticDataLoader(
- train_dataset, batch_size=batch_size, sampler=sampler, pin_memory=True
- )
- val_loader = ElasticDataLoader(
- val_dataset, batch_size=batch_size, shuffle=False, pin_memory=True
- )
- meta_vocab_size = meta["vocab_size"]
- return train_loader, val_loader, meta_vocab_size
-
-
-def gpt_init(meta_vocab_size=None, args=None):
- n_layer = args.n_layer
- n_head = args.n_head
- n_embd = args.n_embd
- block_size = args.block_size
- bias = args.bias
- dropout = args.dropout
- # model init
- model_args = dict(
- n_layer=n_layer,
- n_head=n_head,
- n_embd=n_embd,
- block_size=block_size,
- bias=bias,
- vocab_size=None,
- dropout=dropout,
- ) # Start with model_args from command line
- # Init a new model from scratch
- log_rank0("Initializing a new model from scratch")
- # Determine the vocab size we'll use for from-scratch training
- if meta_vocab_size is None:
- print(
- "defaulting to vocab_size of GPT-2 to 50304 "
- "(50257 rounded up for efficiency)"
- )
- model_args["vocab_size"] = (
- meta_vocab_size if meta_vocab_size is not None else 50304
- )
- gptconf = GPTConfig(**model_args)
- return GPT(gptconf)
-
-
-def create_lora_config(args):
- if (
- args.lora_rank is None
- and args.lora_dropout is None
- and args.lora_alpha is None
- and args.lora_targets is None
- ):
- return None
- lora_config = {
- "rank": args.lora_rank,
- "dropout": args.lora_dropout,
- "alpha": args.lora_alpha,
- "targets": args.lora_targets.split(",") if args.lora_targets else [],
- }
- return lora_config
-
-
-# Learning rate decay scheduler (cosine with warmup)
-def get_lr(it, args):
- learning_rate = args.learning_rate
- warmup_iters = args.warmup_iters
- lr_decay_iters = args.lr_decay_iters
- min_lr = args.min_lr
- # 1) Linear warmup for warmup_iters steps
- if it < warmup_iters:
- return learning_rate * it / warmup_iters
- # 2) If it > lr_decay_iters, return min learning rate
- if it > lr_decay_iters:
- return min_lr
- # 3) In between, use cosine decay down to min learning rate
- decay_ratio = (it - warmup_iters) / (lr_decay_iters - warmup_iters)
- assert 0 <= decay_ratio <= 1
- coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff ranges 0..1
- return min_lr + coeff * (learning_rate - min_lr)
-
-
-def log_rank0(msg):
- rank = int(os.getenv("RANK", 0))
- if rank == 0:
- print(msg)
-
-
-def setup(args):
- global local_rank, master_process
-
- use_cuda = torch.cuda.is_available() and args.device != "cpu"
- if use_cuda:
- dist.init_process_group("nccl", timeout=timedelta(seconds=120))
- else:
- dist.init_process_group("gloo", timeout=timedelta(seconds=120))
- rank = dist.get_rank()
- local_rank = int(os.getenv("LOCAL_RANK", 0))
- print(f"rank {rank} is initialized local_rank = {local_rank}")
- # This process will do logging, checkpointing etc.
- master_process = rank == 0
- seed_offset = rank # Each process gets a different seed
- torch.manual_seed(1337 + seed_offset)
- torch.backends.cuda.matmul.allow_tf32 = True # Allow tf32 on matmul
- torch.backends.cudnn.allow_tf32 = True # Allow tf32 on cudnn
-
-
-def cleanup():
- dist.destroy_process_group()
-
def train():
- global local_rank
args = arg_parser()
- setup(args)
+ setup()
os.makedirs(checkpoint_dir, exist_ok=True)
world_size = int(os.getenv("WORLD_SIZE", 1))
+ local_rank = int(os.getenv("LOCAL_RANK", 0))
gradient_accumulation_steps = args.gradient_accumulation_steps
batch_size = args.batch_size
if gradient_accumulation_steps == 0:
@@ -203,14 +61,8 @@ def train():
gradient_accumulation_steps * world_size * batch_size * block_size
) # noqa: E501
log_rank0(f"tokens per iteration will be: {tokens_per_iter:,}")
- device = (
- f"cuda:{local_rank}"
- if torch.cuda.is_available() and "cuda" in args.device
- else "cpu"
- )
- device_type = (
- "cuda" if "cuda" in device else "cpu"
- ) # For later use in torch.autocast
+ device = f"cuda:{local_rank}" if torch.cuda.is_available() else "cpu"
+ device_type = "cuda" if "cuda" in device else "cpu"
if device_type == "cuda":
torch.cuda.set_device(device)
# Note: float16 data type will automatically use a GradScaler
@@ -246,27 +98,9 @@ def train():
if torch.cuda.is_available() and device_type == "cuda":
# Create model and move it to GPU with id rank
model = model.to(local_rank)
- if args.use_fsdp:
- print(f"Running basic FSDP example on local rank {local_rank}.")
-
- my_auto_wrap_policy = functools.partial(
- transformer_auto_wrap_policy,
- transformer_layer_cls={Block},
- )
- cpu_offload = (
- CPUOffload(offload_params=True) if args.cpu_offload else None
- )
- model = FSDP(
- model,
- device_id=local_rank,
- auto_wrap_policy=my_auto_wrap_policy,
- cpu_offload=cpu_offload,
- )
- else:
- print(f"Running basic DDP example on local rank {local_rank}.")
- model = DDP(model, device_ids=[local_rank])
- print(f"Model device {model.device}")
-
+ print(f"Running basic DDP example on local rank {local_rank}.")
+ model = DDP(model, device_ids=[local_rank])
+ print(f"Model device {model.device}")
else:
print(f"Running basic CPU example on device {device}.")
model = model.to(device)
@@ -307,11 +141,28 @@ def train():
# to simulate larger batch size and using the GradScaler
# if data type is float16
- ckpt_manager = CheckpointManger.init_checkpoint_manager(
- model, optimizer, train_loader, checkpoint_dir
- )
- ckpt_dict = ckpt_manager.load()
+ checkpointer = DdpCheckpointer(checkpoint_dir)
+
+ t0 = time.time()
+ ckpt_dict = {}
+ if args.use_native_ckpt:
+ ckpt_path = os.path.join(checkpoint_dir, "50.pt")
+ ckpt_dict = torch.load(ckpt_path)
+ else:
+ ckpt_dict = checkpointer.load_checkpoint()
+ read_time = round(time.time() - t0, 2)
+ if "model" in ckpt_dict:
+ model.load_state_dict(ckpt_dict["model"])
+ if "optimizer" in ckpt_dict:
+ optimizer.load_state_dict(ckpt_dict["optimizer"])
+ if "sampler" in ckpt_dict:
+ train_loader.sampler.load_state_dict(ckpt_dict["sampler"])
iter_num = ckpt_dict.get("step", 0)
+ load_time = round(time.time() - t0, 2)
+ print(
+ f"Local rank {local_rank}: reading time {read_time}, "
+ f"loading time {load_time}s"
+ )
for epoch in range(args.epochs):
# Note: set the epoch into the sampler.
@@ -377,8 +228,29 @@ def train():
)
iter_num += 1
local_iter_num += 1
- if iter_num % args.checkpoint_step == 0:
- ckpt_manager.save(epoch, iter_num)
+ start_save_t = time.time()
+ if args.use_native_ckpt:
+ saved = native_save_checkpoint(
+ iter_num,
+ model,
+ optimizer,
+ train_loader,
+ args.save_storage_interval,
+ )
+ else:
+ saved = flash_save_checkpoint(
+ checkpointer,
+ iter_num,
+ model,
+ optimizer,
+ train_loader,
+ args.save_memory_interval,
+ args.save_storage_interval,
+ )
+ if saved:
+ save_time = round(time.time() - start_save_t, 2)
+ print(f"Save checkpoint time {save_time}s")
+
# Termination conditions
if iter_num > max_iters:
break
@@ -386,138 +258,72 @@ def train():
break
if iter_num > max_iters:
break
- if args.save_model:
- rank = int(os.getenv("RANK", "0"))
- save_model(model, epoch, rank, args.use_fsdp)
-
-
-def save_model(model, epoch, rank, use_fsdp=False):
- # save
- if rank == 0:
- print("--> entering save model state")
-
- if use_fsdp:
- save_policy = FullStateDictConfig(offload_to_cpu=True, rank0_only=True)
- with FSDP.state_dict_type(
- model, StateDictType.FULL_STATE_DICT, save_policy
- ):
- cpu_state = model.state_dict()
- else:
- cpu_state = model.state_dict()
- if rank == 0:
- print("--> saving model ...")
- currEpoch = "-" + str(epoch) + ".pt"
- print(f"--> attempting to save model prefix {currEpoch}")
- time_of_run = datetime.now().strftime("%Y-%m-%d-%I-%M-%S")
- save_name = "nanogpt-" + time_of_run + currEpoch
- print(f"--> saving as model name {save_name}")
- torch.save(cpu_state, save_name)
-
-# Determine the device type based on the input string.
-def device_type(string):
- lower_string = string.lower()
- if "gpu" in lower_string or "cuda" in lower_string:
- if lower_string != "cuda":
- log_rank0(
- "It seems you are trying to use a cuda device."
- 'The correct argument should be "cuda".'
- "Automatically using the cuda device."
- )
- return "cuda"
- else:
- if lower_string != "cpu":
- log_rank0(
- f'Unrecognized device type argument "{lower_string}".'
- "Defaulting to use the cpu device."
- )
- return "cpu"
+def native_save_checkpoint(
+ iter_num, model, optimizer, train_loader, save_storage_interval
+):
+ saved = False
+ if iter_num % save_storage_interval != 0:
+ return saved
+ state_dict = {
+ "model": model.state_dict(),
+ "optimizer": optimizer.state_dict(),
+ "step": iter_num,
+ }
+ if isinstance(train_loader.sampler, ElasticDistributedSampler):
+ sampler_sd = train_loader.sampler.state_dict(
+ iter_num, train_loader.batch_size
+ )
+ state_dict["ds_sampler"] = sampler_sd
+ ckpt_path = os.path.join(checkpoint_dir, f"{iter_num}.pt")
+ torch.save(state_dict, ckpt_path)
+    saved = True
+    return saved
+
+
+def flash_save_checkpoint(
+ checkpointer,
+ iter_num,
+ model,
+ optimizer,
+ train_loader,
+ save_memory_interval,
+ save_storage_interval,
+):
+ saved = False
+ if (
+ iter_num % save_memory_interval != 0
+ and iter_num % save_storage_interval != 0
+ ):
+ return saved
+ state_dict = {
+ "model": model.state_dict(),
+ "optimizer": optimizer.state_dict(),
+ "step": iter_num,
+ }
+ if isinstance(train_loader.sampler, ElasticDistributedSampler):
+ sampler_sd = train_loader.sampler.state_dict(
+ iter_num, train_loader.batch_size
+ )
+ state_dict["ds_sampler"] = sampler_sd
+ ckpt_path = os.path.join(checkpoint_dir, f"checkpoint-{iter_num}.pt")
+ if iter_num % save_memory_interval == 0:
+ checkpointer.save_checkpoint(
+ iter_num, state_dict, ckpt_path, storage_type=StorageType.MEMORY
+ )
+ saved = True
+ if iter_num % save_storage_interval == 0:
+ checkpointer.save_checkpoint(
+ iter_num, state_dict, ckpt_path, storage_type=StorageType.DISK
+ )
+ saved = True
+ return saved
def arg_parser():
parser = argparse.ArgumentParser(description="Process training parameters")
-
- # Data settings
- parser.add_argument("--data_dir", type=str, required=True)
- parser.add_argument("--out_dir", type=str, default="out", required=False)
- parser.add_argument(
- "--eval_interval", type=int, default=2000, required=False
- )
- parser.add_argument("--log_interval", type=int, default=1, required=False)
- parser.add_argument("--eval_iters", type=int, default=200, required=False)
- parser.add_argument("--eval_only", action="store_true", required=False)
- parser.add_argument(
- "--always_save_checkpoint", action="store_true", required=False
- )
- parser.add_argument("--batch_size", type=int, default=32, required=False)
- parser.add_argument("--block_size", type=int, default=128, required=False)
- parser.add_argument("--epochs", type=int, default=1, required=False)
-
- # Model settings
- parser.add_argument("--n_layer", type=int, default=6, required=False)
- parser.add_argument("--n_head", type=int, default=6, required=False)
- parser.add_argument("--n_embd", type=int, default=384, required=False)
- parser.add_argument("--dropout", type=float, default=0.0, required=False)
- parser.add_argument("--bias", action="store_true", required=False)
-
- # LoRA settings
- parser.add_argument("--lora_rank", type=int, default=4, required=False)
- parser.add_argument(
- "--lora_dropout", type=float, default=0.0, required=False
- )
- parser.add_argument(
- "--lora_alpha", type=float, default=1.0, required=False
- )
- parser.add_argument(
- "--lora_targets",
- type=str,
- default="wq,wk,wo,wv",
- required=False,
- help="comma separated list of targets to apply lora to",
- )
- # Optimizer settings
- parser.add_argument(
- "--learning_rate", type=float, default=6e-4, required=False
- )
- parser.add_argument("--max_iters", type=int, default=2000, required=False)
- parser.add_argument(
- "--weight_decay", type=float, default=1e-1, required=False
- )
- parser.add_argument("--beta1", type=float, default=0.9, required=False)
- parser.add_argument("--beta2", type=float, default=0.95, required=False)
- parser.add_argument("--grad_clip", type=float, default=1.0, required=False)
- parser.add_argument(
- "--gradient_accumulation_steps", type=int, default=0, required=False
- )
-
- # Learning rate decay settings
- parser.add_argument("--decay_lr", action="store_true", required=False)
- parser.add_argument("--warmup_iters", type=int, default=0, required=False)
- parser.add_argument(
- "--lr_decay_iters", type=int, default=10, required=False
- )
- parser.add_argument("--min_lr", type=float, default=6e-5, required=False)
-
- # System settings
- parser.add_argument(
- "--device",
- type=device_type,
- default="cpu",
- required=False,
- help="""The device to use for computation.
- Choose from 'cuda' or 'cpu'.
- Defaults to 'cpu' if not specified.""",
- )
- parser.add_argument("--compile", type=str, default="False", required=False)
- parser.add_argument("--use_fsdp", action="store_true", required=False)
- parser.add_argument("--cpu_offload", action="store_true", required=False)
- parser.add_argument(
- "--checkpoint_step", type=int, default=100, required=False
- )
- parser.add_argument("--save_model", action="store_true", required=False)
+ add_train_args(parser)
args = parser.parse_args()
-
return args
diff --git a/examples/pytorch/nanogpt/train_utils.py b/examples/pytorch/nanogpt/train_utils.py
new file mode 100644
index 000000000..0ab917f40
--- /dev/null
+++ b/examples/pytorch/nanogpt/train_utils.py
@@ -0,0 +1,236 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+import math
+import os
+import pickle
+from datetime import timedelta
+
+import numpy as np
+import torch
+import torch.distributed as dist
+from model import GPT, GPTConfig
+from torch.utils.data import Dataset
+
+from dlrover.trainer.torch.elastic.dataloader import ElasticDataLoader
+from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
+
+os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+
+
+class GPTDataset(Dataset):
+ def __init__(self, data_path, block_size=128):
+ self.data = np.memmap(data_path, dtype=np.uint16, mode="r")
+ self.block_size = block_size
+
+ def __len__(self):
+ return len(self.data) - self.block_size
+
+ def __getitem__(self, idx):
+ x = torch.from_numpy(
+ self.data[idx : idx + self.block_size].astype( # noqa E203
+ np.int64
+ ) # noqa E203
+ ) # noqa
+ y = torch.from_numpy(
+ self.data[idx + 1 : idx + 1 + self.block_size].astype( # noqa E203
+ np.int64
+ ) # noqa
+ ) # noqa
+ return x, y
+
+
+def get_data_loaders(
+ data_dir,
+ batch_size=32,
+ block_size=128,
+):
+ train_dataset = GPTDataset(os.path.join(data_dir, "train.bin"), block_size)
+ val_dataset = GPTDataset(os.path.join(data_dir, "val.bin"), block_size)
+ with open(os.path.join(data_dir, "meta.pkl"), "rb") as f:
+ meta = pickle.load(f)
+ sampler = ElasticDistributedSampler(dataset=train_dataset)
+ train_loader = ElasticDataLoader(
+ train_dataset, batch_size=batch_size, sampler=sampler, pin_memory=True
+ )
+ val_loader = ElasticDataLoader(
+ val_dataset, batch_size=batch_size, shuffle=False, pin_memory=True
+ )
+ meta_vocab_size = meta["vocab_size"]
+ return train_loader, val_loader, meta_vocab_size
+
+
+def gpt_init(meta_vocab_size=None, args=None):
+ n_layer = args.n_layer
+ n_head = args.n_head
+ n_embd = args.n_embd
+ block_size = args.block_size
+ bias = args.bias
+ dropout = args.dropout
+ # model init
+ model_args = dict(
+ n_layer=n_layer,
+ n_head=n_head,
+ n_embd=n_embd,
+ block_size=block_size,
+ bias=bias,
+ vocab_size=None,
+ dropout=dropout,
+ ) # Start with model_args from command line
+ # Init a new model from scratch
+ log_rank0("Initializing a new model from scratch")
+ # Determine the vocab size we'll use for from-scratch training
+ if meta_vocab_size is None:
+ print(
+ "defaulting to vocab_size of GPT-2 to 50304 "
+ "(50257 rounded up for efficiency)"
+ )
+ model_args["vocab_size"] = (
+ meta_vocab_size if meta_vocab_size is not None else 50304
+ )
+ gptconf = GPTConfig(**model_args)
+ return GPT(gptconf)
+
+
+def create_lora_config(args):
+ if (
+ args.lora_rank is None
+ and args.lora_dropout is None
+ and args.lora_alpha is None
+ and args.lora_targets is None
+ ):
+ return None
+ lora_config = {
+ "rank": args.lora_rank,
+ "dropout": args.lora_dropout,
+ "alpha": args.lora_alpha,
+ "targets": args.lora_targets.split(",") if args.lora_targets else [],
+ }
+ return lora_config
+
+
+# Learning rate decay scheduler (cosine with warmup)
+def get_lr(it, args):
+ learning_rate = args.learning_rate
+ warmup_iters = args.warmup_iters
+ lr_decay_iters = args.lr_decay_iters
+ min_lr = args.min_lr
+ # 1) Linear warmup for warmup_iters steps
+ if it < warmup_iters:
+ return learning_rate * it / warmup_iters
+ # 2) If it > lr_decay_iters, return min learning rate
+ if it > lr_decay_iters:
+ return min_lr
+ # 3) In between, use cosine decay down to min learning rate
+ decay_ratio = (it - warmup_iters) / (lr_decay_iters - warmup_iters)
+ assert 0 <= decay_ratio <= 1
+ coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff ranges 0..1
+ return min_lr + coeff * (learning_rate - min_lr)
+
+
+def log_rank0(msg):
+ rank = int(os.getenv("RANK", 0))
+ if rank == 0:
+ print(msg)
+
+
+def setup():
+ if torch.cuda.is_available():
+ dist.init_process_group("nccl", timeout=timedelta(seconds=120))
+ else:
+ dist.init_process_group("gloo", timeout=timedelta(seconds=120))
+ rank = dist.get_rank()
+ local_rank = int(os.getenv("LOCAL_RANK", 0))
+ print(f"rank {rank} is initialized local_rank = {local_rank}")
+ # This process will do logging, checkpointing etc.
+ seed_offset = rank # Each process gets a different seed
+ torch.manual_seed(1337 + seed_offset)
+ torch.backends.cuda.matmul.allow_tf32 = True # Allow tf32 on matmul
+ torch.backends.cudnn.allow_tf32 = True # Allow tf32 on cudnn
+
+
+def cleanup():
+ dist.destroy_process_group()
+
+
+def add_train_args(parser: argparse.ArgumentParser):
+ # Data settings
+ parser.add_argument("--data_dir", type=str, required=True)
+ parser.add_argument("--out_dir", type=str, default="out", required=False)
+ parser.add_argument(
+ "--eval_interval", type=int, default=2000, required=False
+ )
+ parser.add_argument("--log_interval", type=int, default=1, required=False)
+ parser.add_argument("--eval_iters", type=int, default=200, required=False)
+ parser.add_argument("--eval_only", action="store_true", required=False)
+ parser.add_argument("--batch_size", type=int, default=32, required=False)
+ parser.add_argument("--block_size", type=int, default=128, required=False)
+ parser.add_argument("--epochs", type=int, default=1, required=False)
+
+ # Model settings
+ parser.add_argument("--n_layer", type=int, default=6, required=False)
+ parser.add_argument("--n_head", type=int, default=6, required=False)
+ parser.add_argument("--n_embd", type=int, default=384, required=False)
+ parser.add_argument("--dropout", type=float, default=0.0, required=False)
+ parser.add_argument("--bias", action="store_true", required=False)
+
+ # LoRA settings
+ parser.add_argument("--lora_rank", type=int, default=4, required=False)
+ parser.add_argument(
+ "--lora_dropout", type=float, default=0.0, required=False
+ )
+ parser.add_argument(
+ "--lora_alpha", type=float, default=1.0, required=False
+ )
+ parser.add_argument(
+ "--lora_targets",
+ type=str,
+ default="wq,wk,wo,wv",
+ required=False,
+ help="comma separated list of targets to apply lora to",
+ )
+ # Optimizer settings
+ parser.add_argument(
+ "--learning_rate", type=float, default=6e-4, required=False
+ )
+ parser.add_argument("--max_iters", type=int, default=2000, required=False)
+ parser.add_argument(
+ "--weight_decay", type=float, default=1e-1, required=False
+ )
+ parser.add_argument("--beta1", type=float, default=0.9, required=False)
+ parser.add_argument("--beta2", type=float, default=0.95, required=False)
+ parser.add_argument("--grad_clip", type=float, default=1.0, required=False)
+ parser.add_argument(
+ "--gradient_accumulation_steps", type=int, default=0, required=False
+ )
+
+ # Learning rate decay settings
+ parser.add_argument("--decay_lr", action="store_true", required=False)
+ parser.add_argument("--warmup_iters", type=int, default=0, required=False)
+ parser.add_argument(
+ "--lr_decay_iters", type=int, default=10, required=False
+ )
+ parser.add_argument("--min_lr", type=float, default=6e-5, required=False)
+
+ parser.add_argument("--compile", type=str, default="False", required=False)
+ parser.add_argument(
+ "--save_memory_interval", type=int, default=20, required=False
+ )
+ parser.add_argument(
+ "--save_storage_interval", type=int, default=200, required=False
+ )
+ parser.add_argument(
+ "--use_native_ckpt", action="store_true", required=False
+ )
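
The `get_lr` helper above applies a linear warmup followed by cosine decay down to `min_lr`. A standalone check of the schedule with illustrative hyperparameters (not the argparse defaults):

```
import math


def cosine_lr(it, learning_rate=6e-4, warmup_iters=10,
              lr_decay_iters=100, min_lr=6e-5):
    # Mirrors get_lr above; the hyperparameter values are assumptions.
    if it < warmup_iters:
        return learning_rate * it / warmup_iters
    if it > lr_decay_iters:
        return min_lr
    decay_ratio = (it - warmup_iters) / (lr_decay_iters - warmup_iters)
    coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio))
    return min_lr + coeff * (learning_rate - min_lr)


print(cosine_lr(5))    # warmup: 3e-4
print(cosine_lr(55))   # mid-decay: 3.3e-4
print(cosine_lr(200))  # floor: 6e-5
```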
| How to run the model with DeepSpeed?
https://www.deepspeed.ai/training/
DeepSpeed shows much better performance than the same job without these optimizations.
We can provide an example to run this model; a rough sketch follows.
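
A minimal sketch of what such an example could look like; the model and config here are placeholders, and the script must be launched with a distributed launcher such as `dlrover-run` or `torchrun`:

```
import deepspeed
import torch

model = torch.nn.Linear(16, 16)
# ds_config.json is assumed to sit next to the script.
engine, optimizer, _, _ = deepspeed.initialize(
    model=model,
    model_parameters=model.parameters(),
    config="ds_config.json",
)
x = torch.randn(4, 16).to(engine.device)
loss = engine(x).sum()
engine.backward(loss)  # DeepSpeed owns the backward pass
engine.step()          # and the optimizer step
```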
| 2023-12-27T06:19:17 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-912 | aaac5d550fd7f4c5083e953c77aaa4be1ae047dc | diff --git a/dlrover/trainer/torch/deepspeed/__init__.py b/dlrover/trainer/torch/deepspeed/__init__.py
deleted file mode 100644
index 561b39cf2..000000000
--- a/dlrover/trainer/torch/deepspeed/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2023 The DLRover Authors. All rights reserved.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/dlrover/trainer/torch/flash_checkpoint/checkpointer.py b/dlrover/trainer/torch/flash_checkpoint/checkpointer.py
new file mode 100644
index 000000000..874ef07bc
--- /dev/null
+++ b/dlrover/trainer/torch/flash_checkpoint/checkpointer.py
@@ -0,0 +1,65 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABCMeta, abstractmethod
+from enum import Enum, auto
+
+
+class StorageType(Enum):
+ MEMORY = auto()
+ DISK = auto()
+
+
+class Checkpointer(metaclass=ABCMeta):
+ """
+ Checkpointer can save and load PyTorch module states efficiently.
+
+ It begins by duplicating the state dictionary to shared memory,
+ then proceeds to save it to storage asynchronously. This process ensures
+ that the Checkpointer's save operation minimally blocks training time.
+ If the node does not fail, the Checkpointer prioritizes restoring the
+ checkpointed state dictionary directly from the shared memory upon load
+ requests. However, if the node has restarted, the Checkpointer reverts
+ to loading the state dictionary from the designated storage instead.
+ """
+
+ @abstractmethod
+ def save_checkpoint(
+ self, step, state_dict, path, storage_type=StorageType.DISK
+ ):
+ """
+ Save the checkpoint of model, optimizer and sampler.
+
+ Args:
+ step (int): the global iteration step.
+ state_dict (dict): the state dict of model and optimizer to save.
+ path (str): the storage path to save the state dict.
+ Note, the path is used to save the state dict to storage
+ only if the training process fails.
+            storage_type (StorageType): StorageType.MEMORY or StorageType.DISK.
+ """
+ pass
+
+ @abstractmethod
+ def load_checkpoint(self, resuming_path=None):
+ """
+ The manager loads the states from the files in the
+        checkpoint directory to the model, optimizer and sampler.
+ Args:
+            resuming_path (str, optional): The manager will load the
+                checkpoint from this path. If the path is None, the manager
+                will load the checkpoint from the file with the maximum step.
+ Return:
+ A dict: a state dict.
+ """
+ pass
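
To illustrate how this interface is meant to be used, here is a toy in-memory subclass; `DummyCheckpointer` is not part of DLRover (real subclasses such as `FsdpCheckpointer` delegate to a checkpoint engine instead), and it assumes the `Checkpointer` and `StorageType` definitions above:

```
class DummyCheckpointer(Checkpointer):
    # Toy subclass: a plain dict stands in for shared memory.
    def __init__(self):
        self._mem = {}

    def save_checkpoint(self, step, state_dict, path,
                        storage_type=StorageType.DISK):
        self._mem[step] = dict(state_dict)

    def load_checkpoint(self, resuming_path=None):
        return self._mem[max(self._mem)] if self._mem else {}


ckpt = DummyCheckpointer()
ckpt.save_checkpoint(10, {"step": 10}, "/tmp/ckpt-10.pt", StorageType.MEMORY)
print(ckpt.load_checkpoint())  # {'step': 10}
```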
diff --git a/dlrover/trainer/torch/deepspeed/async_checkpoint.py b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
similarity index 89%
rename from dlrover/trainer/torch/deepspeed/async_checkpoint.py
rename to dlrover/trainer/torch/flash_checkpoint/deepspeed.py
index a46f952ef..7cae9abaf 100644
--- a/dlrover/trainer/torch/deepspeed/async_checkpoint.py
+++ b/dlrover/trainer/torch/flash_checkpoint/deepspeed.py
@@ -28,6 +28,8 @@
DeepSpeedCheckpointEngine,
)
+from .checkpointer import StorageType
+
class AsyncSaveEngine(CheckpointEngine):
def __init__(self):
@@ -65,7 +67,7 @@ def commit(self, tag):
pass
-class DeepSpeedCheckpointManger(object):
+class DeepSpeedCheckpointer(object):
"""
The manager can synchronously save the DeepSpeedEngine checkpoint
to the memory and asynchronously save the checkpointing states
@@ -77,7 +79,7 @@ class DeepSpeedCheckpointManger(object):
Examples::
>>> engine = deepspeed.initialize(...)
- >>> ckpt_manager = DeepSpeedCheckpointManger(engine, save_dir)
+ >>> ckpt_manager = DeepSpeedCheckpointer(engine, save_dir)
>>> if step % 10 == 0:
>>> ckpt_manager.save_checkpoint_to_memory(save_dir, tag)
>>> if step % 100 == 0:
@@ -104,7 +106,26 @@ def __init__(self, engine: DeepSpeedEngine, checkpoint_dir):
if zero_stage < ZeroStageEnum.weights and self._local_rank == 0:
self.engine.save_non_zero_checkpoint = True
- def save_checkpoint_to_memory(
+ def save_checkpoint(
+ self,
+ save_dir,
+ tag=None,
+ client_state={},
+ save_latest=True,
+ storage_type=StorageType.DISK,
+ ):
+ if storage_type == StorageType.MEMORY:
+ self._save_checkpoint_to_memory(
+ save_dir, tag, client_state, save_latest
+ )
+ elif storage_type == StorageType.DISK:
+ self._save_checkpoint_to_storage(
+ save_dir, tag, client_state, save_latest
+ )
+ else:
+ raise ValueError(f"No support storage type {storage_type}")
+
+ def _save_checkpoint_to_memory(
self, save_dir, tag=None, client_state={}, save_latest=True
):
torch_save_func = torch.save
@@ -128,7 +149,7 @@ def save_checkpoint_to_memory(
except Exception:
pass
- def save_checkpoint_to_storage(
+ def _save_checkpoint_to_storage(
self, save_dir, tag=None, client_state={}, save_latest=True
):
torch_save_func = torch.save
diff --git a/dlrover/trainer/torch/flash_checkpoint/fsdp.py b/dlrover/trainer/torch/flash_checkpoint/fsdp.py
new file mode 100644
index 000000000..97a05a194
--- /dev/null
+++ b/dlrover/trainer/torch/flash_checkpoint/fsdp.py
@@ -0,0 +1,38 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .checkpointer import Checkpointer, StorageType
+from .fsdp_engine import FsdpCheckpointEngine
+
+
+class FsdpCheckpointer(Checkpointer):
+ def __init__(self, checkpoint_dir: str):
+ self._engine = FsdpCheckpointEngine(checkpoint_dir)
+
+ def save_checkpoint(
+ self, step, state_dict, path, storage_type=StorageType.DISK
+ ):
+ if storage_type == StorageType.MEMORY:
+ self._engine.save_to_memory(step, state_dict, path)
+ elif storage_type == StorageType.DISK:
+ if not path:
+ raise ValueError(
+ "path cannot be empty if storage type is disk!"
+ )
+ self._engine.save_to_storage(step, state_dict, path)
+ else:
+ raise ValueError(f"No support storage type {storage_type}")
+
+ def load_checkpoint(self, resume_path=""):
+ return self._engine.load(resume_path)
diff --git a/dlrover/trainer/torch/megatron/async_checkpoint.py b/dlrover/trainer/torch/flash_checkpoint/megatron.py
similarity index 62%
rename from dlrover/trainer/torch/megatron/async_checkpoint.py
rename to dlrover/trainer/torch/flash_checkpoint/megatron.py
index d4f20c6c8..334698a99 100644
--- a/dlrover/trainer/torch/megatron/async_checkpoint.py
+++ b/dlrover/trainer/torch/flash_checkpoint/megatron.py
@@ -22,18 +22,16 @@
try:
from megatron import get_args
- from megatron.checkpointing import (
- get_checkpoint_tracker_filename,
- load_checkpoint,
- save_checkpoint,
- )
+ from megatron.checkpointing import get_checkpoint_tracker_filename
+ from megatron.checkpointing import load_checkpoint as megatron_load
+ from megatron.checkpointing import save_checkpoint as megatron_save
except ImportError:
logger.warning("Please check the magatron.checkpointing exists.")
from dlrover.python.common.singleton import singleton
-from dlrover.trainer.torch.flash_checkpoint.megatron_engine import (
- MegatronCheckpointEngine,
-)
+
+from .checkpointer import StorageType
+from .megatron_engine import MegatronCheckpointEngine
@singleton
@@ -75,27 +73,12 @@ def update_latest_checkpoint_step(self):
self._latest_ckpt_iteration = iteration
-def save_checkpoint_to_storage(
- iteration, model, optimizer, opt_param_scheduler
-):
- """
- Asynchronously save the the checkpointing state dict into the storage.
- The method will not wait for saving the checkpointing to the storage.
-
- Args:
- same as the `megatron.checkpointing.load_checkpoint`
- """
- args = get_args()
- saver = MegatronCheckpointManager(args.save)
- torch_save_func = torch.save
- torch.save = saver.save
- save_checkpoint(iteration, model, optimizer, opt_param_scheduler)
- saver.engine.save_to_storage(iteration, saver.state_dict, saver.path)
- torch.save = torch_save_func
-
-
-def save_checkpoint_to_memory(
- iteration, model, optimizer, opt_param_scheduler
+def save_checkpoint(
+ iteration,
+ model,
+ optimizer,
+ opt_param_scheduler,
+ storage_type=StorageType.DISK,
):
"""
     Save the checkpointing state dict into the CPU memory or the storage.
@@ -103,23 +86,34 @@ def save_checkpoint_to_memory(
Args:
same as the `megatron.checkpointing.load_checkpoint`
"""
- args = get_args()
- saver = MegatronCheckpointManager(args.save)
- saver.update_latest_checkpoint_step()
- torch_save_func = torch.save
- torch.save = saver.save
- save_checkpoint(iteration, model, optimizer, opt_param_scheduler)
- saver.engine.save_to_memory(iteration, saver.state_dict, saver.path)
- torch.save = torch_save_func
-
- # Megatron save_checkpoint will create the directory with the iteration
- # and write the iteration into the tracerfile. But async saver only
- # save the state dict into the CPU memory not the storage. The saver
- # need to clear the empty checkpoint directory.
- saver.clear_empty_checkpoint(iteration)
-
-
-def load_latest_checkpoint(
+ if storage_type == StorageType.MEMORY:
+ args = get_args()
+ saver = MegatronCheckpointManager(args.save)
+ saver.update_latest_checkpoint_step()
+ torch_save_func = torch.save
+ torch.save = saver.save
+ megatron_save(iteration, model, optimizer, opt_param_scheduler)
+ saver.engine.save_to_memory(iteration, saver.state_dict, saver.path)
+ torch.save = torch_save_func
+
+        # Megatron save_checkpoint will create the directory with the
+        # iteration and write the iteration into the tracer file. But the
+        # async saver only saves the state dict into the CPU memory, not
+        # the storage, so the saver needs to clear the empty checkpoint
+        # directory.
+ saver.clear_empty_checkpoint(iteration)
+ elif storage_type == StorageType.DISK:
+ args = get_args()
+ saver = MegatronCheckpointManager(args.save)
+ torch_save_func = torch.save
+ torch.save = saver.save
+ megatron_save(iteration, model, optimizer, opt_param_scheduler)
+ saver.engine.save_to_storage(iteration, saver.state_dict, saver.path)
+ torch.save = torch_save_func
+ else:
+ raise ValueError(f"No support storage type {storage_type}")
+
+
+def load_checkpoint(
model, optimizer, opt_param_scheduler, load_arg="load", strict=True
):
"""Load the checkpointing state dict. The method firstly
@@ -131,5 +125,5 @@ def load_latest_checkpoint(
saver = MegatronCheckpointManager(args.save)
torch_load_func = torch.load
torch.load = saver.load
- load_checkpoint(model, optimizer, opt_param_scheduler, load_arg, strict)
+ megatron_load(model, optimizer, opt_param_scheduler, load_arg, strict)
torch.load = torch_load_func
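
Both code paths above rely on temporarily monkey-patching `torch.save`/`torch.load` so that Megatron's unchanged checkpointing code hands its state dict to the flash-checkpoint engine instead of the filesystem. A sketch of the pattern; the diff restores the function directly, while the `try/finally` here is a safety variant, not what the patch does:

```
import torch


def run_with_save_interceptor(saver, megatron_save_fn, *args):
    # `saver` is assumed to expose .save(obj, path), like the
    # MegatronCheckpointManager above.
    original_save = torch.save
    torch.save = saver.save
    try:
        megatron_save_fn(*args)  # Megatron calls torch.save internally
    finally:
        torch.save = original_save
```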
diff --git a/dlrover/trainer/torch/megatron/__init__.py b/dlrover/trainer/torch/megatron/__init__.py
deleted file mode 100644
index 561b39cf2..000000000
--- a/dlrover/trainer/torch/megatron/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright 2023 The DLRover Authors. All rights reserved.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
| Discussion: how to name the APIs that asynchronously save/load checkpoints.
We are developing features to asynchronously save the checkpoint into storage and will provide APIs to users. Now, there are two schemes to name the APIs. We can select one scheme or offer a new scheme. Please vote for your scheme in the replies.
Scheme 1:
```
def save_checkpoint_to_memory():
    """Synchronously save the checkpoint to the CPU memory."""
    pass


def save_checkpoint_to_storage():
    """Asynchronously save the checkpoint to the storage."""
    pass


def load_checkpoint():
    """
    Load the checkpoint from the CPU memory if the state dict is in
    the memory. Otherwise, load the checkpoint from the storage.
    """
    pass
```
Scheme 2:
```
def save_checkpoint(storage_level="memory_only"):  # alternative: in_memory=True/False
    """
    If storage_level is "memory_only", synchronously save the checkpoint
    to the CPU memory. Otherwise ("disk"), asynchronously save the
    checkpoint to the storage.
    """
    pass


def load_checkpoint():
    """
    Load the checkpoint from the CPU memory if the state dict is in
    the memory. Otherwise, load the checkpoint from the storage.
    """
    pass
```
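For concreteness, scheme 2 could also avoid a bare boolean flag by dispatching on an enum. A minimal sketch (the `StorageType` name mirrors the merged diff above; the bodies are placeholders, not the real implementation):
```
from enum import Enum


class StorageType(Enum):
    MEMORY = 1
    DISK = 2


def save_checkpoint(step, state_dict, storage_type=StorageType.MEMORY):
    """Save synchronously to CPU memory, or asynchronously to disk."""
    if storage_type == StorageType.MEMORY:
        ...  # synchronously copy the state dict into (shared) CPU memory
    elif storage_type == StorageType.DISK:
        ...  # asynchronously persist the state dict to the storage
    else:
        raise ValueError(f"Unsupported storage type {storage_type}")
```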
I prefer scheme 2, like the Spark RDD persist operation (https://spark.apache.org/docs/latest/api/python/reference/api/pyspark.RDD.persist.html) :)
I prefer scheme 1
From "code smells": It is best not to use flag/boolean types as function parameters. Flags are not a best practice because they reduce the cohesion of a function. It does more than one thing making it hard to understand what a function does based on the call site.
If I want to modify the checkpoint storage mode, for scheme 1, I can simply replace the function name directly. For scheme 2, I may need to adjust the parameters, which may not be as straightforward.
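For illustration, compare the call sites of the two schemes (hypothetical signatures, not the final API):
```
# Scheme 1: the intent is readable at the call site.
save_checkpoint_to_memory(step, state_dict)
save_checkpoint_to_storage(step, state_dict)

# Scheme 2 with a boolean flag: the reader must know what the flag means.
save_checkpoint(step, state_dict, in_memory=True)
save_checkpoint(step, state_dict, in_memory=False)
```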
> I prefer scheme 2, like the Spark RDD persist operation (https://spark.apache.org/docs/latest/api/python/reference/api/pyspark.RDD.persist.html) :)
The RDD scheme is similar to scheme 2, and I have updated scheme 2 accordingly.
I prefer scheme 2 since it hides details from users. Moreover, we should provide a reasonable default setup when users do not provide any configuration.
scheme 2
+1
I prefer scheme 2 since it looks more like a unified interface. When users do not specify where to checkpoint their models, we make the choice for them to achieve higher performance **by default**. | 2023-12-22T10:43:59 | 0.0 | [] | []
||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-892 | 251b6049b1bcc01867beb3969d633c7a9ea1c446 | diff --git a/dlrover/python/common/constants.py b/dlrover/python/common/constants.py
index db7b9a5a8..607af8b1e 100644
--- a/dlrover/python/common/constants.py
+++ b/dlrover/python/common/constants.py
@@ -273,3 +273,5 @@ class ConfigPath(object):
class CheckpointConstant(object):
CKPT_NAME_PREFIX = "checkpoint-"
TRACER_FILE_NAME = "latest_checkpointed_iteration.txt"
+ MODEL_STATES_NAME = "model_states"
+ OPTIM_STATES_NAME = "optim_states"
diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index cc2060157..79f8beaed 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -69,16 +69,43 @@ class CheckpointShardConfig:
Attributes:
step (int): the global interation step.
- ckpt_name (str): the path to save the checkpoint shard.
writing_shm (bool): the flag whether the training process is writing
the state dict into the shared memory.
"""
step: int = 0
- ckpt_name: str = ""
writing_shm: bool = False
+@dataclass
+class SingleFileCheckpointConfig(CheckpointShardConfig):
+ """
+ The configuration of a checkpointing shard to save the optimizer
+ and model state dict into a file.
+
+ Attributes:
+ path (str): the path to save the checkpoint shard.
+ """
+
+ path: str = ""
+
+
+@dataclass
+class DeepSpeedCheckpointConfig(CheckpointShardConfig):
+ """
+ The configuration of a checkpointing shard to save the DeepSpeed
+ ZERO-0/1/2/3 stage.
+
+ Attributes:
+ model_path (str): the path to save the checkpoint shard of model.
+ optimizer_path (str): the path to save the checkpoint
+ shard of optimizer.
+ """
+
+ model_path: str = ""
+ optimizer_path: str = ""
+
+
@dataclass
class SaveEvent:
name: str = ""
@@ -128,14 +155,17 @@ def _read_tensor_from_buf(value, shm_tensor_buffer):
Read a tensor from the buffer of shared memory.
"""
if isinstance(value, TensorMeta):
- shm_tensor = torch.frombuffer(
- buffer=shm_tensor_buffer.buf,
- dtype=value.dtype,
- offset=value.offset,
- count=value.numel,
- )
- value = shm_tensor.reshape(value.shape)
- return value
+ if value.numel == 0:
+ return torch.tensor([], dtype=value.dtype)
+ else:
+ shm_tensor = torch.frombuffer(
+ buffer=shm_tensor_buffer.buf,
+ dtype=value.dtype,
+ offset=value.offset,
+ count=value.numel,
+ )
+ value = shm_tensor.reshape(value.shape)
+ return value
else:
return value
@@ -207,6 +237,8 @@ def _write_shared_memory(value: torch.Tensor, meta: TensorMeta, buffer):
"""
Write a CPU tensor into the shared memory.
"""
+ if value.numel() == 0:
+ return
shm_tensor = torch.frombuffer(
buffer, dtype=value.dtype, count=value.numel(), offset=meta.offset
).reshape(value.shape)
@@ -265,27 +297,23 @@ def _create_tensor_meta(self, value: torch.Tensor):
self._buffer_size += value.numel() * value.element_size()
return meta
- def save_state_dict(self, step, state_dict, ckpt_name=""):
+ def save_state_dict(self, state_dict, ckpt_conf: CheckpointShardConfig):
"""
Copy the state dict from CPU memory buffer into the shared memory.
"""
- if self._tensor_shm is None:
+ if not self._tensor_shm:
meta_dict = _traverse_state_dict(
state_dict, self._create_tensor_meta
)
self.init_tensor_shm(create=True, size=self._buffer_size)
else:
meta_dict = self._tensor_meta.get(local=True)
+ ckpt_conf.writing_shm = True
+ meta_dict[_DLROVER_CKPT_KEY] = ckpt_conf
- conf = CheckpointShardConfig(
- step=step,
- ckpt_name=ckpt_name,
- writing_shm=True,
- )
- meta_dict[_DLROVER_CKPT_KEY] = conf
self._tensor_meta.set(meta_dict)
_traverse_copy_to_shm(state_dict, meta_dict, self._tensor_shm.buf)
- conf.writing_shm = False
+ ckpt_conf.writing_shm = False
self._tensor_meta.set(meta_dict)
def load_state_dict(self):
@@ -322,7 +350,7 @@ def init_tensor_shm(self, create=False, size=0):
self._shm_name, create=create, size=size
)
- def get_checkpoint_config(self):
+ def get_checkpoint_config(self, default_config):
"""
Get the configuration of checkpointing state dict in the shared
memory.
@@ -331,7 +359,6 @@ def get_checkpoint_config(self):
A CheckpointShardConfig instance.
"""
meta_dict = self._tensor_meta.get()
- default_config = CheckpointShardConfig()
config = meta_dict.get(_DLROVER_CKPT_KEY, default_config)
return config
@@ -470,7 +497,11 @@ def _reset_shared_memory(self):
shm_handler.reset()
def _save_shard(
- self, step, local_shard_id: int, ckpt_path: str, step_done_dir: str
+ self,
+ step,
+ local_shard_id: int,
+ ckpt_config: CheckpointShardConfig,
+ step_done_dir: str,
):
"""Save the shard of state dict into the storage."""
try:
@@ -480,7 +511,8 @@ def _save_shard(
shm_handler.init_tensor_shm(create=False)
shm_lock.acquire()
- config = shm_handler.get_checkpoint_config()
+ default_config = CheckpointShardConfig()
+ config = shm_handler.get_checkpoint_config(default_config)
if config.step != step:
shm_lock.release()
logger.error(
@@ -491,11 +523,11 @@ def _save_shard(
logger.info(
f"Local rank {local_shard_id} Save checkpoint from the shared "
- f"memory into the storage {ckpt_path}."
+ f"memory into the storage {ckpt_config}."
)
state_dict = shm_handler.load_state_dict()
state_dict.pop(_DLROVER_CKPT_KEY, None)
- self.persist_to_storage(state_dict, ckpt_path)
+ self.persist_to_storage(state_dict, ckpt_config)
shm_lock.release()
global_shard_id = (
self.local_shard_num * self._node_rank + local_shard_id
@@ -557,7 +589,8 @@ def save_shm_to_storage(self, timeout=120):
return
steps = []
for shm_handler in self._shm_handlers:
- config = shm_handler.get_checkpoint_config()
+ default_config = CheckpointShardConfig()
+ config = shm_handler.get_checkpoint_config(default_config)
steps.append(config.step)
if len(set(steps)) > 1:
logger.error(
@@ -584,13 +617,14 @@ def save_step_checkpoint(self, step: int):
pass
@abstractmethod
- def persist_to_storage(self, state_dict, ckpt_path):
+ def persist_to_storage(self, state_dict, ckpt_config):
"""
Persist the state dict to a storage path.
Args:
state_dict (dict): the state dict of PyTorch modules.
- ckpt_path (str): the path of storaget to persist the state dict.
+ ckpt_config: the checkpoint config with the storage path.
"""
pass
@@ -676,12 +710,15 @@ def save_step_checkpoint(self, step: int):
# save to stage path for each local rank
futures: List[Future] = []
for i in range(self.local_shard_num):
- ckpt_config = self._shm_handlers[i].get_checkpoint_config()
+ default_config = SingleFileCheckpointConfig()
+ ckpt_config = self._shm_handlers[i].get_checkpoint_config(
+ default_config
+ )
future: Future = self._executor.submit(
self._save_shard,
step,
i,
- ckpt_config.ckpt_name,
+ ckpt_config,
step_done_dir,
)
futures.append(future)
@@ -712,11 +749,13 @@ def save_step_checkpoint(self, step: int):
self._writing_storage = False
- def persist_to_storage(self, state_dict, path):
+ def persist_to_storage(
+ self, state_dict, ckpt_config: SingleFileCheckpointConfig
+ ):
"""Persist the checkpoint from CPU memory buffer into the storage."""
- checkpoint_dir = os.path.dirname(path)
+ checkpoint_dir = os.path.dirname(ckpt_config.path)
os.makedirs(checkpoint_dir, exist_ok=True)
- torch.save(state_dict, path)
+ torch.save(state_dict, ckpt_config.path)
def commit_checkpoint(self, step: int, step_done_dir: str, timeout=600):
"""
@@ -908,6 +947,39 @@ def commit_checkpoint( # type: ignore
time.sleep(2)
+class DeepSpeedCheckpointSaver(AsyncCheckpointSaver):
+ @classmethod
+ def get_checkpoint_tracker_filename(cls, checkpoint_dir):
+ """
+ Get the path of tracker file to record the latest checkpointing
+ step.
+
+ Args:
+ checkpoint_dir (str): the checkpoint directory.
+
+ Returns:
+ str: the path of tracker file.
+ """
+ fname = "latest"
+ return os.path.join(checkpoint_dir, fname)
+
+ def persist_to_storage( # type: ignore
+ self, state_dict, ckpt_config: DeepSpeedCheckpointConfig
+ ):
+ """Persist the checkpoint from CPU memory buffer into the storage."""
+ model_sd = state_dict.get(CheckpointConstant.MODEL_STATES_NAME, {})
+ if model_sd and ckpt_config.model_path:
+ checkpoint_dir = os.path.dirname(ckpt_config.model_path)
+ os.makedirs(checkpoint_dir, exist_ok=True)
+ torch.save(model_sd, ckpt_config.model_path)
+
+ optimizer_sd = state_dict.get(CheckpointConstant.OPTIM_STATES_NAME, {})
+ if optimizer_sd and ckpt_config.optimizer_path:
+ checkpoint_dir = os.path.dirname(ckpt_config.optimizer_path)
+ os.makedirs(checkpoint_dir, exist_ok=True)
+ torch.save(optimizer_sd, ckpt_config.optimizer_path)
+
+
class CheckpointEngine(metaclass=ABCMeta):
"""
The engine runs in the training process and is called by the
@@ -928,7 +1000,6 @@ def __init__(self, checkpoint_dir: str):
self._rank = 0
self._local_rank = int(os.getenv("LOCAL_RANK", 0))
self._saver_group = None
- self._buffer_size = 0
self._cached_step = 0
self._restart_count = env_utils.get_torch_restart_count()
# queue for agent to save to storage, only rank 0
@@ -1010,6 +1081,15 @@ def save_to_memory(self, step, state_dict, path=""):
Note, the path is used to save the state dict to storage
only if the training process fails.
"""
+ conf = SingleFileCheckpointConfig(
+ step=step,
+ path=path,
+ )
+ self._save_state_dict_to_memory(state_dict, conf)
+
+ def _save_state_dict_to_memory(
+ self, state_dict, conf: CheckpointShardConfig
+ ):
if self._local_rank != self.local_shard_id:
return
@@ -1029,11 +1109,11 @@ def save_to_memory(self, step, state_dict, path=""):
if acquired:
self._shm_lock.release()
return
- self._shm_handler.save_state_dict(step, state_dict, path)
+ self._shm_handler.save_state_dict(state_dict, conf)
if acquired:
self._shm_lock.release()
- self._cached_step = step
+ self._cached_step = conf.step
@abstractmethod
def get_saver_class(self):
@@ -1243,36 +1323,15 @@ def save_to_storage(self, step, state_dict, path):
class MegatronCheckpointEngine(CheckpointEngine):
"""
- The checkpoint engine synchronously writes the state dict into
+ The checkpoint engine synchronously writes the state dict of
+ the Megatron-LM model and optimizer into
the shared memory and notifies the agent in the main process to
asynchronously save the state dict from the shared memory into
- the storage. Writing to memory is significantly quicker
- than writing to storage. The engine only blocks the training
- with a little time. Users can frequently call `save_to_memory` in
- the training loop and call `save_to_storage`.
-
- If the training process fail, the agent in main process can continuely
- saves the the state dict from the shared memory into the storage.
- The engine saves the model and optimizer state dict without sharding
- in a local or DDP job.
+ the storage.
Attributes:
checkpoint_dir (str): the directory to save the temp checkpoint
if the training process fails.
-
- Examples::
- >>> engine = MegatronCheckpointEngine(
- >>> checkpoint_dir="/tmp/checkpoint/"
- >>> )
- >>> for step, data in enumerate(dataloader):
- >>> ...
- >>> state_dict = model.state_dict()
- >>> path = f"/tmp/checkpoint-{step}.pt"
- >>> if step % 5 == 0:
- >>> engine.save_to_memory(step, state_dict, path)
- >>> elif step % 100 == 0:
- >>> engine.save_to_storage(step, state_dict, path)
- >>> sate_dict = engine.load()
"""
def __init__(self, checkpoint_dir):
@@ -1371,10 +1430,7 @@ def load(self, resume_path=""):
def _load_from_storage(self, resume_path=""):
"""
- Load the state dict from the CPU memory if the state dict is complete
- in CPU memory. Otherwise, the function will load the state dict from
- the storage.
-
+ Load the state dict from the storage.
Args:
resume_path (str, optional): if the resume_path is an empty
string, the function will load the latest checkpoint file in
@@ -1388,31 +1444,161 @@ def _load_from_storage(self, resume_path=""):
if resume_path:
state_dict = torch.load(resume_path, map_location="cpu")
return state_dict
- else:
- func = AsyncCheckpointSaver.get_checkpoint_tracker_filename
- tracker_filename = func(self.checkpoint_dir)
- if not os.path.exists(tracker_filename):
- return {}
- with open(tracker_filename, "r") as f:
- metastring = f.read().strip()
- iteration = int(metastring)
- ckpt_name = self._get_checkpoint_name(iteration)
- state_dict = torch.load(ckpt_name, map_location="cpu")
- return state_dict
+ return {}
+
+
+class DeepSpeedCheckpointEngine(CheckpointEngine):
+ """
+ The checkpoint engine synchronously writes the state dict of
+ `DeepSpeedEngine` into the shared memory and notifies the agent
+ in the main process to asynchronously save the state dict from the shared
+ memory into the storage.
+
+ Attributes:
+ checkpoint_dir (str): the directory to save the temp checkpoint
+ if the training process fails.
+ dp_size (int): the world size of data parallelism.
+ """
- def _get_checkpoint_name(self, step):
- directory = "iter_{:07d}".format(step)
- # Use both the tensor and pipeline MP rank.
- if self._pp_world_size == 1:
- return os.path.join(
- self.checkpoint_dir,
- directory,
- "mp_rank_{:02d}".format(self._tp_rank),
- "model_optim_rng.pt",
+ def __init__(self, checkpoint_dir, global_shard_num=1, zero_stage=0):
+ self.global_shard_num = global_shard_num
+ self.zero_stage = zero_stage
+ super().__init__(checkpoint_dir)
+ if dist.is_initialized():
+ saver_ranks = self._get_saver_ranks()
+ logger.info(f"Saver ranks of DeepSpeed is {saver_ranks}")
+ self._saver_group = dist.new_group(
+ ranks=saver_ranks,
+ backend="gloo",
+ timeout=timedelta(seconds=30),
)
- return os.path.join(
- self.checkpoint_dir,
- directory,
- "mp_rank_{:02d}_{:03d}".format(self._tp_rank, self._pp_rank),
- "model_optim_rng.pt",
+
+ def _get_saver_ranks(self):
+ """
+ Get the ranks which need to save the sharding state dict into
+ the memory.
+ """
+ world_size = dist.get_world_size()
+ local_world_size = env_utils.get_local_world_size()
+ save_ranks = []
+ local_shard_num = self.get_local_shard_num()
+ for i in range(world_size):
+ local_rank = i % local_world_size
+ if local_rank < local_shard_num:
+ save_ranks.append(i)
+ return save_ranks
+
+ @timer
+ def save_to_memory(
+ self, step, state_dict, model_path="", optimizer_path=""
+ ):
+ """
+ Synchronously saves the state dict into the shared memory with the main
+ process. If the agent in the main process is saving the shared memory
+ into the storage, the method will skip writing the shared memory.
+ Only local rank 0 saves the state dict into the memory because the
+ state dict is replicated across all ranks.
+
+ Args:
+ step (int): the global iteration step.
+ state_dict (dict): the state dict of model and optimizer to save.
+ model_path (str): the storage path to save the model state dict.
+ optimizer_path (str): the storage path to save the optimizer
+ state dict.
+ """
+ conf = DeepSpeedCheckpointConfig(
+ step=step,
+ model_path=model_path,
+ optimizer_path=optimizer_path,
+ )
+ self._save_state_dict_to_memory(state_dict, conf)
+
+ @timer
+ def save_to_storage(
+ self, step, state_dict, model_path="", optimizer_path=""
+ ):
+ """
+ Asynchronously saves the state dict into the storage. It synchronously
+ saves the state dict into the shared memory and puts the path
+ into a shared queue. The agent in the main process watches the queue
+ and saves the state dict from the shared memory into the storage.
+ Only rank 0 saves the state dict into the storage.
+
+ Args:
+ step (int): the global iteration step.
+ state_dict (dict): the state dict of model and optimizer to save.
+ model_path (str): the storage path to save the model state dict.
+ optimizer_path (str): the storage path to save the optimizer
+ state dict.
+ """
+ if step > self._cached_step:
+ self.save_to_memory(step, state_dict, model_path, optimizer_path)
+
+ # Only local rank 0 to notify the saving event to the agent.
+ if self._local_rank != 0:
+ return
+ if model_path or optimizer_path:
+ event = SaveEvent(name=_SAVE_EVENT_NAME, step=step)
+ self._event_queue.put(event)
+
+ def get_local_shard_num(self):
+ local_world_size = env_utils.get_local_world_size()
+ global_shard_num = self.get_global_shard_num()
+ return min(local_world_size, global_shard_num)
+
+ def get_global_shard_num(self):
+ return self.global_shard_num
+
+ def get_saver_class(self):
+ return DeepSpeedCheckpointSaver
+
+ def load(self, resume_model_path="", resume_optimizer_path=""):
+ """
+ The method first tries to load the state dict from the shared memory.
+ If there is no state dict in the shared memory, the method will
+ load the state dict from the storage.
+
+ Returns:
+ A dict.
+ """
+ state_dict = self._shm_handler.load_state_dict()
+ msd_name = CheckpointConstant.MODEL_STATES_NAME
+ if msd_name not in state_dict and self.zero_stage in [1, 2]:
+ local_rank_0_shm_handler = SharedMemoryHandler(0, host=False)
+ # For stage 1,2, the model is not partitioned and only local rank 0
+ # saves the model state dict into the CPU memory. Other local ranks
+ # need to get the model state dict from the shared memory of local
+ # rank 0.
+ sd = local_rank_0_shm_handler.load_state_dict()
+ state_dict[msd_name] = sd[msd_name]
+ if state_dict:
+ return state_dict
+ state_dict = self._load_from_storage(
+ resume_model_path, resume_optimizer_path
)
+ return state_dict
+
+ def _load_from_storage(
+ self, resume_model_path="", resume_optimizer_path=""
+ ):
+ """
+ Load the DeepSpeedEngine state dict from the storage.
+
+ Args:
+ resume_path (str, optional): if the resume_path is an empty
+ string, the function will load the latest checkpoint file in
+ the checkpoint directory.
+
+ Returns:
+ A dict:
+ a dictionary containing a whole state of the modules in the
+ checkpointing file.
+ """
+ ds_state_dict = {}
+ if resume_model_path:
+ sd = torch.load(resume_model_path, map_location="cpu")
+ ds_state_dict[CheckpointConstant.MODEL_STATES_NAME] = sd
+ if resume_optimizer_path:
+ sd = torch.load(resume_optimizer_path, map_location="cpu")
+ ds_state_dict[CheckpointConstant.OPTIM_STATES_NAME] = sd
+ return ds_state_dict
| Asynchronously save the checkpoint into the storage for DeepSpeed.
Currently, `deepspeed.DeepSpeedEngine.save_checkpoint` blocks training while it saves the checkpoint into the storage.
https://deepspeed.readthedocs.io/en/latest/model-checkpointing.html#deepspeed.DeepSpeedEngine.save_checkpoint.
If the storage is remote, saving may take too long. Instead, we can save the checkpoint into the storage asynchronously, which only blocks training for a short time.
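The general idea, as a minimal sketch: synchronously copy the state dict to CPU memory (fast), then persist it to the storage in the background. This only illustrates the approach with a flat state dict and a thread; the real engine uses shared memory and a saver in the agent process, and `async_save` is a hypothetical helper, not the DLRover or DeepSpeed API:
```
import threading

import torch


def async_save(state_dict, path):
    """Copy tensors to CPU synchronously, then write to storage in background."""
    cpu_state = {
        k: v.detach().cpu().clone() if torch.is_tensor(v) else v
        for k, v in state_dict.items()
    }
    thread = threading.Thread(
        target=torch.save, args=(cpu_state, path), daemon=True
    )
    thread.start()
    return thread  # callers may join() before writing the next checkpoint
```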
| 2023-12-16T03:10:08 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-874 | 04db873b95d7eb930ea3b667a1812df3e529e80f | diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 2bdb83236..09dcdf72a 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -542,6 +542,7 @@ def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
self._exit_barrier()
return run_result
elif state in {WorkerState.UNHEALTHY, WorkerState.FAILED}:
+ logger.error(f"The worker fails with {run_result.failures}")
self._report_failure_to_master(run_result.failures)
self._save_ckpt_to_storage()
if self._remaining_failovers > 0:
diff --git a/dlrover/python/master/elastic_training/rdzv_manager.py b/dlrover/python/master/elastic_training/rdzv_manager.py
index 97318bd18..bccfb65c9 100644
--- a/dlrover/python/master/elastic_training/rdzv_manager.py
+++ b/dlrover/python/master/elastic_training/rdzv_manager.py
@@ -62,6 +62,9 @@ def __init__(self):
self._node_unit = 1
self._name = ""
self._latest_rdzv_nodes = []
+ self._start_rdzv_ts = 0
+ self._node_rdzv_times: Dict[int, int] = {}
+ self._latest_log_nodes_time = 0
def get_rdzv_round(self):
return self._rdzv_round
@@ -134,17 +137,35 @@ def _check_rdzv_completed(self):
- set(self._rdzv_nodes.items())
)
self._lastcall_time = 0
- logger.info(
- f"Completed {self._rdzv_round} round "
- f"rendezvous of {self._name} is {self._rdzv_nodes}"
- )
+ self._log_rendezvous_info()
if self._waiting_nodes:
logger.warning(
f"Waiting nodes not in {self._rdzv_round} rendezvous "
f"are {self._waiting_nodes}."
)
+ elif time.time() - self._latest_log_nodes_time > 60:
+ self._latest_log_nodes_time = time.time()
+ logger.info(
+ f"Waiting nodes in rendezvous are {self._waiting_nodes}"
+ )
return rdzv_completed
+ def _log_rendezvous_info(self):
+ logger.info(
+ f"Completed {self._rdzv_round} round "
+ f"rendezvous of {self._name} is {self._rdzv_nodes} \n"
+ "The times of nodes to join rendezvous "
+ f"are {self._node_rdzv_times}."
+ )
+ self._node_rdzv_times.clear()
+ if self._start_rdzv_ts > 0:
+ rdzv_time = round(time.time() - self._start_rdzv_ts, 2)
+ logger.info(
+ f"Elapsed time to complete the {self._rdzv_round} "
+ f"round rendzvous is {rdzv_time}s"
+ )
+ self._start_rdzv_ts = 0
+
def not_joined_rdzv_nodes(self):
"""Return workers which do not join a rendezvous."""
nodes = []
@@ -168,11 +189,18 @@ def join_rendezvous(
int: the number of rendezvous round.
"""
with self._lock:
+ if not self._waiting_nodes:
+ self._start_rdzv_ts = time.time()
+ logger.info(f"Start the {self._rdzv_round} round rendezvous.")
if node_rank in self._waiting_nodes:
return self._rdzv_round
self._waiting_nodes[node_rank] = local_world_size
self._rdzv_nodes = {}
self._lastcall_time = time.time()
+ self._node_rdzv_times[node_rank] = round(
+ self._lastcall_time - self._start_rdzv_ts, 2
+ )
+
return self._rdzv_round
def num_nodes_waiting(self):
@@ -300,6 +328,8 @@ def get_comm_world(self, node_rank):
if not self._node_groups:
rdzv_completed = self._check_rdzv_completed()
if rdzv_completed:
+ self._fault_nodes.clear()
+ self._straggler_nodes.clear()
self._node_groups = self._group_nodes(self._rdzv_round)
logger.info(
f"Round {self._rdzv_round} "
@@ -414,8 +444,6 @@ def join_rendezvous(
int: the number of rendezvous round.
"""
self._node_groups.clear()
- self._fault_nodes.clear()
- self._straggler_nodes.clear()
return super().join_rendezvous(node_rank, local_world_size)
def check_fault_node(self):
| Record the elapsed time to create the rendezvous.
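A minimal sketch of the timing logic this patch adds: record a timestamp when the first node joins a round, track each node's join time, and log the elapsed time when the round completes. The attribute names follow the diff above, but the `RendezvousTimer` class itself is hypothetical, not the manager's real API:
```
import time


class RendezvousTimer:
    def __init__(self):
        self._start_rdzv_ts = 0.0
        self._node_rdzv_times = {}

    def on_node_joined(self, node_rank, is_first_node):
        # The first joining node marks the start of the round.
        if is_first_node:
            self._start_rdzv_ts = time.time()
        self._node_rdzv_times[node_rank] = round(
            time.time() - self._start_rdzv_ts, 2
        )

    def on_completed(self, rdzv_round):
        if self._start_rdzv_ts > 0:
            elapsed = round(time.time() - self._start_rdzv_ts, 2)
            print(f"Round {rdzv_round} rendezvous took {elapsed}s")
        self._start_rdzv_ts = 0.0
        self._node_rdzv_times.clear()
```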
| 2023-12-07T09:53:43 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-868 | 4f1004dcacc53e029b3b524eea4608530e7dbba2 | diff --git a/dlrover/python/common/multi_process.py b/dlrover/python/common/multi_process.py
index 79a735974..0e73f63f6 100644
--- a/dlrover/python/common/multi_process.py
+++ b/dlrover/python/common/multi_process.py
@@ -65,7 +65,7 @@ def _create_socket_client(path):
client.connect(path)
connected = True
break
- except FileNotFoundError:
+ except (FileNotFoundError, ConnectionRefusedError):
time.sleep(0.1)
if not connected:
client.connect(path)
@@ -460,7 +460,7 @@ def update(self, new_dict):
self._shared_queue.put(1)
self._request(request)
except Exception:
- logger.info("The recv processs has breakdown.")
+ logger.info("The recv process has breakdown.")
def get(self, local=False):
"""
diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index b0e8d4a6b..6647245e3 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -22,7 +22,6 @@
from datetime import timedelta
from typing import Callable, Dict, List, Mapping, Tuple
-import numpy as np
import torch
import torch.distributed as dist
@@ -77,27 +76,6 @@ def _init_dir(dir):
os.makedirs(dir)
-def _convert_torch_dtype_to_numpy(torch_dtype):
- """Conver the torch dtype to numpy dtype."""
- dtype_map = {
- torch.float32: np.float32,
- torch.float: np.float32,
- torch.float64: np.float64,
- torch.double: np.double,
- torch.float16: np.float16,
- torch.half: np.half,
- torch.uint8: np.uint8,
- torch.int8: np.int8,
- torch.int16: np.int16,
- torch.short: np.short,
- torch.int32: np.int32,
- torch.int: np.int32,
- torch.long: np.int64,
- torch.bool: np.dtype("bool"),
- }
- return dtype_map[torch_dtype]
-
-
def _traverse_state_dict(value: object, visitor: Callable[[object], None]):
"""
Invoke ``visitor`` for each value recursively in ``state_dict``.
@@ -129,13 +107,13 @@ def _read_tensor_from_buf(value, shm_tensor_buffer):
Read a tensor from the buffer of shared memory.
"""
if isinstance(value, TensorMeta):
- data_array = np.frombuffer(
+ shm_tensor = torch.frombuffer(
buffer=shm_tensor_buffer.buf,
dtype=value.dtype,
offset=value.offset,
count=value.numel,
)
- value = torch.reshape(torch.tensor(data_array), value.shape)
+ value = shm_tensor.reshape(value.shape)
return value
else:
return value
@@ -213,21 +191,14 @@ def _tarverse_copy_to_shm(value, meta, buffer):
meta[i] = v
-def _write_shared_memory(value, meta: TensorMeta, buffer):
+def _write_shared_memory(value: torch.Tensor, meta: TensorMeta, buffer):
"""
Write a CPU tensor into the shared memory.
"""
- data_array = value.cpu().numpy()
- write_array = np.ndarray(
- data_array.shape,
- dtype=data_array.dtype,
- buffer=buffer,
- offset=meta.offset,
- )
- if data_array.shape == ():
- write_array.fill(data_array)
- else:
- write_array[:] = data_array[:]
+ shm_tensor = torch.frombuffer(
+ buffer, dtype=value.dtype, count=value.numel(), offset=meta.offset
+ ).reshape(value.shape)
+ shm_tensor.copy_(value)
def _load_from_historic_checkpoint(checkpoint_dir):
@@ -270,10 +241,6 @@ def __init__(self, checkpoint_dir, num_proc=1):
self.checkpoint_dir = checkpoint_dir
self.num_proc = num_proc
- @abstractmethod
- def _sync_shm_to_storage(self):
- pass
-
@classmethod
def start_async_saving_ckpt(cls):
"""
@@ -299,6 +266,10 @@ def _save():
target=_save, name="checkpoint-saver", daemon=True
).start()
+ @abstractmethod
+ def _sync_shm_to_storage(self):
+ pass
+
@classmethod
def get_ckpt_saver(cls):
return cls._saver_instance
@@ -618,10 +589,9 @@ def _create_tensor_meta(self, value: torch.Tensor):
"""
if not torch.is_tensor(value):
return value
- dtype = _convert_torch_dtype_to_numpy(value.dtype)
meta = TensorMeta(
shape=tuple(value.shape), # type: ignore
- dtype=dtype,
+ dtype=value.dtype,
element_size=value.element_size(),
numel=value.numel(),
offset=self._buffer_size,
| CheckpointManager fails to save bfloat16 tensors.
```bash
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.8/site-packages/torch/_tensor.py", line 972, in __array__
return self.numpy().astype(dtype, copy=False)
TypeError: Got unsupported ScalarType BFloat16
```
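The root cause is that NumPy has no bfloat16 dtype, so any round-trip through `tensor.numpy()` fails for bfloat16 models. The patch drops NumPy entirely and views the shared-memory buffer with `torch.frombuffer`, roughly as below (a sketch mirroring `_write_shared_memory` in the diff, simplified to take a raw offset instead of a TensorMeta):
```
import torch


def write_to_shared_buffer(value: torch.Tensor, buffer, offset: int):
    # View the raw buffer as a tensor of the source dtype (bfloat16 works),
    # then copy the source tensor into that view.
    shm_tensor = torch.frombuffer(
        buffer, dtype=value.dtype, count=value.numel(), offset=offset
    ).reshape(value.shape)
    shm_tensor.copy_(value)
```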
| 2023-12-03T12:10:45 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-864 | 84f6c86fb51439fd7f0fe986bd8ca6b60cb49363 | diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index d0b673a0d..983e28866 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -229,6 +229,31 @@ def _save():
def get_ckpt_saver(cls):
return cls._saver_instance
+ @classmethod
+ def register_signal_handler(cls):
+ sigint_handler = signal.getsignal(signal.SIGINT)
+ sigterm_handler = signal.getsignal(signal.SIGTERM)
+
+ def _clean_shm_handler(signum, frame):
+ """Clean the shared memory from ^C and "killall python" etc."""
+ if cls._saver_instance:
+ cls._saver_instance.close()
+ if callable(sigint_handler):
+ sigint_handler(signum, frame)
+
+ def _save_shm_before_exiting(signum, frame):
+ """Save the state dict from the shared memory into the storage
+ before the process exits.
+ """
+ if cls._saver_instance:
+ cls._saver_instance.save_shm_to_storage()
+ cls._saver_instance.close()
+ if callable(sigterm_handler):
+ sigterm_handler(signum, frame)
+
+ signal.signal(signal.SIGINT, _clean_shm_handler)
+ signal.signal(signal.SIGTERM, _save_shm_before_exiting)
+
@abstractmethod
def close(self):
pass
@@ -717,14 +742,3 @@ def save_to_storage(self, state_dict, path, step):
"""
if self._rank == 0:
super().save_to_storage(state_dict, path, step)
-
-
-def _clean_shm_handler(signum, frame):
- """Clean the shared memory from ^C and "killall python" etc."""
- saver: CheckpointSaver = CheckpointSaver.get_ckpt_saver()
- if saver:
- saver.close()
-
-
-signal.signal(signal.SIGINT, _clean_shm_handler)
-signal.signal(signal.SIGTERM, _clean_shm_handler)
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 7a500b4b1..cf5d8cb3d 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -487,6 +487,9 @@ def _initialize_workers(self, worker_group):
if self._config.network_check:
run_network_check(self._config, self._entrypoint)
super()._initialize_workers(worker_group)
+ # We need to register handler after starting workers because
+ # the PContext start_worker will overwrite the handler.
+ CheckpointSaver.register_signal_handler()
except RendezvousOutSyncError:
logger.info(
"Exit elastic-training rendezvous when there are "
@@ -541,6 +544,7 @@ def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
return run_result
elif state in {WorkerState.UNHEALTHY, WorkerState.FAILED}:
self._report_failure_to_master(run_result.failures)
+ self._save_ckpt_to_storage()
if self._remaining_failovers > 0:
logger.info(
f"[{role}] Worker group {state.name}. "
@@ -556,10 +560,20 @@ def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
elif state == WorkerState.HEALTHY:
# membership changes do not count as retries
if self._membership_changed(role, rdzv_handler):
+ self._save_ckpt_to_storage()
self._restart_workers(self._worker_group)
else:
raise Exception(f"[{role}] Worker group in {state.name} state")
+ def _save_ckpt_to_storage(self):
+ """
+ The agent can save the checkpointing state dict in the shared
+ memory into the storage before restarting training processes.
+ """
+ saver: CheckpointSaver = CheckpointSaver.get_ckpt_saver()
+ if saver:
+ saver.save_shm_to_storage()
+
def _stop_workers_to_restart(self):
"""
The agent query from the dlrover job master to check whether to restart
@@ -588,18 +602,8 @@ def _report_failure_to_master(self, failures: Dict[int, ProcessFailure]):
def _restart_workers(self, worker_group: WorkerGroup):
self._restart_count += 1
self._remaining_restarts -= 1
- self._save_ckpt_to_storage()
super()._restart_workers(worker_group)
- def _save_ckpt_to_storage(self):
- """
- The agent can save the checkpointing state dict in the shared
- memory into the storage before restarting training processes.
- """
- saver: CheckpointSaver = CheckpointSaver.get_ckpt_saver()
- if saver:
- saver.save_shm_to_storage()
-
def _membership_changed(self, role, rdzv_handler: RendezvousHandler):
# Timeout may happen when to query TCPStore.
if self._config.network_check:
| Save the state dict from memory to storage if the Pod is preempted.
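Kubernetes sends SIGTERM before killing a preempted Pod, which gives the agent a window to flush the shared memory to storage. A minimal sketch that chains the previous handler, as the patch above does (`register_sigterm_flush` is a hypothetical wrapper around the saver methods shown in the diff):
```
import signal


def register_sigterm_flush(saver):
    """Persist the in-memory checkpoint to storage when SIGTERM arrives."""
    prev_handler = signal.getsignal(signal.SIGTERM)

    def _flush_before_exit(signum, frame):
        saver.save_shm_to_storage()
        saver.close()
        # Chain the handler that was installed before ours.
        if callable(prev_handler):
            prev_handler(signum, frame)

    signal.signal(signal.SIGTERM, _flush_before_exit)
```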
| 2023-12-01T02:13:49 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-862 | bb289574b95811335153c22164239b45fb8d1ecf | diff --git a/dlrover/python/common/multi_process.py b/dlrover/python/common/multi_process.py
index f816c6aef..c14d91b12 100644
--- a/dlrover/python/common/multi_process.py
+++ b/dlrover/python/common/multi_process.py
@@ -60,13 +60,13 @@ def _create_socket_client(path):
"""
client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
connected = False
- for _ in range(3):
+ for _ in range(30):
try:
client.connect(path)
connected = True
break
except FileNotFoundError:
- time.sleep(1)
+ time.sleep(0.1)
if not connected:
client.connect(path)
return client
diff --git a/dlrover/python/elastic_agent/torch/ckpt_saver.py b/dlrover/python/elastic_agent/torch/ckpt_saver.py
index 6e2c2b13f..d0b673a0d 100644
--- a/dlrover/python/elastic_agent/torch/ckpt_saver.py
+++ b/dlrover/python/elastic_agent/torch/ckpt_saver.py
@@ -13,14 +13,18 @@
import os
import shutil
+import signal
import sys
import threading
+import time
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
+from datetime import timedelta
from typing import Callable, List, Mapping, Tuple
import numpy as np
import torch
+import torch.distributed as dist
from dlrover.python.common.log import default_logger as logger
from dlrover.python.common.multi_process import (
@@ -30,12 +34,24 @@
SharedQueue,
)
-CKPT_DIR_PREFIX = "checkpoint-"
+_CKPT_DIR_PREFIX = "checkpoint-"
-SAVE_STEP_QNAME_PREFIX = "checkpoint_lock_rank_"
-CKPT_META_NAME_PREFIX = "checkpoint_meta_local_rank_"
-TENSOR_SHM_NAME_PREFIX = "checkpoint_shm_local_rank_"
-SHM_LOCK_NAME_PREFIX = "shm_local_rank_"
+_SAVE_STEP_QNAME_PREFIX = "checkpoint_lock_rank_"
+_CKPT_META_NAME_PREFIX = "checkpoint_meta_local_rank_"
+_TENSOR_SHM_NAME_PREFIX = "checkpoint_shm_local_rank_"
+_SHM_LOCK_NAME_PREFIX = "shm_local_rank_"
+_WIRTING_SHM = "__WRITING_SHM__"
+
+
+def timer(func):
+ def wrapper(*args, **kwargs):
+ start = time.time()
+ result = func(*args, **kwargs)
+ t = round(time.time() - start, 3)
+ logger.info(f"Function {func.__name__} cost {t}s")
+ return result
+
+ return wrapper
def _init_dir(dir):
@@ -44,7 +60,8 @@ def _init_dir(dir):
os.makedirs(dir)
-def convert_torch_dtype_to_numpy(torch_dtype):
+def _convert_torch_dtype_to_numpy(torch_dtype):
+ """Conver the torch dtype to numpy dtype."""
dtype_map = {
torch.float32: np.float32,
torch.float: np.float32,
@@ -64,33 +81,33 @@ def convert_torch_dtype_to_numpy(torch_dtype):
return dtype_map[torch_dtype]
-def traverse_state_dict(value: object, visitor: Callable[[object], None]):
+def _traverse_state_dict(value: object, visitor: Callable[[object], None]):
"""
Invoke ``visitor`` for each value recursively in ``state_dict``.
"""
if isinstance(value, Mapping):
temp_dict = {}
for k, v in value.items():
- temp_dict[k] = traverse_state_dict(v, visitor)
+ temp_dict[k] = _traverse_state_dict(v, visitor)
return temp_dict
elif isinstance(value, List):
temp_list = []
for _, v in enumerate(value):
- temp_list.append(traverse_state_dict(v, visitor))
+ temp_list.append(_traverse_state_dict(v, visitor))
return temp_list
else:
return visitor(value)
-def read_state_dict_from_shm(checkpoint_meta, tensor_shm):
- state_dict = traverse_state_dict(
+def _read_state_dict_from_shm(checkpoint_meta, tensor_shm):
+ state_dict = _traverse_state_dict(
checkpoint_meta,
- lambda x: read_tensor_from_buf(x, tensor_shm),
+ lambda x: _read_tensor_from_buf(x, tensor_shm),
)
return state_dict
-def read_tensor_from_buf(value, shm_tensor_buffer):
+def _read_tensor_from_buf(value, shm_tensor_buffer):
"""
Read a tensor from the buffer of shared memory.
"""
@@ -107,6 +124,43 @@ def read_tensor_from_buf(value, shm_tensor_buffer):
return value
+def _create_shared_memory(name, create, size=0):
+ """
+ Create a shared memory.
+ """
+ if not create:
+ try:
+ return SharedMemory(name=name)
+ except FileNotFoundError:
+ return None
+ try:
+ shm = SharedMemory(
+ name=name,
+ create=create,
+ size=size,
+ )
+ except FileExistsError:
+ shm = SharedMemory(name=name)
+ return shm
+
+
+def _get_latest_checkpoint(checkpoint_dir):
+ """Get the checkpoint directory with the maximum step."""
+ if not os.path.exists(checkpoint_dir):
+ return ""
+ max_step = 0
+ for fn in os.listdir(checkpoint_dir):
+ if not fn.startswith(_CKPT_DIR_PREFIX):
+ continue
+ step = int(fn.split("-")[-1])
+ max_step = step if step > max_step else max_step
+ if max_step > 0:
+ path = os.path.join(checkpoint_dir, f"{_CKPT_DIR_PREFIX}{max_step}")
+ else:
+ path = ""
+ return path
+
+
@dataclass
class TensorMeta(object):
shape: Tuple[int] = None # type: ignore
@@ -126,29 +180,63 @@ class SaverFactory(object):
class CheckpointSaver(metaclass=ABCMeta):
+ """
+ CheckpointSaver saves the state dict from the shared memory into
+ the storage.
+
+ Attributes:
+ checkpoint_dir (str): the directory to save the checkpointing state
+ dict to the storage if the training process fails.
+ num_proc (int): the number of training processes, i.e. the local world size.
+ """
+
+ _saver_instance = None
+
+ def __init__(self, checkpoint_dir, num_proc=1):
+ self.checkpoint_dir = checkpoint_dir
+ self.num_proc = num_proc
+
@abstractmethod
- def _save_shm_to_storage(self):
+ def _sync_shm_to_storage(self):
pass
@classmethod
- def start_async_saving_ckpt(cls):
+ def start_async_saving_ckpt(cls, num_proc=1):
"""
Start a thread to asynchronously save the checkpoint state dict
from the shared memory into the storage. Firstly, it waits that
the training process notify the saver class to create a saver.
+
+ Args:
+ num_proc: the number of training processes, i.e. the local world size.
"""
sq = SharedQueue(name="factory", create=True)
- def _save(sq: SharedQueue):
- class_name = sq.get()
+ def _save():
+ class_name, args = sq.get()
class_def = getattr(sys.modules[__name__], class_name)
- saver: CheckpointSaver = class_def()
- saver._save_shm_to_storage()
+ if cls._saver_instance is None:
+ args["num_proc"] = num_proc
+ saver: CheckpointSaver = class_def(**args)
+ cls._saver_instance = saver
+ cls._saver_instance._sync_shm_to_storage()
threading.Thread(
- target=_save, args=(sq,), name="checkpoint-saver", daemon=True
+ target=_save, name="checkpoint-saver", daemon=True
).start()
+ @classmethod
+ def get_ckpt_saver(cls):
+ return cls._saver_instance
+
+ @abstractmethod
+ def close(self):
+ pass
+
+ @abstractmethod
+ def save_shm_to_storage(self):
+ pass
+
class NoShardingSaver(CheckpointSaver):
"""
@@ -156,17 +244,17 @@ class NoShardingSaver(CheckpointSaver):
from the shared memory created by local rank 0 to the storage.
"""
- def __init__(self) -> None:
- self._checkpoint_dir = ""
+ def __init__(self, checkpoint_dir, num_proc=1) -> None:
+ super().__init__(checkpoint_dir, num_proc)
self._tensor_shm = None
# Only local rank 0 save the state dict to memory in DDP.
- qname = SAVE_STEP_QNAME_PREFIX + str(0)
+ qname = _SAVE_STEP_QNAME_PREFIX + str(0)
self._to_save_queue = SharedQueue(name=qname, create=True)
- meta_name = CKPT_META_NAME_PREFIX + str(0)
+ meta_name = _CKPT_META_NAME_PREFIX + str(0)
self._shared_ckpt_meta = SharedDict(name=meta_name, create=True)
- lock_name = SHM_LOCK_NAME_PREFIX + str(0)
+ lock_name = _SHM_LOCK_NAME_PREFIX + str(0)
self._shm_lock = SharedLock(name=lock_name, create=True)
- self._shm_name = TENSOR_SHM_NAME_PREFIX + str(0)
+ self._shm_name = _TENSOR_SHM_NAME_PREFIX + str(0)
def __del__(self):
self.close()
@@ -179,7 +267,7 @@ def close(self):
self._shared_ckpt_meta.close()
self._shm_lock.close()
- def _save_shm_to_storage(self):
+ def _sync_shm_to_storage(self):
"""
The loop to persist the state dict from the memory
buffer into the storage.
@@ -195,7 +283,7 @@ def _save_shm_to_storage(self):
f"into the storage {path}."
)
meta_dict = self._shared_ckpt_meta.get()
- state_dict = read_state_dict_from_shm(meta_dict, self._tensor_shm)
+ state_dict = _read_state_dict_from_shm(meta_dict, self._tensor_shm)
self._persist_to_storage(state_dict, path)
self._shm_lock.release()
@@ -204,3 +292,439 @@ def _persist_to_storage(self, state_dict, path):
checkpoint_dir = os.path.dirname(path)
_init_dir(checkpoint_dir)
torch.save(state_dict, path)
+
+ def save_shm_to_storage(self):
+ """
+ Save the state dict in the shared memory into the storage. The agent
+ can call the method to save the state dict into the storage if the
+ training process fails or the agent wants to restart training
+ processes.
+ """
+ if self._tensor_shm is None:
+ return
+ acquired = self._shm_lock.acquire()
+ if not acquired:
+ # The training process does not release the lock because it fails
+ # when writing the state dict into the shared memory. The shared
+ # memory may be dirty and the saver cannot save it to the storage.
+ return
+ meta_dict = self._shared_ckpt_meta.get()
+ step = meta_dict["step"]
+ path = os.path.join(
+ self.checkpoint_dir, f"checkpoint-{step}/checkpoint.pt"
+ )
+ state_dict = _read_state_dict_from_shm(meta_dict, self._tensor_shm)
+ self._persist_to_storage(state_dict, path)
+ self._shm_lock.release()
+ logger.info(
+ "Save the checkpointing state dict from the shared "
+ f"memory to {path}."
+ )
+
+
+class CheckpointEngine(metaclass=ABCMeta):
+ """
+ The checkpoint engine synchronously writes the state dict into
+ the shared memory and notifies the agent in the main process to
+ asynchronously save the state dict from the shared memory into
+ the storage. Writing to memory is significantly quicker
+ than writing to storage, so the engine only blocks the training
+ for a short time. Users can frequently call `save_to_memory` in
+ the training loop and occasionally call `save_to_storage`.
+
+ If the training process fails, the agent in the main process can still
+ save the state dict from the shared memory into the storage.
+
+ Attributes:
+ checkpoint_dir (str): the directory to save the temp checkpoint
+ if the training process fails.
+
+ Examples::
+ >>> engine = NoShardingCheckpointEngine(
+ >>> checkpoint_dir="/tmp/checkpoint/"
+ >>> )
+ >>> for step, data in enumerate(dataloader):
+ >>> ...
+ >>> state_dict = model.state_dict()
+ >>> if step % 5 == 0:
+ >>> engine.save_to_memory(state_dict, step)
+ >>> elif step % 100 == 0:
+ >>> path = f"/tmp/checkpoint/ckpt-{step}.pt"
+ >>> engine.save_to_storage(state_dict, path, step)
+ >>> sate_dict = engine.load()
+ """
+
+ def __init__(self, checkpoint_dir):
+ self.checkpoint_dir = checkpoint_dir
+ if dist.is_initialized():
+ self._rank = dist.get_rank()
+ self._local_rank = int(os.environ["LOCAL_RANK"])
+ self._saver_group = dist.new_group(
+ backend="gloo", timeout=timedelta(seconds=30)
+ )
+ else:
+ self._rank = 0
+ self._local_rank = int(os.getenv("LOCAL_RANK", 0))
+ self._saver_group = None
+
+ self._buffer_size = 0
+ self._cached_step = 0
+ self._meta_dict = dict()
+ self._shm_name = ""
+ self._tensor_shm: SharedMemory = None
+ self._shared_ckpt_meta: SharedDict = None
+ self._shm_lock: SharedLock = None
+ self._to_save_queue: SharedQueue = None
+ self._notify_agent_to_create_saver()
+ self._init_shared_objs()
+
+ def __del__(self):
+ self.close()
+
+ def close(self):
+ if self._tensor_shm:
+ self._tensor_shm.close()
+
+ @abstractmethod
+ def _init_shared_objs(self):
+ """
+ Initialize the shared queue, lock and memory to communicate
+ with the agent in the main process.
+ """
+ pass
+
+ @abstractmethod
+ def _notify_agent_to_create_saver(self):
+ """
+ Notify the agent in the main process to create a checkpointing
+ saver to save the state dict from the shared memory into the storage.
+ """
+ pass
+
+ @timer
+ def save_to_memory(self, state_dict, step):
+ """
+ Synchronously saves the state dict into the shared memory with the main
+ process. If the agent in the main process is saving the shared memory
+ into the storage, the method will skip writing the shared memory.
+
+ Args:
+ state_dict (dict): the state dict of model and optimizer to save.
+ step (int): the iteration step.
+ """
+ if "step" not in state_dict:
+ state_dict["step"] = step
+ if _WIRTING_SHM in state_dict:
+ raise ValueError(f"state_dict cannot have the key {_WIRTING_SHM}.")
+
+ if self._tensor_shm is None:
+ self._make_state_dict_buffer(state_dict)
+ acquired = self._shm_lock.acquire(blocking=False)
+ all_rank_ready = self._check_all_rank_ready(acquired)
+ if not all_rank_ready:
+ logger.info(
+ f"Rank {self._rank} skips the save the checkpoint "
+ f"in CPU memory since it is saving the latest "
+ "checkpoint from the CPU memory into the storage."
+ )
+ if acquired:
+ self._shm_lock.release()
+ return
+ self._copy_state_dict_to_shm(state_dict)
+
+ if acquired:
+ self._shm_lock.release()
+ self._cached_step = step
+
+ def _create_tensor_meta(self, value: torch.Tensor):
+ """
+ Create a tensor meta of a tensor and compute the total
+ size of the state dict.
+ """
+ if not torch.is_tensor(value):
+ return value
+ dtype = _convert_torch_dtype_to_numpy(value.dtype)
+ meta = TensorMeta(
+ shape=tuple(value.shape), # type: ignore
+ dtype=dtype,
+ element_size=value.element_size(),
+ numel=value.numel(),
+ offset=self._buffer_size,
+ )
+ self._buffer_size += value.numel() * value.element_size()
+ return meta
+
+ def _make_state_dict_buffer(self, state_dict):
+ """
+ Make the shared memory to store the state dict.
+ """
+ self._meta_dict = _traverse_state_dict(
+ state_dict, self._create_tensor_meta
+ )
+
+ # Update the meta dict in the main process.
+ self._shared_ckpt_meta.update(self._meta_dict)
+ self._tensor_shm = _create_shared_memory(
+ name=self._shm_name,
+ create=True,
+ size=self._buffer_size,
+ )
+
+ def _copy_state_dict_to_shm(self, state_dict):
+ """
+ Copy the state dict from CPU memory buffer into the shared memory.
+ """
+
+ def _tarverse_copy(value, meta):
+ if isinstance(value, Mapping):
+ for k, v in value.items():
+ if isinstance(v, (Mapping, List)):
+ m = meta[k]
+ _tarverse_copy(v, m)
+ elif torch.is_tensor(v):
+ m = meta[k]
+ self._write_shared_memory(v, m)
+ else:
+ meta[k] = v
+ elif isinstance(value, List):
+ for i, v in enumerate(value):
+ if isinstance(v, (Mapping, List)):
+ m = meta[i]
+ _tarverse_copy(v, m)
+ elif torch.is_tensor(v):
+ m = meta[i]
+ self._write_shared_memory(v, m)
+ else:
+ meta[i] = v
+
+ self._meta_dict[_WIRTING_SHM] = True
+ self._shared_ckpt_meta.update(self._meta_dict)
+ _tarverse_copy(state_dict, self._meta_dict)
+ # Update the meta dict in the main process.
+ self._meta_dict[_WIRTING_SHM] = False
+ self._shared_ckpt_meta.update(self._meta_dict)
+
+ def _write_shared_memory(self, value, meta: TensorMeta):
+ """
+ Write a CPU tensor into the shared memory.
+ """
+ data_array = value.cpu().numpy()
+ write_array = np.ndarray(
+ data_array.shape,
+ dtype=data_array.dtype,
+ buffer=self._tensor_shm.buf,
+ offset=meta.offset,
+ )
+ if data_array.shape == ():
+ write_array.fill(data_array)
+ else:
+ write_array[:] = data_array[:]
+
+ def _check_all_rank_ready(self, ready):
+ """
+ Check whether all ranks are ready.
+ """
+ if not self._saver_group:
+ return ready
+ value = 0 if ready else 1
+ t = torch.tensor([value], dtype=torch.int64)
+ dist.all_reduce(t, group=self._saver_group)
+ return t == 0
+
+ @timer
+ def save_to_storage(self, state_dict, path, step):
+ """
+ Asynchronously saves the state dict into the storage. It synchronously
+ saves the state dict into the shared memory and puts the path
+ into a shared queue. The agent in the main process watches the queue
+ and saves the state dict from the shared memory into the storage.
+
+ Args:
+ state_dict (dict): the state dict of model and optimizer to save.
+ path (str): optional, the file path to save the checkpoint. If the
+ path is not defined, the engine will save the state dict into
+ the shared memory not the storage.
+ step (int): the iteration step.
+ """
+ if step > self._cached_step:
+ self.save_to_memory(state_dict, step)
+ if path:
+ self._to_save_queue.put(path)
+
+ def load(self, resume_path=""):
+ """
+ The method first tries to load the state dict from the shared memory.
+ If there is no state dict in the shared memory, the method will
+ load the state dict from the storage.
+
+ Returns:
+ A dict.
+ """
+ state_dict = self._load_from_shared_memory()
+ if state_dict:
+ return state_dict
+ state_dict = self._load_from_storage(resume_path)
+ return state_dict
+
+ def _load_from_shared_memory(self):
+ """
+ Load the state dict from the shared memory.
+
+ Returns:
+ A dict.
+ """
+ if self._tensor_shm is None:
+ self._tensor_shm = _create_shared_memory(
+ self._shm_name,
+ create=False,
+ )
+ if not self._tensor_shm:
+ return None
+ meta_dict = self._shared_ckpt_meta.get()
+ if meta_dict.get(_WIRTING_SHM, False):
+ return None
+ state_dict = _read_state_dict_from_shm(meta_dict, self._tensor_shm)
+ return state_dict
+
+ def _load_from_storage(self, resume_path=""):
+ """
+ Load the state dict from the CPU memory if the state dict is complete
+ in CPU memory. Otherwise, the function will load the state dict from
+ the storage.
+
+ Args:
+ resume_path (str, optional): if the resume_path is an empty
+ string, the function will load the latest checkpoint file in
+ the checkpoint directory.
+
+ Returns:
+ A dict:
+ a dictionary containing a whole state of the modules in the
+ checkpointing file.
+ """
+ if resume_path:
+ state_dict = torch.load(resume_path)
+ else:
+ state_dict = self._load_from_historic_checkpoint()
+ return state_dict
+
+ def _load_from_historic_checkpoint(self):
+ """Locd checkpoint from the lastest complete checkpoint."""
+ while True:
+ latest_ckpt_dir = _get_latest_checkpoint(self.checkpoint_dir)
+ if not latest_ckpt_dir:
+ return {}
+
+ resume_path = os.path.join(latest_ckpt_dir, "checkpoint.pt")
+ if not os.path.exists(resume_path):
+ shutil.rmtree(latest_ckpt_dir)
+ continue
+ try:
+ state_dict = torch.load(resume_path)
+ logger.info(f"Load checkpoint from {resume_path}")
+ return state_dict
+ except Exception:
+ logger.warning(
+ f"Fail to load checkpoint from {resume_path}."
+ " Roll back to the last checkpoint file."
+ )
+ shutil.rmtree(latest_ckpt_dir)
+
+
+class ShardingCheckpointEngine(CheckpointEngine):
+ """
+ The engine to save the sharded model and optimizer state dict
+ into the memory and storage. We can use it to save the model and
+ optimizer trained with FSDP, ZeRO-3 or Megatron-LM.
+ """
+
+ def __init__(self, checkpoint_dir):
+ super().__init__(checkpoint_dir)
+
+ def _notify_agent_to_create_saver(self):
+ # TODO: implement the saver in the agent to support saving
+ # sharding state dict.
+ pass
+
+ def _init_shared_objs(self):
+ meta_name = _CKPT_META_NAME_PREFIX + str(self._local_rank)
+ self._shared_ckpt_meta = SharedDict(name=meta_name, create=False)
+ lock_name = _SHM_LOCK_NAME_PREFIX + str(self._local_rank)
+ self._shm_lock = SharedLock(name=lock_name, create=False)
+ qname = _SAVE_STEP_QNAME_PREFIX + str(self._local_rank)
+ self._to_save_queue = SharedQueue(name=qname, create=False)
+ self._shm_name = _TENSOR_SHM_NAME_PREFIX + str(self._local_rank)
+
+
+class NoShardingCheckpointEngine(CheckpointEngine):
+ """
+ The engine saves the model and optimizer state dict without sharding
+ in a local or DDP job.
+ """
+
+ def __init__(self, checkpoint_dir):
+ super().__init__(checkpoint_dir)
+
+ def _notify_agent_to_create_saver(self):
+ queue = SharedQueue(name="factory")
+ args = {"checkpoint_dir": self.checkpoint_dir}
+ queue.put(("NoShardingSaver", args))
+ queue.close()
+
+ def _init_shared_objs(self):
+ """
+ Initialize the shared object with the main process.
+ Without model sharding, all ranks share the same shared memory
+ created by the local rank 0 on a node.
+ """
+ meta_name = _CKPT_META_NAME_PREFIX + str(0)
+ self._shared_ckpt_meta = SharedDict(name=meta_name, create=False)
+ lock_name = _SHM_LOCK_NAME_PREFIX + str(0)
+ self._shm_lock = SharedLock(name=lock_name, create=False)
+ self._shm_lock.release()
+ qname = _SAVE_STEP_QNAME_PREFIX + str(0)
+ self._to_save_queue = SharedQueue(name=qname, create=False)
+ self._shm_name = _TENSOR_SHM_NAME_PREFIX + str(0)
+
+ @timer
+ def save_to_memory(self, state_dict, step):
+ """
+ Synchronously saves the state dict into the shared memory with the main
+ process. If the agent in the main process is saving the shared memory
+ into the storage, the method will skip writing the shared memory.
+
+ Args:
+ state_dict (dict): the state dict of model and optimizer to save.
+ step (int): the iteration step.
+ """
+ if self._local_rank == 0:
+ super().save_to_memory(state_dict, step)
+
+ @timer
+ def save_to_storage(self, state_dict, path, step):
+ """
+ Asynchronously saves the state dict into the storage. It synchronously
+ saves the state dict into the shared memory and puts the path
+ into a shared queue. The agent in the main process watches the queue
+ and saves the state dict from the shared memory into the storage.
+
+ Args:
+ state_dict (dict): the state dict of model and optimizer to save.
+ step (int): the iteration step.
+ path (str): optional, the file path to save the checkpoint. If the
+ path is not defined, the engine will save the state dict into
+ the shared memory not the storage.
+ """
+ if self._rank == 0:
+ super().save_to_storage(state_dict, path, step)
+
+
+def _clean_shm_handler(signum, frame):
+ """Clean the shared memory from ^C and "killall python" etc."""
+ saver: CheckpointSaver = CheckpointSaver.get_ckpt_saver()
+ if saver:
+ saver.close()
+
+
+signal.signal(signal.SIGINT, _clean_shm_handler)
+signal.signal(signal.SIGTERM, _clean_shm_handler)
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 8723b8a21..66136bdac 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -499,7 +499,10 @@ def _initialize_workers(self, worker_group):
break
def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
- # NOTE: currently only works for a single role
+ # Start a thread to save the checkpointing state dict from
+ # the shared memory to the storage.
+ proc_num = self._worker_group.spec.local_world_size
+ CheckpointSaver.start_async_saving_ckpt(proc_num)
spec = self._worker_group.spec
role = spec.role
@@ -588,8 +591,18 @@ def _report_failure_to_master(self, failures: Dict[int, ProcessFailure]):
def _restart_workers(self, worker_group: WorkerGroup):
self._restart_count += 1
self._remaining_restarts -= 1
+ self._save_ckpt_to_storage()
super()._restart_workers(worker_group)
+ def _save_ckpt_to_storage(self):
+ """
+ The agent can save the checkpointing state dict in the shared
+ memory into the storage before restarting training processes.
+ """
+ saver: CheckpointSaver = CheckpointSaver.get_ckpt_saver()
+ if saver:
+ saver.save_shm_to_storage()
+
def _membership_changed(self, role, rdzv_handler: RendezvousHandler):
# Timeout may happen when to query TCPStore.
if self._config.network_check:
@@ -682,8 +695,6 @@ def launch_agent(
log_dir=config.log_dir,
)
- CheckpointSaver.start_async_saving_ckpt()
-
shutdown_rdzv = True
try:
metrics.initialize_metrics(metrics.MetricsConfig(config.metrics_cfg))
diff --git a/dlrover/trainer/torch/elastic/checkpoint.py b/dlrover/trainer/torch/elastic/checkpoint.py
index b803c5c0a..1dea49eb2 100644
--- a/dlrover/trainer/torch/elastic/checkpoint.py
+++ b/dlrover/trainer/torch/elastic/checkpoint.py
@@ -13,13 +13,8 @@
import os
import shutil
-import time
from abc import ABCMeta, abstractmethod
-from datetime import timedelta
-from typing import List, Mapping
-import numpy as np
-import torch
import torch.distributed as dist
from torch.distributed.fsdp import FullStateDictConfig
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
@@ -28,80 +23,20 @@
from torch.nn.parallel import DistributedDataParallel as DDP
from dlrover.python.common.log import default_logger as logger
-from dlrover.python.common.multi_process import (
- SharedDict,
- SharedLock,
- SharedMemory,
- SharedQueue,
-)
from dlrover.python.elastic_agent.torch.ckpt_saver import (
- CKPT_META_NAME_PREFIX,
- SAVE_STEP_QNAME_PREFIX,
- SHM_LOCK_NAME_PREFIX,
- TENSOR_SHM_NAME_PREFIX,
- TensorMeta,
- convert_torch_dtype_to_numpy,
- read_state_dict_from_shm,
- traverse_state_dict,
+ CheckpointEngine,
+ NoShardingCheckpointEngine,
)
from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
CKPT_DIR_PREFIX = "checkpoint-"
-def timer(func):
- def wrapper(*args, **kwargs):
- start = time.time()
- result = func(*args, **kwargs)
- t = round(time.time() - start, 3)
- logger.info(f"Function {func.__name__} cost {t}s")
- return result
-
- return wrapper
-
-
def _sync():
if dist.is_initialized():
dist.barrier()
-def _get_latest_checkpoint(checkpoint_dir):
- """Get the checkpoint directory with the maximum step."""
- if not os.path.exists(checkpoint_dir):
- return ""
- max_step = 0
- for fn in os.listdir(checkpoint_dir):
- if not fn.startswith(CKPT_DIR_PREFIX):
- continue
- step = int(fn.split("-")[-1])
- max_step = step if step > max_step else max_step
- if max_step > 0:
- path = os.path.join(checkpoint_dir, f"{CKPT_DIR_PREFIX}{max_step}")
- else:
- path = ""
- return path
-
-
-def _create_shared_memory(name, create, size=0):
- """
- Create a shared memory.
- """
- if not create:
- try:
- return SharedMemory(name=name)
- except FileNotFoundError:
- return None
- try:
- shm = SharedMemory(
- name=name,
- create=create,
- size=size,
- )
- except FileExistsError:
- shm = SharedMemory(name=name)
- return shm
-
-
def _keep_topk_checkpoint(checkpoint_dir, max_to_keep):
"""Keep top k checkpoints and remove other checkpoints.
@@ -125,398 +60,6 @@ def _keep_topk_checkpoint(checkpoint_dir, max_to_keep):
shutil.rmtree(dir_name)
-class CheckpointEngine(metaclass=ABCMeta):
- """
- The checkpoint engine synchronously writes the state dict into
- the shared memory and notify the agent in main process to
- asynchronously save the state dict from the shared memory into
-    the storage. Writing to memory is significantly quicker
-    than writing to storage, so the engine only blocks the training
-    for a short time. Users can frequently call `save_to_memory` in
-    the training loop and call `save_to_storage` less often.
-
-    If the training process fails, the agent in the main process can still
-    save the state dict from the shared memory into the storage.
-
- Attributes:
- checkpoint_dir (str): the directory to save the temp checkpoint
- if the training process fails.
-
- Examples::
- >>> engine = NoShardingCheckpointEngine(
- >>> checkpoint_dir="/tmp/checkpoint/"
- >>> )
- >>> for step, data in enumerate(dataloader):
- >>> ...
- >>> state_dict = model.state_dict()
- >>> if step % 5 == 0:
- >>> engine.save_to_memory(state_dict, step)
- >>> elif step % 100 == 0:
- >>> path = f"/tmp/checkpoint/ckpt-{step}.pt"
- >>> engine.save_to_storage(state_dict, path, step)
-        >>> state_dict = engine.load()
- """
-
- def __init__(self, checkpoint_dir):
- self.checkpoint_dir = checkpoint_dir
- if dist.is_initialized():
- self._rank = dist.get_rank()
- self._local_rank = int(os.environ["LOCAL_RANK"])
- self._saver_group = dist.new_group(
- backend="gloo", timeout=timedelta(seconds=30)
- )
- else:
- self._rank = 0
- self._local_rank = int(os.getenv("LOCAL_RANK", 0))
- self._saver_group = None
-
- self._buffer_size = 0
- self._cached_step = 0
- self._meta_dict = dict()
- self._shm_name = ""
- self._tensor_shm: SharedMemory = None
- self._shared_ckpt_meta: SharedDict = None
- self._shm_buffer_lock: SharedLock = None
- self._to_save_queue: SharedQueue = None
- self._notify_agent_to_create_saver()
- self._init_shared_objs()
-
- def __del__(self):
- self.close()
-
- def close(self):
- if self._shared_ckpt_meta:
- self._shared_ckpt_meta.close()
- if self._shm_buffer_lock:
- self._shm_buffer_lock.close()
- if self._to_save_queue:
- self._to_save_queue.close()
- if self._tensor_shm:
- self._tensor_shm.close()
-
- @abstractmethod
- def _init_shared_objs(self):
- """
-        Initialize the shared queue, lock and memory to communicate
- with the agent in the main process.
- """
- pass
-
- @abstractmethod
- def _notify_agent_to_create_saver(self):
- """
- Notify the agent in the main process to create a checkpointing
- saver to save the state dict from the shared memory into the storage.
- """
- pass
-
- def _create_tensor_meta(self, value: torch.Tensor):
- """
- Create a tensor meta of a tensor and compute the total
- size of the state dict.
- """
- if not torch.is_tensor(value):
- return value
- dtype = convert_torch_dtype_to_numpy(value.dtype)
- meta = TensorMeta(
- shape=tuple(value.shape), # type: ignore
- dtype=dtype,
- element_size=value.element_size(),
- numel=value.numel(),
- offset=self._buffer_size,
- )
- self._buffer_size += value.numel() * value.element_size()
- return meta
-
- def _make_state_dict_buffer(self, state_dict):
- """
-        Allocate the shared memory that stores the state dict.
- """
- self._meta_dict = traverse_state_dict(
- state_dict, self._create_tensor_meta
- )
-
- # Update the meta dict in the main process.
- self._shared_ckpt_meta.update(self._meta_dict)
- self._tensor_shm = _create_shared_memory(
- name=self._shm_name,
- create=True,
- size=self._buffer_size,
- )
-
- def _copy_state_dict_to_shm(self, state_dict):
- """
-        Copy the state dict from the CPU memory buffer into the shared memory.
- """
-
- def _tarverse_copy(value, meta):
- if isinstance(value, Mapping):
- for k, v in value.items():
- if isinstance(v, (Mapping, List)):
- m = meta[k]
- _tarverse_copy(v, m)
- elif torch.is_tensor(v):
- m = meta[k]
- self._write_shared_memory(v, m)
- else:
- meta[k] = v
- elif isinstance(value, List):
- for i, v in enumerate(value):
- if isinstance(v, (Mapping, List)):
- m = meta[i]
- _tarverse_copy(v, m)
- elif torch.is_tensor(v):
- m = meta[i]
- self._write_shared_memory(v, m)
- else:
- meta[i] = v
-
- _tarverse_copy(state_dict, self._meta_dict)
- # Update the meta dict in the main process.
- self._shared_ckpt_meta.update(self._meta_dict)
-
- def _write_shared_memory(self, value, meta: TensorMeta):
- """
- Write a CPU tensor into the shared memory.
- """
- data_array = value.cpu().numpy()
- write_array = np.ndarray(
- data_array.shape,
- dtype=data_array.dtype,
- buffer=self._tensor_shm.buf,
- offset=meta.offset,
- )
- if data_array.shape == ():
- write_array.fill(data_array)
- else:
- write_array[:] = data_array[:]
-
- @timer
- def save_to_memory(self, state_dict, step):
- """
-        Synchronously saves the state dict into the shared memory that is
-        shared with the main process. If the agent in the main process is
-        saving the shared memory into the storage, the method skips writing
-        to the shared memory.
-
- Args:
- state_dict (dict): the state dict of model and optimizer to save.
- step (int): the iteration step.
- """
- state_dict["step"] = step
- if self._tensor_shm is None:
- self._make_state_dict_buffer(state_dict)
- acquired = self._shm_buffer_lock.acquire(blocking=False)
- all_rank_ready = self._check_all_rank_ready(acquired)
- if not all_rank_ready:
- logger.info(
-                f"Rank {self._rank} skips saving the checkpoint "
- f"in CPU memory since it is saving the latest "
- "checkpoint from the CPU memory into the storage."
- )
- if acquired:
- self._shm_buffer_lock.release()
- return
- self._copy_state_dict_to_shm(state_dict)
-
- if acquired:
- self._shm_buffer_lock.release()
- self._cached_step = step
-
- def _check_all_rank_ready(self, ready):
- """
-        Check whether all ranks are ready.
- """
- if not self._saver_group:
- return ready
- value = 0 if ready else 1
- t = torch.tensor([value], dtype=torch.int64)
- dist.all_reduce(t, group=self._saver_group)
- return t == 0
-
- @timer
- def save_to_storage(self, state_dict, path, step):
- """
-        Asynchronously saves the state dict into the storage. It synchronously
-        saves the state dict into the shared memory and puts the path
-        into a shared queue. The agent in the main process watches the queue
-        and saves the state dict from the shared memory into the storage.
-
- Args:
- state_dict (dict): the state dict of model and optimizer to save.
-            path (str): optional, the file path to save the checkpoint. If the
-                path is not defined, the engine saves the state dict into
-                the shared memory, not into the storage.
- step (int): the iteration step.
- """
- if step > self._cached_step:
- self.save_to_memory(state_dict, step)
- if path:
- self._to_save_queue.put(path)
-
- def load(self, resume_path=""):
- """
-        The method first tries to load the state dict from the shared memory.
- If there is no state dict in the shared memory, the method will
- load the state dict from the storage.
-
- Returns:
- A dict.
- """
- state_dict = self._load_from_shared_memory()
- if state_dict:
- return state_dict
- state_dict = self._load_from_storage(resume_path)
- return state_dict
-
- def _load_from_shared_memory(self):
- """
- Load the state dict from the shared memory.
-
- Returns:
- A dict.
- """
- if self._tensor_shm is None:
- self._tensor_shm = _create_shared_memory(
- self._shm_name,
- create=False,
- )
- if not self._tensor_shm:
- return None
- meta_dict = self._shared_ckpt_meta.get()
- state_dict = read_state_dict_from_shm(meta_dict, self._tensor_shm)
- return state_dict
-
- def _load_from_storage(self, resume_path=""):
- """
- Load the state dict from the CPU memory if the state dict is complete
- in CPU memory. Otherwise, the function will load the state dict from
- the storage.
-
- Args:
-            resume_path (str, optional): If the resume_path is an empty
- string, the function will load the latest checkpoint file in
- the checkpoint directory.
-
- Returns:
- A dict:
- a dictionary containing a whole state of the modules in the
- checkpointing file.
- """
- if resume_path:
- state_dict = torch.load(resume_path)
- else:
- state_dict = self._load_from_historic_checkpoint()
- return state_dict
-
- def _load_from_historic_checkpoint(self):
-        """Load the checkpoint from the latest complete checkpoint."""
- while True:
- latest_ckpt_dir = _get_latest_checkpoint(self.checkpoint_dir)
- if not latest_ckpt_dir:
- return {}
-
- resume_path = os.path.join(latest_ckpt_dir, "checkpoint.pt")
- if not os.path.exists(resume_path):
- shutil.rmtree(latest_ckpt_dir)
- continue
- try:
- state_dict = torch.load(resume_path)
- logger.info(f"Load checkpoint from {resume_path}")
- return state_dict
- except Exception:
- logger.warning(
-                    f"Failed to load checkpoint from {resume_path}."
-                    " Rolling back to the last checkpoint file."
- )
- shutil.rmtree(latest_ckpt_dir)
-
-
-class ShardingCheckpointEngine(CheckpointEngine):
- """
- The engine to save the sharding model and optimizer state dict
- into the memory and storage. We can use it to save the model and optimizer
- using FSDP, Zero-3 or Megatron-LM.
- """
-
- def __init__(self, checkpoint_dir):
- super().__init__(checkpoint_dir)
-
- def _notify_agent_to_create_saver(self):
- # TODO: implement the saver in the agent to support saving
- # sharding state dict.
- pass
-
- def _init_shared_objs(self):
- meta_name = CKPT_META_NAME_PREFIX + str(self._local_rank)
- self._shared_ckpt_meta = SharedDict(name=meta_name, create=False)
- lock_name = SHM_LOCK_NAME_PREFIX + str(self._local_rank)
- self._shm_buffer_lock = SharedLock(name=lock_name, create=False)
- qname = SAVE_STEP_QNAME_PREFIX + str(self._local_rank)
- self._to_save_queue = SharedQueue(name=qname, create=False)
- self._shm_name = TENSOR_SHM_NAME_PREFIX + str(self._local_rank)
-
-
-class NoShardingCheckpointEngine(CheckpointEngine):
- """
- The engine saves the model and optimizer state dict without sharding
- in a local or DDP job.
- """
-
- def __init__(self, checkpoint_dir):
- super().__init__(checkpoint_dir)
-
- def _notify_agent_to_create_saver(self):
- queue = SharedQueue(name="factory")
- queue.put("NoShardingSaver")
- queue.close()
-
- def _init_shared_objs(self):
- """
- Initialize the shared object with the main process.
- Without model sharding, all ranks share the same shared memory
- created by the local rank 0 on a node.
- """
- meta_name = CKPT_META_NAME_PREFIX + str(0)
- self._shared_ckpt_meta = SharedDict(name=meta_name, create=False)
- lock_name = SHM_LOCK_NAME_PREFIX + str(0)
- self._shm_buffer_lock = SharedLock(name=lock_name, create=False)
- qname = SAVE_STEP_QNAME_PREFIX + str(0)
- self._to_save_queue = SharedQueue(name=qname, create=False)
- self._shm_name = TENSOR_SHM_NAME_PREFIX + str(0)
-
- @timer
- def save_to_memory(self, state_dict, step):
- """
-        Synchronously saves the state dict into the shared memory that is
-        shared with the main process. If the agent in the main process is
-        saving the shared memory into the storage, the method skips writing
-        to the shared memory.
-
- Args:
- state_dict (dict): the state dict of model and optimizer to save.
- step (int): the iteration step.
- """
- if self._local_rank == 0:
- super().save_to_memory(state_dict, step)
-
- @timer
- def save_to_storage(self, state_dict, path, step):
- """
-        Asynchronously saves the state dict into the storage. It synchronously
-        saves the state dict into the shared memory and puts the path
-        into a shared queue. The agent in the main process watches the queue
-        and saves the state dict from the shared memory into the storage.
-
- Args:
- state_dict (dict): the state dict of model and optimizer to save.
- step (int): the iteration step.
-            path (str): optional, the file path to save the checkpoint. If the
-                path is not defined, the engine saves the state dict into
-                the shared memory, not into the storage.
- """
- if self._rank == 0:
- super().save_to_storage(state_dict, path, step)
-
-
class CheckpointManger(metaclass=ABCMeta):
"""CheckpontManager can save and load checkpoint states.
| The agent unlinks the shared memory with signals.
| 2023-11-30T09:49:48 | 0.0 | [] | [] |
|||
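The patch above registers SIGINT/SIGTERM handlers so that the agent releases its checkpoint shared memory even when the process is killed. A minimal, self-contained sketch of the same cleanup pattern (not the dlrover implementation; the buffer name and size here are invented for illustration):

```python
import signal
import sys
from multiprocessing import shared_memory

# Hypothetical buffer; the real saver manages its own handles.
shm = shared_memory.SharedMemory(create=True, size=1024, name="demo_ckpt_buf")

def _cleanup(signum, frame):
    # Close this process's view and unlink the OS object so the
    # segment does not leak in /dev/shm after ^C or "killall python".
    shm.close()
    shm.unlink()
    sys.exit(0)

signal.signal(signal.SIGINT, _cleanup)
signal.signal(signal.SIGTERM, _cleanup)
```

Registering the handlers in the agent rather than in the workers matches the patch: the agent owns the shared memory, so it is the process that must unlink it.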
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-819 | b8a89bec116ae4b2cc06adeb74acdef89b637588 | diff --git a/dlrover/trainer/torch/elastic/checkpoint.py b/dlrover/trainer/torch/elastic/checkpoint.py
index 1ca339334..494fcddb6 100644
--- a/dlrover/trainer/torch/elastic/checkpoint.py
+++ b/dlrover/trainer/torch/elastic/checkpoint.py
@@ -11,17 +11,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import copy
import multiprocessing
import os
+import random
import shutil
+import string
+import time
from abc import ABCMeta, abstractmethod
+from dataclasses import dataclass
+from datetime import timedelta
+from multiprocessing import shared_memory
+from typing import Callable, List, Mapping, Tuple
+import numpy as np
import torch
import torch.distributed as dist
-import torch.distributed.checkpoint._traverse as _traverse
+from torch.distributed.fsdp import FullStateDictConfig
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from torch.distributed.fsdp import StateDictType
+from torch.distributed.fsdp.api import FullOptimStateDictConfig
from torch.nn.parallel import DistributedDataParallel as DDP
from dlrover.python.common.log import default_logger as logger
@@ -30,18 +38,35 @@
CKPT_DIR_PREFIX = "checkpoint-"
-def init_dir(dir):
+def get_random_string(length):
+ letters = string.ascii_lowercase
+ result_str = "".join(random.choice(letters) for i in range(length))
+ return result_str
+
+
+def timer(func):
+ def wrapper(*args, **kwargs):
+ start = time.time()
+ result = func(*args, **kwargs)
+ t = round(time.time() - start, 3)
+ logger.info(f"Function {func.__name__} cost {t}s")
+ return result
+
+ return wrapper
+
+
+def _init_dir(dir):
if os.path.exists(dir):
shutil.rmtree(dir)
os.makedirs(dir)
-def sync():
+def _sync():
if dist.is_initialized():
dist.barrier()
-def get_latest_checkpoint(checkpoint_dir):
+def _get_latest_checkpoint(checkpoint_dir):
"""Get the checkpoint directory with the maximum step."""
if not os.path.exists(checkpoint_dir):
return ""
@@ -55,7 +80,7 @@ def get_latest_checkpoint(checkpoint_dir):
return path
-def keep_topk_checkpoint(checkpoint_dir, max_to_keep):
+def _keep_topk_checkpoint(checkpoint_dir, max_to_keep):
"""Keep top k checkpoints and remove other checkpoints.
Arguments:
@@ -78,6 +103,33 @@ def keep_topk_checkpoint(checkpoint_dir, max_to_keep):
shutil.rmtree(dir_name)
+def traverse_state_dict(value: object, visitor: Callable[[object], None]):
+ """
+ Invoke ``visitor`` for each value recursively in ``state_dict``.
+ """
+ if isinstance(value, Mapping):
+ temp_dict = {}
+ for k, v in value.items():
+ temp_dict[k] = traverse_state_dict(v, visitor)
+ return temp_dict
+ elif isinstance(value, List):
+ temp_list = []
+ for _, v in enumerate(value):
+ temp_list.append(traverse_state_dict(v, visitor))
+ return temp_list
+ else:
+ return visitor(value)
+
+
+@dataclass
+class TensorMeta(object):
+ shape: Tuple[int] = None # type: ignore
+ dtype: torch.dtype = None # type: ignore
+ element_size: int = 0
+ numel: int = 0
+ offset: int = 0
+
+
class CheckpointManger(metaclass=ABCMeta):
"""CheckpontManager can save and load checkpoint states.
@@ -100,23 +152,20 @@ def __init__(
optimizer,
dataloader,
checkpoint_dir,
- rank=0,
- max_to_keep=None,
):
self.model = model
self.optimizer = optimizer
self.dataloader = dataloader
self.checkpoint_dir = checkpoint_dir
- self.rank = rank
- self.max_to_keep = max_to_keep
+ if dist.is_initialized():
+ self._rank = dist.get_rank()
+ else:
+ self._rank = 0
- def log_rank0(self, log):
- if self.rank == 0:
+ def _log_rank0(self, log):
+ if self._rank == 0:
logger.info(log)
- def _is_rank0(self):
- return self.rank == 0
-
@abstractmethod
def save(self, epoch, step):
"""
@@ -129,20 +178,26 @@ def save(self, epoch, step):
pass
@abstractmethod
- def load(self, ckpt_path=None):
+ def load(self, resuming_path=None):
"""
The manager loads the states from the files in the
         checkpoint directory to the model, optimizer and sampler.
-        ckpt_path (str, optional): The manager will load checkpoint from the
- path. If the path is None, the manager will load the state
+        resuming_path (str, optional): The manager will load checkpoint from
+ the path. If the path is None, the manager will load the state
checkpoint from the file with the maximum step.
"""
pass
@classmethod
def init_checkpoint_manager(
- cls, model, optimizer, dataloader, directory, rank=0, max_to_keep=None
+ cls,
+ model,
+ optimizer,
+ dataloader,
+ directory,
+ max_to_keep=1,
+ save_storage_interval=1,
):
"""A factory method to initialize a checkpoint manager by the model
class.
@@ -153,7 +208,7 @@ def init_checkpoint_manager(
optimizer,
dataloader,
directory,
- rank,
+ save_storage_interval,
max_to_keep,
)
elif isinstance(model, DDP):
@@ -162,7 +217,7 @@ def init_checkpoint_manager(
optimizer,
dataloader,
directory,
- rank,
+ save_storage_interval,
max_to_keep,
)
elif isinstance(model, FSDP):
@@ -171,7 +226,7 @@ def init_checkpoint_manager(
optimizer,
dataloader,
directory,
- rank,
+ save_storage_interval,
max_to_keep,
)
else:
@@ -191,6 +246,22 @@ class LocalCheckpointManger(CheckpointManger):
"""
+ def __init__(
+ self,
+ model,
+ optimizer,
+ dataloader,
+ checkpoint_dir,
+ save_storage_interval,
+ max_to_keep=1,
+ ):
+ super().__init__(model, optimizer, dataloader, checkpoint_dir)
+ self._save_engine = AsyncCheckpointEngine(
+ checkpoint_dir,
+ save_storage_interval=save_storage_interval,
+ max_to_keep=max_to_keep,
+ )
+
def save(self, epoch, step):
"""
Save the checkpoint of model, optimizer, dataloader into the directory
@@ -206,25 +277,12 @@ def save(self, epoch, step):
step, self.dataloader.batch_size
)
checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- ckpt_dir = os.path.join(
- self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
- )
- init_dir(ckpt_dir)
- ckpt_path = os.path.join(ckpt_dir, "checkpoint.pt")
- torch.save(checkpoint, ckpt_path)
- if self.max_to_keep:
- keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
-
- def load(self, ckpt_path=None):
- latest_ckpt_dir = get_latest_checkpoint(self.checkpoint_dir)
- if not latest_ckpt_dir:
- return
- if not ckpt_path:
- ckpt_path = os.path.join(latest_ckpt_dir, "checkpoint.pt")
- if not os.path.exists(ckpt_path):
+ self._save_engine.save(step, checkpoint)
+
+ def load(self, resuming_path=None):
+ checkpoint = self._save_engine.load(resuming_path)
+ if not checkpoint:
return
- logger.info(f"Load checkpoint from {ckpt_path}")
- checkpoint = torch.load(ckpt_path)
sampler = self.dataloader.sampler
if isinstance(sampler, ElasticDistributedSampler):
sampler.load_state_dict(checkpoint.get("sampler", {}))
@@ -234,7 +292,7 @@ def load(self, ckpt_path=None):
self.optimizer.load_state_dict(optim_state_dict)
-class DDPCheckpointManger(LocalCheckpointManger):
+class DDPCheckpointManger(CheckpointManger):
"""DDPCheckpontManager saves and loads checkpoint states of a DDP model.
Example::
@@ -245,12 +303,28 @@ class DDPCheckpointManger(LocalCheckpointManger):
>>> ckpt_manger.load()
"""
+ def __init__(
+ self,
+ model,
+ optimizer,
+ dataloader,
+ checkpoint_dir,
+ save_storage_interval=1,
+ max_to_keep=1,
+ ):
+ super().__init__(model, optimizer, dataloader, checkpoint_dir)
+ self._save_engine = AsyncCheckpointEngine(
+ checkpoint_dir,
+ save_storage_interval=save_storage_interval,
+ max_to_keep=max_to_keep,
+ )
+
def save(self, epoch, step):
"""
Save the checkpoint of model, optimizer, dataloader into the directory
`{self.directory}/checkpoint-{step}/checkpoint.pt`.
"""
- self.log_rank0(f"Save checkpoint of step={step} of epoch={epoch}.")
+ self._log_rank0(f"Save checkpoint of step={step} of epoch={epoch}.")
step = step + epoch * len(self.dataloader)
msd = self.model.state_dict()
osd = self.optimizer.state_dict()
@@ -260,22 +334,39 @@ def save(self, epoch, step):
step, self.dataloader.batch_size
)
checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- ckpt_dir = os.path.join(
- self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
- )
- if self._is_rank0():
- init_dir(ckpt_dir)
- sync()
- # Only rank0 saves the checkpoint for DDP model.
- if self._is_rank0():
- ckpt_path = os.path.join(ckpt_dir, "checkpoint.pt")
- torch.save(checkpoint, ckpt_path)
- if self.max_to_keep:
- keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
- sync()
+ self._save_engine.save(step, checkpoint)
+
+ def load(self, resuming_path=None):
+ checkpoint = self._save_engine.load(resuming_path)
+ if not checkpoint:
+ return
+ sampler = self.dataloader.sampler
+ if isinstance(sampler, ElasticDistributedSampler):
+ sampler.load_state_dict(checkpoint.get("sampler", {}))
+ model_state_dict = checkpoint.get("model", {})
+ optim_state_dict = checkpoint.get("optimizer", {})
+ self.model.load_state_dict(model_state_dict)
+ self.optimizer.load_state_dict(optim_state_dict)
+ _sync()
class FSDPCheckpointManger(CheckpointManger):
+ def __init__(
+ self,
+ model,
+ optimizer,
+ dataloader,
+ checkpoint_dir,
+ save_storage_interval=1,
+ max_to_keep=1,
+ ):
+ super().__init__(model, optimizer, dataloader, checkpoint_dir)
+ self._save_engine = AsyncCheckpointEngine(
+ checkpoint_dir,
+ save_storage_interval=save_storage_interval,
+ max_to_keep=max_to_keep,
+ )
+
def save(self, epoch, step):
"""
Save the checkpoint of model, optimizer, dataloader into the directory
@@ -283,12 +374,14 @@ def save(self, epoch, step):
the part of the model and optimizer states into the file
`checkpoint-{step}/part-{rank}.pt`.
"""
- self.log_rank0(f"Save checkpoint of step={step} of epoch={epoch}.")
+ self._log_rank0(f"Save checkpoint of step={step} of epoch={epoch}.")
if self.dataloader:
step = step + epoch * len(self.dataloader)
with FSDP.state_dict_type(
self.model,
- StateDictType.SHARDED_STATE_DICT,
+ StateDictType.FULL_STATE_DICT,
+ FullStateDictConfig(rank0_only=False),
+ FullOptimStateDictConfig(rank0_only=False),
):
msd = self.model.state_dict()
osd = FSDP.optim_state_dict(self.model, self.optimizer)
@@ -300,28 +393,12 @@ def save(self, epoch, step):
step, self.dataloader.batch_size
)
checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- ckpt_dir = os.path.join(
- self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
- )
- if self._is_rank0():
- init_dir(ckpt_dir)
- sync()
- ckpt_path = os.path.join(ckpt_dir, f"part-{self.rank}.pt")
- torch.save(checkpoint, ckpt_path)
- if self.max_to_keep:
- keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
- sync()
-
- def load(self, ckpt_path=None):
- latest_ckpt_dir = get_latest_checkpoint(self.checkpoint_dir)
- if not latest_ckpt_dir:
- return
- if not ckpt_path:
- ckpt_path = os.path.join(latest_ckpt_dir, f"part-{self.rank}.pt")
- if not os.path.exists(ckpt_path):
+ self._save_engine.save(step, checkpoint)
+
+ def load(self, resuming_path=None):
+ checkpoint = self._save_engine.load(resuming_path)
+ if not checkpoint:
return
- logger.info(f"Load checkpoint from {ckpt_path}")
- checkpoint = torch.load(ckpt_path)
if self.dataloader:
sampler = self.dataloader.sampler
if isinstance(sampler, ElasticDistributedSampler):
@@ -329,9 +406,12 @@ def load(self, ckpt_path=None):
model_state_dict = checkpoint.get("model", {})
optim_state_dict = checkpoint.get("optimizer", {})
+ # TODO: use shard_state_dict to checkpoint.
with FSDP.state_dict_type(
self.model,
- StateDictType.SHARDED_STATE_DICT,
+ StateDictType.FULL_STATE_DICT,
+ FullStateDictConfig(rank0_only=False),
+ FullOptimStateDictConfig(rank0_only=False),
):
# called from all ranks, though only rank0 has
# a valid param for full_osd.
@@ -342,7 +422,7 @@ def load(self, ckpt_path=None):
)
self.model.load_state_dict(model_state_dict)
self.optimizer.load_state_dict(optim_state_dict)
- sync()
+ _sync()
class AsyncCheckpointEngine(object):
@@ -350,74 +430,252 @@ class AsyncCheckpointEngine(object):
Attributes:
checkpoint_dir: str, the directory to save the checkpoint.
max_to_keep: int, the number of checkpoint files to keep.
- save_mem_interval: int, the interval of iteration steps to save
- the model and optimizer states into the CPU memory.
save_storage_interval: int, the interval of iteration steps to save
the model and optimizer states from CPU memory to the storage.
- auto_save: bool, the checkpoint manager will automatically configure
-        the interval to save checkpoints into memory and storage according
-        to the time of an iteration step.
"""
def __init__(
self,
checkpoint_dir,
- save_mem_interval,
- save_storage_interval,
+ save_storage_interval=1,
max_to_keep=1,
- auto_save=False,
):
self.checkpoint_dir = checkpoint_dir
self.max_to_keep = max_to_keep
- self.save_mem_interval = save_mem_interval
self.save_storage_interval = save_storage_interval
- self.auto_save = auto_save
- manager = multiprocessing.Manager()
- self._shm_buffer = manager.dict()
+ self._manager = multiprocessing.Manager()
+ self._tensor_meta_buffer = self._manager.dict()
+ self._memory_buffer = None
+ self._shm_tensor_buffer = None
+ self._shm_buffer_lock = multiprocessing.Lock()
+ self._buffer_size = 0
+ self._checkpoint_step_queue = multiprocessing.Queue(maxsize=1)
+ if dist.is_initialized():
+ self._rank = dist.get_rank()
+ self._saver_group = dist.new_group(
+ backend="gloo", timeout=timedelta(seconds=30)
+ )
+ else:
+ self._rank = 0
+ self._saver_group = None
+ random_name = get_random_string(8)
+ self._shm_name = f"tensor_buffer_{random_name}_{self._rank}"
+ self._persist_proc = multiprocessing.Process(
+ name=f"persist-process-rank-{self._rank}",
+ target=self._persist_memory_buffer_to_storage,
+ daemon=True,
+ )
+ self._check_arguments()
+ self._persist_proc.start()
+
+ def __del__(self):
+ self.close()
+
+ def close(self):
+ self._manager.shutdown()
+ if self._persist_proc.is_alive():
+ self._persist_proc.kill()
+ if self._shm_tensor_buffer:
+ self._shm_tensor_buffer.close()
def _check_arguments(self):
- if self.save_mem_interval > self.save_storage_interval:
- raise ValueError(
- "save_storage_interval cannot be less than save_mem_interval."
- )
if self.max_to_keep == 0:
raise ValueError("max_to_keep cannot be 0.")
- if self.auto_save:
-            raise ValueError("auto_save is not enabled now.")
+ if self.save_storage_interval == 0:
+ raise ValueError("save_storage_interval cannot be 0.")
- def _alloc_shared_memory(self, path, value):
- if torch.is_tensor(value) and value.device.type != "cpu":
- self._shm_buffer[path] = torch.empty_like(
- value.cpu(), pin_memory=True
- )
- else:
- self._shm_buffer[path] = copy.deepcopy(value)
+ def _allocate_tensor_memory(self, value):
+ if not torch.is_tensor(value):
+ return value
+ pin_memory = False if value.device.type == "cpu" else True
+ t = torch.empty_like(value.cpu(), pin_memory=pin_memory)
+ return t
+
+ def _create_tensor_meta(self, value):
+ """
+ Create a tensor meta of a tensor and compute the total
+ size of the state dict.
+ """
+ if not torch.is_tensor(value):
+ return value
+ meta = TensorMeta(
+ shape=tuple(value.shape),
+ dtype=value.numpy().dtype,
+ element_size=value.element_size(),
+ numel=value.numel(),
+ offset=self._buffer_size,
+ )
+ self._buffer_size += value.numel() * value.element_size()
+ return meta
def _make_state_dict_buffer(self, state_dict):
- _traverse.traverse_state_dict(state_dict, self._alloc_shared_memory)
+ """
+        Allocate the shared memory that stores the state dict.
+ """
+ self._memory_buffer = traverse_state_dict(
+ state_dict, self._allocate_tensor_memory
+ )
+ meta_dict = traverse_state_dict(
+ self._memory_buffer, self._create_tensor_meta
+ )
+ self._tensor_meta_buffer.update(meta_dict)
+ self._shm_tensor_buffer = shared_memory.SharedMemory(
+ create=True,
+ size=self._buffer_size,
+ name=self._shm_name,
+ )
- def _copy_state_to_memory(self, path, value):
- if torch.is_tensor(value):
- self._shm_buffer[path].copy_(value)
+ def _copy_state_dict_to_shm(self, state_dict):
+ """
+        Copy the state dict from the CPU memory buffer into the shared memory.
+ """
+
+ def _tarverse_copy(origin_value, target_value, meta):
+ if isinstance(origin_value, Mapping):
+ for k, ov in origin_value.items():
+ if isinstance(ov, (Mapping, List)):
+ tv = target_value[k]
+ m = meta[k]
+ _tarverse_copy(ov, tv, m)
+ elif torch.is_tensor(ov):
+ tv = target_value[k]
+ tv.copy_(ov)
+ m = meta[k]
+ self._write_shared_memory(tv, m)
+ else:
+ target_value[k] = ov
+ elif isinstance(origin_value, List):
+ for i, ov in enumerate(origin_value):
+ if isinstance(ov, (Mapping, List)):
+ tv = target_value[i]
+ m = meta[i]
+ _tarverse_copy(ov, tv, m)
+ elif torch.is_tensor(ov):
+ tv = target_value[i]
+ tv.copy_(ov)
+ m = meta[i]
+ self._write_shared_memory(tv, m)
+ else:
+ target_value[i] = ov
+
+ _tarverse_copy(
+ state_dict, self._memory_buffer, self._tensor_meta_buffer
+ )
+
+ def _write_shared_memory(self, value, meta: TensorMeta):
+ """
+ Write a CPU tensor into the shared memory.
+ """
+ data_array = value.numpy()
+ write_array = np.ndarray(
+ data_array.shape,
+ dtype=data_array.dtype,
+ buffer=self._shm_tensor_buffer.buf,
+ offset=meta.offset,
+ )
+ if data_array.shape == ():
+ write_array.fill(data_array)
+ else:
+ write_array[:] = data_array[:]
+
+ def _persist_memory_buffer_to_storage(self):
+ """
+ The loop to persist the state dict from the memory
+ buffer into the storage.
+ """
+ logger.info("Start the process to persist the state dict.")
+ shm_tensor_buffer = None
+ while True:
+ step = self._checkpoint_step_queue.get()
+ if not shm_tensor_buffer:
+ shm_tensor_buffer = shared_memory.SharedMemory(
+ name=self._shm_name,
+ )
+ with self._shm_buffer_lock:
+ checkpoint_dir = os.path.join(
+ self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
+ )
+ logger.info(
+ f"Save step-{step} checkpoint from memory "
+ f"into the storage {checkpoint_dir}."
+ )
+ state_dict = self._read_state_dict_from_buf(shm_tensor_buffer)
+ self._persist_to_storage(state_dict, checkpoint_dir)
+
+ def _read_state_dict_from_buf(self, shm_tensor_buffer):
+ meta_dict = {}
+ meta_dict.update(self._tensor_meta_buffer)
+ state_dict = traverse_state_dict(
+ meta_dict,
+ lambda x: self._read_tensor_from_buf(x, shm_tensor_buffer),
+ )
+ return state_dict
+
+ def _read_tensor_from_buf(self, value, shm_tensor_buffer):
+ """
+ Read a tensor from the buffer of shared memory.
+ """
+ if isinstance(value, TensorMeta):
+ data_array = np.frombuffer(
+ buffer=shm_tensor_buffer.buf,
+ dtype=value.dtype,
+ offset=value.offset,
+ count=value.numel,
+ )
+ value = torch.reshape(torch.tensor(data_array), value.shape)
+ return value
else:
- self._shm_buffer[path] = value
+ return value
- def _copy_state_dict_to_buffer(self, state_dict):
- _traverse.traverse_state_dict(state_dict, self._copy_state_to_memory)
+ def _persist_to_storage(self, state_dict, checkpoint_dir):
+ """Persist the checkpoint from CPU memory buffer into the storage."""
+ if self._rank == 0:
+ _init_dir(checkpoint_dir)
+ checkpoint_path = os.path.join(checkpoint_dir, "checkpoint.pt")
+ torch.save(state_dict, checkpoint_path)
+ _keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
+ @timer
def save(self, step, state_dict):
"""
-        Save the state dict if the step is a multiple of save_mem_interval.
+        Save the state dict into the CPU memory. If the step is a multiple
+ of the save_storage_interval, the engine will persist the state dict
+ from the CPU memory into the storage.
Args:
step: the iteration step in the training loop.
state_dict: a dictionary.
"""
- if step % self.save_mem_interval != 0:
- return
- if len(self._shm_buffer) == 0:
+ state_dict["step"] = step
+ if self._shm_tensor_buffer is None:
self._make_state_dict_buffer(state_dict)
- self._copy_state_dict_to_buffer(state_dict)
+ acquired = self._shm_buffer_lock.acquire(block=False)
+ all_rank_ready = self._check_all_rank_ready(acquired)
+ if not all_rank_ready:
+ logger.info(
+                f"Rank {self._rank} skips saving the checkpoint with "
+ f"step {step} in CPU memory since it is saving the latest "
+ "checkpoint from the CPU memory into the storage."
+ )
+ if acquired:
+ self._shm_buffer_lock.release()
+ return
+ self._copy_state_dict_to_shm(state_dict)
+ if step % self.save_storage_interval == 0:
+ self._checkpoint_step_queue.put(step)
+ if acquired:
+ self._shm_buffer_lock.release()
+
+ def _check_all_rank_ready(self, ready):
+ """
+        Check whether all ranks are ready.
+ """
+ if not self._saver_group:
+ return ready
+ value = 0 if ready else 1
+ t = torch.tensor([value], dtype=torch.int64)
+ dist.all_reduce(t, group=self._saver_group)
+ return t == 0
def load(self, resume_path=""):
"""
@@ -433,8 +691,14 @@ def load(self, resume_path=""):
Returns:
A dict.
"""
- if resume_path == "":
- resume_path = get_latest_checkpoint(self.checkpoint_dir)
-
- state_dict = torch.load(resume_path)
- return state_dict
+ if not resume_path:
+ latest_ckpt_dir = _get_latest_checkpoint(self.checkpoint_dir)
+ if not latest_ckpt_dir:
+ return {}
+ resume_path = os.path.join(latest_ckpt_dir, "checkpoint.pt")
+
+ if not os.path.exists(resume_path):
+ return {}
+ logger.info(f"Load checkpoint from {resume_path}")
+ checkpoint = torch.load(resume_path)
+ return checkpoint
diff --git a/examples/pytorch/mnist/cnn_train.py b/examples/pytorch/mnist/cnn_train.py
index 3d93c858f..8ce9a2683 100644
--- a/examples/pytorch/mnist/cnn_train.py
+++ b/examples/pytorch/mnist/cnn_train.py
@@ -155,14 +155,11 @@ def train(args):
model.parameters(), lr=args.learning_rate, momentum=args.momentum
)
scheduler = StepLR(optimizer, step_size=1, gamma=0.5)
- rank = dist.get_rank()
ckpt_manager = CheckpointManger.init_checkpoint_manager(
model,
optimizer,
train_loader,
CHEKPOINT_DIR,
- rank=rank,
- max_to_keep=3,
)
ckpt_manager.load()
diff --git a/examples/pytorch/nanogpt/train.py b/examples/pytorch/nanogpt/train.py
index d8a376e7e..33f9c4ba8 100644
--- a/examples/pytorch/nanogpt/train.py
+++ b/examples/pytorch/nanogpt/train.py
@@ -307,9 +307,8 @@ def train():
# to simulate larger batch size and using the GradScaler
# if data type is float16
- rank = dist.get_rank()
ckpt_manager = CheckpointManger.init_checkpoint_manager(
- model, optimizer, train_loader, checkpoint_dir, rank, 3
+ model, optimizer, train_loader, checkpoint_dir
)
ckpt_manager.load()
| When running examples/pytorch/mnist/cnn_train.py in FSDP mode, loading the checkpoint raises an error.
Steps to reproduce:
With shared storage, start the job and then delete one worker; reloading the checkpoint fails with the error below:

(the debug log shows that the model is not empty)
The configuration is as follows:

| 2023-11-13T11:59:05 | 0.0 | [] | [] |
|||
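The engine added in this patch moves tensors between processes by viewing a `multiprocessing.shared_memory` block as numpy arrays at recorded offsets; the persist process then rebuilds each tensor from its shape/dtype/offset/count metadata. A single-tensor sketch of that round trip (illustrative only; the real engine records a `TensorMeta` per tensor and walks nested dicts):

```python
import numpy as np
import torch
from multiprocessing import shared_memory

t = torch.randn(4, 4)
data = t.numpy()
shm = shared_memory.SharedMemory(create=True, size=data.nbytes)

# Write path (as in _write_shared_memory): wrap the buffer in an
# ndarray at the tensor's offset and copy the data in.
view = np.ndarray(data.shape, dtype=data.dtype, buffer=shm.buf, offset=0)
view[:] = data[:]

# Read path (as in _read_tensor_from_buf): rebuild the tensor from the
# recorded shape, dtype, offset and element count.
out = np.frombuffer(shm.buf, dtype=data.dtype, offset=0, count=t.numel())
restored = torch.reshape(torch.tensor(out), t.shape)
assert torch.equal(t, restored)

del view, out  # drop views into shm.buf before closing the segment
shm.close()
shm.unlink()
```

The patch also guards the write with an all-reduce so that no rank overwrites the buffer while the agent is still persisting it. A sketch of that readiness check (same idea as `_check_all_rank_ready`, but returning a plain bool via `t.item()`):

```python
import torch
import torch.distributed as dist

def all_ranks_ready(ready: bool, group) -> bool:
    # Each rank contributes 0 (ready) or 1 (busy); the reduced sum is
    # zero only when every rank acquired its buffer lock.
    if group is None:
        return ready
    t = torch.tensor([0 if ready else 1], dtype=torch.int64)
    dist.all_reduce(t, group=group)
    return t.item() == 0
```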
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-813 | 85fdbe12aa59acea0f0cd35f47b1e7c5d9f44149 | diff --git a/dlrover/trainer/torch/elastic/checkpoint.py b/dlrover/trainer/torch/elastic/checkpoint.py
index 6003d59a3..1ca339334 100644
--- a/dlrover/trainer/torch/elastic/checkpoint.py
+++ b/dlrover/trainer/torch/elastic/checkpoint.py
@@ -11,12 +11,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import copy
+import multiprocessing
import os
import shutil
from abc import ABCMeta, abstractmethod
import torch
import torch.distributed as dist
+import torch.distributed.checkpoint._traverse as _traverse
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from torch.distributed.fsdp import StateDictType
from torch.nn.parallel import DistributedDataParallel as DDP
@@ -52,6 +55,29 @@ def get_latest_checkpoint(checkpoint_dir):
return path
+def keep_topk_checkpoint(checkpoint_dir, max_to_keep):
+ """Keep top k checkpoints and remove other checkpoints.
+
+ Arguments:
+ checkpoint_dir: the directory to save checkpoint files.
+ max_to_keep: the number of checkpoint files to keep.
+ """
+ steps = []
+ for dir_name in os.listdir(checkpoint_dir):
+ if not dir_name.startswith(CKPT_DIR_PREFIX):
+ continue
+ step = int(dir_name.split("-")[-1])
+ steps.append(step)
+
+ steps = sorted(steps)
+ if len(steps) <= max_to_keep:
+ return
+ remove_steps = steps[: -1 * max_to_keep]
+ for step in remove_steps:
+ dir_name = os.path.join(checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}")
+ shutil.rmtree(dir_name)
+
+
class CheckpointManger(metaclass=ABCMeta):
"""CheckpontManager can save and load checkpoint states.
@@ -61,7 +87,7 @@ class CheckpointManger(metaclass=ABCMeta):
         dataloader (DataLoader): an instance of `torch.utils.data.DataLoader`.
         The sampler of DataLoader should be an instance of
         `dlrover.trainer.torch.elastic.ElasticDistributedSampler`.
- directory (str): the directory to save the checkpoint states.
+ checkpoint_dir (str): the directory to save the checkpoint states.
rank (int): the rank of process in the communication world.
max_to_keep (int): the max number of checkpoint to keep. The oldest
checkpoint files will be removed if the number of checkpoints
@@ -73,14 +99,14 @@ def __init__(
model,
optimizer,
dataloader,
- directory,
+ checkpoint_dir,
rank=0,
max_to_keep=None,
):
self.model = model
self.optimizer = optimizer
self.dataloader = dataloader
- self.directory = directory
+ self.checkpoint_dir = checkpoint_dir
self.rank = rank
self.max_to_keep = max_to_keep
@@ -91,25 +117,6 @@ def log_rank0(self, log):
def _is_rank0(self):
return self.rank == 0
- def _keep_topk_checkpoint(self):
- """Keep top k checkpoints and remove other checkpoints."""
- if not self.max_to_keep or not self._is_rank0():
- return
- steps = []
- for dir_name in os.listdir(self.directory):
- if not dir_name.startswith(CKPT_DIR_PREFIX):
- continue
- step = int(dir_name.split("-")[-1])
- steps.append(step)
-
- steps = sorted(steps)
- if len(steps) <= self.max_to_keep:
- return
- remove_steps = steps[: -1 * self.max_to_keep]
- for step in remove_steps:
- dir_name = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
- shutil.rmtree(dir_name)
-
@abstractmethod
def save(self, epoch, step):
"""
@@ -199,14 +206,17 @@ def save(self, epoch, step):
step, self.dataloader.batch_size
)
checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- ckpt_dir = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ ckpt_dir = os.path.join(
+ self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
+ )
init_dir(ckpt_dir)
ckpt_path = os.path.join(ckpt_dir, "checkpoint.pt")
torch.save(checkpoint, ckpt_path)
- self._keep_topk_checkpoint()
+ if self.max_to_keep:
+ keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
def load(self, ckpt_path=None):
- latest_ckpt_dir = get_latest_checkpoint(self.directory)
+ latest_ckpt_dir = get_latest_checkpoint(self.checkpoint_dir)
if not latest_ckpt_dir:
return
if not ckpt_path:
@@ -250,7 +260,9 @@ def save(self, epoch, step):
step, self.dataloader.batch_size
)
checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- ckpt_dir = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ ckpt_dir = os.path.join(
+ self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
+ )
if self._is_rank0():
init_dir(ckpt_dir)
sync()
@@ -258,7 +270,8 @@ def save(self, epoch, step):
if self._is_rank0():
ckpt_path = os.path.join(ckpt_dir, "checkpoint.pt")
torch.save(checkpoint, ckpt_path)
- self._keep_topk_checkpoint()
+ if self.max_to_keep:
+ keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
sync()
@@ -287,17 +300,20 @@ def save(self, epoch, step):
step, self.dataloader.batch_size
)
checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- ckpt_dir = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ ckpt_dir = os.path.join(
+ self.checkpoint_dir, f"{CKPT_DIR_PREFIX}{step}"
+ )
if self._is_rank0():
init_dir(ckpt_dir)
sync()
ckpt_path = os.path.join(ckpt_dir, f"part-{self.rank}.pt")
torch.save(checkpoint, ckpt_path)
- self._keep_topk_checkpoint()
+ if self.max_to_keep:
+ keep_topk_checkpoint(self.checkpoint_dir, self.max_to_keep)
sync()
def load(self, ckpt_path=None):
- latest_ckpt_dir = get_latest_checkpoint(self.directory)
+ latest_ckpt_dir = get_latest_checkpoint(self.checkpoint_dir)
if not latest_ckpt_dir:
return
if not ckpt_path:
@@ -327,3 +343,98 @@ def load(self, ckpt_path=None):
self.model.load_state_dict(model_state_dict)
self.optimizer.load_state_dict(optim_state_dict)
sync()
+
+
+class AsyncCheckpointEngine(object):
+ """
+ Attributes:
+ checkpoint_dir: str, the directory to save the checkpoint.
+ max_to_keep: int, the number of checkpoint files to keep.
+ save_mem_interval: int, the interval of iteration steps to save
+ the model and optimizer states into the CPU memory.
+ save_storage_interval: int, the interval of iteration steps to save
+ the model and optimizer states from CPU memory to the storage.
+ auto_save: bool, the checkpoint manager will automatically configure
+            the interval to save checkpoints into memory and storage according
+            to the time of an iteration step.
+ """
+
+ def __init__(
+ self,
+ checkpoint_dir,
+ save_mem_interval,
+ save_storage_interval,
+ max_to_keep=1,
+ auto_save=False,
+ ):
+ self.checkpoint_dir = checkpoint_dir
+ self.max_to_keep = max_to_keep
+ self.save_mem_interval = save_mem_interval
+ self.save_storage_interval = save_storage_interval
+ self.auto_save = auto_save
+ manager = multiprocessing.Manager()
+ self._shm_buffer = manager.dict()
+
+ def _check_arguments(self):
+ if self.save_mem_interval > self.save_storage_interval:
+ raise ValueError(
+ "save_storage_interval cannot be less than save_mem_interval."
+ )
+ if self.max_to_keep == 0:
+ raise ValueError("max_to_keep cannot be 0.")
+ if self.auto_save:
+            raise ValueError("auto_save is not enabled now.")
+
+ def _alloc_shared_memory(self, path, value):
+ if torch.is_tensor(value) and value.device.type != "cpu":
+ self._shm_buffer[path] = torch.empty_like(
+ value.cpu(), pin_memory=True
+ )
+ else:
+ self._shm_buffer[path] = copy.deepcopy(value)
+
+ def _make_state_dict_buffer(self, state_dict):
+ _traverse.traverse_state_dict(state_dict, self._alloc_shared_memory)
+
+ def _copy_state_to_memory(self, path, value):
+ if torch.is_tensor(value):
+ self._shm_buffer[path].copy_(value)
+ else:
+ self._shm_buffer[path] = value
+
+ def _copy_state_dict_to_buffer(self, state_dict):
+ _traverse.traverse_state_dict(state_dict, self._copy_state_to_memory)
+
+ def save(self, step, state_dict):
+ """
+        Save the state dict if the step is a multiple of save_mem_interval.
+
+ Args:
+ step: the iteration step in the training loop.
+ state_dict: a dictionary.
+ """
+ if step % self.save_mem_interval != 0:
+ return
+ if len(self._shm_buffer) == 0:
+ self._make_state_dict_buffer(state_dict)
+ self._copy_state_dict_to_buffer(state_dict)
+
+ def load(self, resume_path=""):
+ """
+ Load the state dict from the CPU memory if the state dict is complete
+ in CPU memory. Otherwise, the function will load the state dict from
+ the storage.
+
+ Args:
+            resume_path: str. If the resume_path is an empty
+ string, the function will load the latest checkpoint file in
+ the checkpoint directory.
+
+ Returns:
+ A dict.
+ """
+ if resume_path == "":
+ resume_path = get_latest_checkpoint(self.checkpoint_dir)
+
+ state_dict = torch.load(resume_path)
+ return state_dict
| Copy the tensors of the model state from GPU to CPU.
| 2023-11-09T11:27:24 | 0.0 | [] | [] |
|||
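The `AsyncCheckpointEngine` introduced in the record above allocates a CPU buffer once and then only copies into it on every save; GPU tensors get a pinned host buffer so the device-to-host copy is fast. A minimal sketch of that allocate-once, copy-per-step pattern (a flat dict instead of the recursive traversal; the names are invented, and it also runs on CPU-only machines since `pin_memory` stays False there):

```python
import torch

def make_cpu_buffer(value):
    # Mirror _alloc_shared_memory: pin the host buffer only when the
    # source tensor lives on an accelerator.
    if not torch.is_tensor(value):
        return value
    pin = value.device.type != "cpu"
    return torch.empty_like(value.cpu(), pin_memory=pin)

# Hypothetical two-tensor "state dict".
state = {"w": torch.randn(8, 8), "b": torch.randn(8)}
buffer = {k: make_cpu_buffer(v) for k, v in state.items()}

# The training loop only pays for the copy, not for host allocation.
for k, v in state.items():
    buffer[k].copy_(v)
```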
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-773 | 924c2d2a51cc6e77d89d4395dd0e5a89eaebb6bb | diff --git a/dlrover/trainer/torch/elastic/checkpoint.py b/dlrover/trainer/torch/elastic/checkpoint.py
new file mode 100644
index 000000000..bf391d135
--- /dev/null
+++ b/dlrover/trainer/torch/elastic/checkpoint.py
@@ -0,0 +1,332 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+from abc import ABCMeta, abstractmethod
+
+import torch
+import torch.distributed as dist
+from torch.distributed.fsdp import FullStateDictConfig
+from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
+from torch.distributed.fsdp import StateDictType
+from torch.distributed.fsdp.api import FullOptimStateDictConfig
+from torch.nn.parallel import DistributedDataParallel as DDP
+
+from dlrover.python.common.log import default_logger as logger
+from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
+
+CKPT_DIR_PREFIX = "checkpoint-"
+
+
+def init_dir(dir):
+ if os.path.exists(dir):
+ shutil.rmtree(dir)
+ os.makedirs(dir)
+
+
+def sync():
+ if dist.is_initialized():
+ dist.barrier()
+
+
+def get_latest_checkpoint(checkpoint_dir):
+ """Get the checkpoint directory with the maximum step."""
+ if not os.path.exists(checkpoint_dir):
+ return ""
+ max_step = 0
+ for fn in os.listdir(checkpoint_dir):
+ if not fn.startswith(CKPT_DIR_PREFIX):
+ continue
+ step = int(fn.split("-")[-1])
+ max_step = step if step > max_step else max_step
+ path = os.path.join(checkpoint_dir, f"{CKPT_DIR_PREFIX}{max_step}")
+ return path
+
+
+class CheckpointManger(metaclass=ABCMeta):
+    """CheckpointManager can save and load checkpoint states.
+
+ Args:
+ model (nn.Module): an instance of `torch.nn.Module`.
+ optimizer (Optimizer): an instance of `torch.optim.Optimizer`.
+        dataloader (DataLoader): an instance of `torch.utils.data.DataLoader`.
+            The sampler of DataLoader should be an instance of
+            `dlrover.trainer.torch.elastic.ElasticDistributedSampler`.
+ directory (str): the directory to save the checkpoint states.
+ rank (int): the rank of process in the communication world.
+ max_to_keep (int): the max number of checkpoint to keep. The oldest
+ checkpoint files will be removed if the number of checkpoints
+        is bigger than max_to_keep.
+ """
+
+ def __init__(
+ self,
+ model,
+ optimizer,
+ dataloader,
+ directory,
+ rank=0,
+ max_to_keep=None,
+ ):
+ self.model = model
+ self.optimizer = optimizer
+ self.dataloader = dataloader
+ self.directory = directory
+ self.rank = rank
+ self.max_to_keep = max_to_keep
+
+ def log_rank0(self, log):
+ if self.rank == 0:
+ logger.info(log)
+
+ def _is_rank0(self):
+ return self.rank == 0
+
+ def _keep_topk_checkpoint(self):
+ """Keep top k checkpoints and remove other checkpoints."""
+ if not self.max_to_keep or not self._is_rank0():
+ return
+ steps = []
+ for dir_name in os.listdir(self.directory):
+ if not dir_name.startswith(CKPT_DIR_PREFIX):
+ continue
+ step = int(dir_name.split("-")[-1])
+ steps.append(step)
+
+ steps = sorted(steps)
+ if len(steps) <= self.max_to_keep:
+ return
+ remove_steps = steps[: -1 * self.max_to_keep]
+ for step in remove_steps:
+ dir_name = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ shutil.rmtree(dir_name)
+
+ @abstractmethod
+ def save(self, epoch, step):
+ """
+ Save the checkpoint of model, optimizer and sampler.
+
+ Args:
+ epoch (int): the epoch index.
+ step (int): the iteration step in the epoch.
+ """
+ pass
+
+ @abstractmethod
+ def load(self, ckpt_path=None):
+ """
+ The manager loads the states from the files in the
+        checkpoint directory to the model, optimizer and sampler.
+
+        ckpt_path (str, optional): The manager will load checkpoint from the
+ path. If the path is None, the manager will load the state
+ checkpoint from the file with the maximum step.
+ """
+ pass
+
+ @classmethod
+ def init_checkpoint_manager(
+ cls, model, optimizer, dataloader, directory, rank=0, max_to_keep=None
+ ):
+ """A factory method to initialize a checkpoint manager by the model
+ class.
+ """
+ if not dist.is_initialized():
+ return LocalCheckpointManger(
+ model,
+ optimizer,
+ dataloader,
+ directory,
+ rank,
+ max_to_keep,
+ )
+ elif isinstance(model, DDP):
+ return DDPCheckpointManger(
+ model,
+ optimizer,
+ dataloader,
+ directory,
+ rank,
+ max_to_keep,
+ )
+ elif isinstance(model, FSDP):
+ return FSDPCheckpointManger(
+ model,
+ optimizer,
+ dataloader,
+ directory,
+ rank,
+ max_to_keep,
+ )
+ else:
+ raise NotImplementedError(f"Not support model class {model}")
+
+
+class LocalCheckpointManger(CheckpointManger):
+ """The manager saves and loads checkpoint states of the local
+ model and optimizer without distributed execution.
+
+ Example::
+ >>> ckpt_manager = LocalCheckpointManger(
+ >>> model, optimizer, train_dataloader, "/tmp/checkpoint/"
+ >>> )
+ >>> ckpt_manager.save(0, 10)
+        >>> ckpt_manager.load()
+
+ """
+
+ def save(self, epoch, step):
+ """
+ Save the checkpoint of model, optimizer, dataloader into the directory
+ `{self.directory}/checkpoint-{step}/checkpoint.pt`.
+ """
+ logger.info(f"Save checkpoint of step={step} of epoch={epoch}.")
+ step = step + epoch * len(self.dataloader)
+ msd = self.model.state_dict()
+ osd = self.optimizer.state_dict()
+ ssd = {}
+ if isinstance(self.dataloader.sampler, ElasticDistributedSampler):
+ ssd = self.dataloader.sampler.state_dict(
+ step, self.dataloader.batch_size
+ )
+ checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
+ ckpt_dir = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ init_dir(ckpt_dir)
+ ckpt_path = os.path.join(ckpt_dir, "checkpoint.pt")
+ torch.save(checkpoint, ckpt_path)
+ self._keep_topk_checkpoint()
+
+ def load(self, ckpt_path=None):
+ latest_ckpt_dir = get_latest_checkpoint(self.directory)
+ if not latest_ckpt_dir:
+ return
+ if not ckpt_path:
+ ckpt_path = os.path.join(latest_ckpt_dir, "checkpoint.pt")
+ if not os.path.exists(ckpt_path):
+ return
+ logger.info(f"Load checkpoint from {ckpt_path}")
+ checkpoint = torch.load(ckpt_path)
+ sampler = self.dataloader.sampler
+ if isinstance(sampler, ElasticDistributedSampler):
+ sampler.load_state_dict(checkpoint.get("sampler", {}))
+ model_state_dict = checkpoint.get("model", {})
+ optim_state_dict = checkpoint.get("optimizer", {})
+ self.model.load_state_dict(model_state_dict)
+ self.optimizer.load_state_dict(optim_state_dict)
+
+
+class DDPCheckpointManger(LocalCheckpointManger):
+    """DDPCheckpointManager saves and loads checkpoint states of a DDP model.
+
+ Example::
+        >>> ckpt_manager = DDPCheckpointManger(
+ >>> model, optimizer, train_dataloader, "/tmp/checkpoint/"
+ >>> )
+ >>> ckpt_manager.save(0, 10)
+        >>> ckpt_manager.load()
+ """
+
+ def save(self, epoch, step):
+ """
+ Save the checkpoint of model, optimizer, dataloader into the directory
+ `{self.directory}/checkpoint-{step}/checkpoint.pt`.
+ """
+ self.log_rank0(f"Save checkpoint of step={step} of epoch={epoch}.")
+ step = step + epoch * len(self.dataloader)
+ msd = self.model.state_dict()
+ osd = self.optimizer.state_dict()
+ ssd = {}
+ if isinstance(self.dataloader.sampler, ElasticDistributedSampler):
+ ssd = self.dataloader.sampler.state_dict(
+ step, self.dataloader.batch_size
+ )
+ checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
+ ckpt_dir = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ if self._is_rank0():
+ init_dir(ckpt_dir)
+ sync()
+ # Only rank0 saves the checkpoint for DDP model.
+ if self._is_rank0():
+ ckpt_path = os.path.join(ckpt_dir, "checkpoint.pt")
+ torch.save(checkpoint, ckpt_path)
+ self._keep_topk_checkpoint()
+ sync()
+
+
+class FSDPCheckpointManger(CheckpointManger):
+ def save(self, epoch, step):
+ """
+ Save the checkpoint of model, optimizer, dataloader into the directory
+ `{self.directory}/checkpoint-{step}/`. All ranks will save
+ the part of the model and optimizer states into the file
+ `checkpoint-{step}/part-{rank}.pt`.
+ """
+ self.log_rank0(f"Save checkpoint of step={step} of epoch={epoch}.")
+ step = step + epoch * len(self.dataloader)
+ FSDP.set_state_dict_type(
+ self.model,
+ StateDictType.FULL_STATE_DICT,
+ FullStateDictConfig(rank0_only=False),
+ FullOptimStateDictConfig(rank0_only=False),
+ )
+ msd = self.model.state_dict()
+ osd = FSDP.optim_state_dict(self.model, self.optimizer)
+ ssd = {}
+ if isinstance(self.dataloader.sampler, ElasticDistributedSampler):
+ ssd = self.dataloader.sampler.state_dict(
+ step, self.dataloader.batch_size
+ )
+ checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
+ ckpt_dir = os.path.join(self.directory, f"{CKPT_DIR_PREFIX}{step}")
+ if self._is_rank0():
+ init_dir(ckpt_dir)
+ sync()
+ ckpt_path = os.path.join(ckpt_dir, f"part-{self.rank}.pt")
+ torch.save(checkpoint, ckpt_path)
+ self._keep_topk_checkpoint()
+ sync()
+
+ def load(self, ckpt_path=None):
+ latest_ckpt_dir = get_latest_checkpoint(self.directory)
+ if not latest_ckpt_dir:
+ return
+ if not ckpt_path:
+ ckpt_path = os.path.join(latest_ckpt_dir, f"part-{self.rank}.pt")
+ if not os.path.exists(ckpt_path):
+ return
+ logger.info(f"Load checkpoint from {ckpt_path}")
+ checkpoint = torch.load(ckpt_path)
+ sampler = self.dataloader.sampler
+ if isinstance(sampler, ElasticDistributedSampler):
+ sampler.load_state_dict(checkpoint.get("sampler", {}))
+ model_state_dict = checkpoint.get("model", {})
+ optim_state_dict = checkpoint.get("optimizer", {})
+
+ FSDP.set_state_dict_type(
+ self.model,
+ StateDictType.FULL_STATE_DICT,
+ FullStateDictConfig(rank0_only=False),
+ FullOptimStateDictConfig(rank0_only=False),
+ )
+ self.model.load_state_dict(model_state_dict)
+
+ # called from all ranks, though only rank0 has
+ # a valid param for full_osd.
+ optim_state_dict = FSDP.optim_state_dict_to_load(
+ model=self.model,
+ optim=self.optimizer,
+ optim_state_dict=optim_state_dict,
+ )
+ self.optimizer.load_state_dict(optim_state_dict)
+ sync()
diff --git a/examples/pytorch/mnist/cnn_train.py b/examples/pytorch/mnist/cnn_train.py
index a1f2c4f9e..08b0ed270 100644
--- a/examples/pytorch/mnist/cnn_train.py
+++ b/examples/pytorch/mnist/cnn_train.py
@@ -13,7 +13,7 @@
import argparse
import os
-from datetime import timedelta
+from datetime import datetime, timedelta
import torch
import torch.distributed as dist
@@ -30,13 +30,14 @@
from torch.utils.data import DataLoader
from torchvision import transforms
+from dlrover.trainer.torch.elastic.checkpoint import CheckpointManger
from dlrover.trainer.torch.elastic.dataloader import ElasticDataLoader
from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
from dlrover.trainer.torch.elastic.trainer import ElasticTrainer
# Note, we need to set the path of a shared file
# system like nas, cpfs or hdfs.
-CHEKPOINT_PATH = "./model.pt"
+CHEKPOINT_DIR = "/nas/mnist-ckpt/"
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
@@ -141,8 +142,16 @@ def train(args):
model.parameters(), lr=args.learning_rate, momentum=args.momentum
)
scheduler = StepLR(optimizer, step_size=1, gamma=0.5)
-
- load_checkpoint(model, optimizer, sampler, CHEKPOINT_PATH, args.use_fsdp)
+ rank = dist.get_rank()
+ ckpt_manager = CheckpointManger.init_checkpoint_manager(
+ model,
+ optimizer,
+ train_loader,
+ CHEKPOINT_DIR,
+ rank=rank,
+ max_to_keep=3,
+ )
+ ckpt_manager.load()
elastic_trainer = ElasticTrainer(model, dataloader=train_loader)
optimizer, scheduler = elastic_trainer.prepare(optimizer, scheduler)
@@ -155,32 +164,38 @@ def train(args):
scheduler.step()
model.train()
train_epoch(
+ epoch,
elastic_trainer,
model,
optimizer,
train_loader,
device,
- args.use_fsdp,
+ ckpt_manager,
args.fixed_batch_size,
)
log_rank0("Test model after epoch {}".format(epoch))
test(model, device, test_loader)
+ if args.save_model:
+ rank = int(os.environ.get("RANK", "0"))
+ save_model(model, args.num_epochs, rank, args.use_fsdp)
dist.barrier()
def train_epoch(
+ epoch,
elastic_trainer,
model,
optimizer,
train_loader,
device,
- use_fsdp=False,
+ ckpt_manager: CheckpointManger,
fixed_batch_size=False,
):
"""
The global batch size will not change if the number of workers changes.
"""
-
+ # Note: Set epoch into the sampler.
+ train_loader.sampler.set_epoch(epoch)
for _, (data, target) in enumerate(train_loader):
with elastic_trainer.step(fixed_batch_size):
optimizer.zero_grad()
@@ -195,66 +210,32 @@ def train_epoch(
log_rank0("loss = {}, step = {}".format(loss, train_step))
if train_step > 0 and train_step % 200 == 0:
- save_checkpoint(
- train_step,
- model,
- optimizer,
- train_loader,
- CHEKPOINT_PATH,
- use_fsdp,
- )
-
-
-def load_checkpoint(model, optimizer, sampler, ckpt_path, use_fsdp=False):
- if not os.path.exists(ckpt_path):
- return
- print("Checkpoint loaded to rank0 CPU.")
- checkpoint = torch.load(ckpt_path)
- sampler.load_state_dict(checkpoint.get("sampler", {}))
- model_state_dict = checkpoint.get("model", {})
- model.load_state_dict(model_state_dict)
- optim_state_dict = checkpoint.get("optimizer", {})
- if use_fsdp:
- FSDP.set_state_dict_type(
- model,
- StateDictType.FULL_STATE_DICT,
- FullStateDictConfig(rank0_only=True),
- )
- # called from all ranks, though only rank0 has
- # a valid param for full_osd.
- optim_state_dict = FSDP.optim_state_dict_to_load(
- optim_state_dict, model, optimizer
- )
- optimizer.load_state_dict(optim_state_dict)
- else:
- optimizer.load_state_dict(optim_state_dict)
+ ckpt_manager.save(epoch, train_step)
+ print("Finish save checkpoint.")
-def save_checkpoint(
- step, model, optimizer, data_loader, ckpt_path, use_fsdp=False
-):
- log_rank0("Save checkpoint.")
- msd, osd = get_model_optim_state(model, optimizer, use_fsdp)
- ssd = data_loader.sampler.state_dict(step, data_loader.batch_size)
- checkpoint = {"model": msd, "optimizer": osd, "sampler": ssd}
- rank = dist.get_rank()
+def save_model(model, epoch, rank, use_fsdp=False):
+ # save
if rank == 0:
- torch.save(checkpoint, ckpt_path)
-
+ print("--> entering save model state")
-def get_model_optim_state(model, optimizer, use_fsdp=False):
if use_fsdp:
- FSDP.set_state_dict_type(
- model,
- StateDictType.FULL_STATE_DICT,
- FullStateDictConfig(rank0_only=True),
- )
- model_state = model.state_dict()
- optim_state = FSDP.optim_state_dict(model, optimizer)
+ save_policy = FullStateDictConfig(offload_to_cpu=True, rank0_only=True)
+ with FSDP.state_dict_type(
+ model, StateDictType.FULL_STATE_DICT, save_policy
+ ):
+ cpu_state = model.state_dict()
else:
- model_state = model.state_dict()
- optim_state = optimizer.state_dict()
- return model_state, optim_state
+ cpu_state = model.state_dict()
+
+ if rank == 0:
+ print("--> saving model ...")
+ currEpoch = "-" + str(epoch) + ".pt"
+ print(f"--> attempting to save model prefix {currEpoch}")
+ time_of_run = datetime.now().strftime("%Y-%m-%d-%I-%M-%S")
+ save_name = "MNIST-CNN-" + time_of_run + currEpoch
+ print(f"--> saving as model name {save_name}")
+ torch.save(cpu_state, save_name)
def test(model, device, test_loader):
@@ -311,6 +292,7 @@ def arg_parser():
parser.add_argument(
"--validation_data", type=str, default="", required=True
)
+ parser.add_argument("--save_model", action="store_true", required=False)
return parser
diff --git a/examples/pytorch/mnist/elastic_job.yaml b/examples/pytorch/mnist/elastic_job.yaml
index ee34cb09a..4440e4efb 100644
--- a/examples/pytorch/mnist/elastic_job.yaml
+++ b/examples/pytorch/mnist/elastic_job.yaml
@@ -14,7 +14,7 @@ spec:
containers:
- name: main
# yamllint disable-line rule:line-length
- image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch201-mnist
+ image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch201-mnist-test
imagePullPolicy: Always
command:
- /bin/bash
@@ -34,3 +34,17 @@ spec:
cpu: "2" # turn up when using GPU
memory: 3Gi # turn up when using GPU
# nvidia.com/gpu: 1 # optional
+ # volumeMounts:
+ # - name: pvc-nas
+ # mountPath: /nas
+ # volumes:
+ # - name: pvc-nas
+ # persistentVolumeClaim:
+ # claimName: pvc-nas
+ dlrover-master:
+ template:
+ spec:
+ restartPolicy: Never
+ containers:
+ - name: main
+ imagePullPolicy: Always
diff --git a/examples/pytorch/nanogpt/train.py b/examples/pytorch/nanogpt/train.py
index 6ba78f9bd..b04beb753 100644
--- a/examples/pytorch/nanogpt/train.py
+++ b/examples/pytorch/nanogpt/train.py
@@ -19,23 +19,30 @@
import os
import pickle
import time
-from datetime import timedelta
+from datetime import datetime, timedelta
import numpy as np
import torch
import torch.distributed as dist
from lora import apply_lora
from model import GPT, Block, GPTConfig
+from torch.distributed.fsdp import FullStateDictConfig
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
+from torch.distributed.fsdp import StateDictType
from torch.distributed.fsdp.wrap import transformer_auto_wrap_policy
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.data import Dataset
+from dlrover.trainer.torch.elastic.checkpoint import CheckpointManger
from dlrover.trainer.torch.elastic.dataloader import ElasticDataLoader
+from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
from dlrover.trainer.torch.elastic.trainer import ElasticTrainer
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+# We should use a shared storage to persist the checkpoint.
+checkpoint_dir = "/nas/nanogpt-ckpt/"
+
local_rank = None
master_process = False
@@ -66,16 +73,14 @@ def get_data_loaders(
data_dir,
batch_size=32,
block_size=128,
- device_type="cpu",
- device="cpu",
- use_fsdp="True",
):
train_dataset = GPTDataset(os.path.join(data_dir, "train.bin"), block_size)
val_dataset = GPTDataset(os.path.join(data_dir, "val.bin"), block_size)
with open(os.path.join(data_dir, "meta.pkl"), "rb") as f:
meta = pickle.load(f)
+ sampler = ElasticDistributedSampler(dataset=train_dataset)
train_loader = ElasticDataLoader(
- train_dataset, batch_size=batch_size, shuffle=True, pin_memory=True
+ train_dataset, batch_size=batch_size, sampler=sampler, pin_memory=True
)
val_loader = ElasticDataLoader(
val_dataset, batch_size=batch_size, shuffle=False, pin_memory=True
@@ -185,6 +190,7 @@ def train():
global local_rank
args = arg_parser()
setup(args)
+ os.makedirs(checkpoint_dir, exist_ok=True)
world_size = int(os.getenv("WORLD_SIZE", 1))
gradient_accumulation_steps = args.gradient_accumulation_steps
batch_size = args.batch_size
@@ -207,8 +213,6 @@ def train():
) # For later use in torch.autocast
if device_type == "cuda":
torch.cuda.set_device(device)
- # choose ddp or fdsp
- use_fsdp = args.use_fsdp == "True"
# Note: float16 data type will automatically use a GradScaler
dtype = (
"bfloat16"
@@ -230,9 +234,6 @@ def train():
data_dir=args.data_dir,
batch_size=batch_size,
block_size=block_size,
- device_type=device_type,
- device=device,
- use_fsdp=use_fsdp,
)
model = gpt_init(meta_vocab_size, args=args)
lora_config = create_lora_config(args)
@@ -245,7 +246,7 @@ def train():
if torch.cuda.is_available() and device_type == "cuda":
# Create model and move it to GPU with id rank
model = model.to(local_rank)
- if use_fsdp:
+ if args.use_fsdp:
print(f"Running basic FSDP example on local rank {local_rank}.")
my_auto_wrap_policy = functools.partial(
@@ -302,7 +303,15 @@ def train():
# to simulate larger batch size and using the GradScaler
# if data type is float16
+ rank = dist.get_rank()
+ ckpt_manager = CheckpointManger.init_checkpoint_manager(
+ model, optimizer, train_loader, checkpoint_dir, rank, 3
+ )
+ ckpt_manager.load()
+
for epoch in range(args.epochs):
+ # Note: set the epoch into the sampler.
+ train_loader.sampler.set_epoch(epoch)
for X, Y in train_loader:
with elastic_trainer.step():
# Determine and set the learning rate for this iteration
@@ -368,6 +377,35 @@ def train():
# Termination conditions
if iter_num > max_iters:
break
+ if iter_num % args.checkpoint_step == 0:
+ ckpt_manager.save(epoch, iter_num)
+ if args.save_model:
+ rank = int(os.getenv("RANK", "0"))
+ save_model(model, epoch, rank, args.use_fsdp)
+
+
+def save_model(model, epoch, rank, use_fsdp=False):
+ # save
+ if rank == 0:
+ print("--> entering save model state")
+
+ if use_fsdp:
+ save_policy = FullStateDictConfig(offload_to_cpu=True, rank0_only=True)
+ with FSDP.state_dict_type(
+ model, StateDictType.FULL_STATE_DICT, save_policy
+ ):
+ cpu_state = model.state_dict()
+ else:
+ cpu_state = model.state_dict()
+
+ if rank == 0:
+ print("--> saving model ...")
+ currEpoch = "-" + str(epoch) + ".pt"
+ print(f"--> attempting to save model prefix {currEpoch}")
+ time_of_run = datetime.now().strftime("%Y-%m-%d-%I-%M-%S")
+ save_name = "nanogpt-" + time_of_run + currEpoch
+ print(f"--> saving as model name {save_name}")
+ torch.save(cpu_state, save_name)
# Determine the device type based on the input string.
@@ -465,10 +503,11 @@ def arg_parser():
Defaults to 'cpu' if not specified.""",
)
parser.add_argument("--compile", type=str, default="False", required=False)
+ parser.add_argument("--use_fsdp", action="store_true", required=False)
parser.add_argument(
- "--use_fsdp", type=str, default="False", required=False
+ "--checkpoint_step", type=int, default=100, required=False
)
-
+ parser.add_argument("--save_model", action="store_true", required=False)
args = parser.parse_args()
return args
| Implement a CheckpointManager to save and load checkpoints (a minimal usage sketch follows).
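A minimal usage sketch assembled from the example changes above (the class name `CheckpointManger` and the `init_checkpoint_manager`/`load`/`save` calls are taken verbatim from the diff; `model`, `optimizer`, `train_loader` and `num_epochs` are assumed to come from the usual training setup):
```python
import torch.distributed as dist

from dlrover.trainer.torch.elastic.checkpoint import CheckpointManger

rank = dist.get_rank()
# The checkpoint directory should be on shared storage (nas, cpfs or hdfs).
ckpt_manager = CheckpointManger.init_checkpoint_manager(
    model, optimizer, train_loader, "/nas/mnist-ckpt/", rank=rank, max_to_keep=3
)
ckpt_manager.load()  # restores model/optimizer/sampler states if a checkpoint exists

for epoch in range(num_epochs):
    train_loader.sampler.set_epoch(epoch)  # as in the example trainer
    for step, (data, target) in enumerate(train_loader):
        ...  # forward/backward/optimizer step
        if step > 0 and step % 200 == 0:
            ckpt_manager.save(epoch, step)  # each rank saves its part; only top-k are kept
```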
| 2023-10-23T06:45:45 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-737 | 78168228485f40f0364d74f13011a11e7b9872a6 | diff --git a/README.md b/README.md
index d66698c79..571410868 100644
--- a/README.md
+++ b/README.md
@@ -43,6 +43,9 @@ training job. The actions to restore training in DLRover are:
2. Restart the process not the node due to software errors.
3. Restart the failed nodes due to hardward errors.
+For details, see the [experiments](docs/tutorial/fault_tolerations.md)
+on fault-tolerance and elasticity.
+
#### Fault Tolerance of PyTorch Distributed Training
DLRover supports fault tolerance of the process failure and the node failure
diff --git a/dlrover/go/operator/pkg/controllers/master/master.go b/dlrover/go/operator/pkg/controllers/master/master.go
index 22e940d98..4b9661441 100644
--- a/dlrover/go/operator/pkg/controllers/master/master.go
+++ b/dlrover/go/operator/pkg/controllers/master/master.go
@@ -35,7 +35,7 @@ const (
masterCommand = "python -m dlrover.python.master.main"
masterServicePort = 50001
initMasterIndex = 0
- defaultImagePullPolicy = "IfNotPresent"
+ defaultImagePullPolicy = "Always"
envMasterAddrKey = "DLROVER_MASTER_ADDR"
envBrainServiceAddrKey = "DLROVER_BRAIN_SERVICE_ADDR"
diff --git a/dlrover/python/master/main.py b/dlrover/python/master/main.py
index bdada30a9..4f67e3af4 100644
--- a/dlrover/python/master/main.py
+++ b/dlrover/python/master/main.py
@@ -37,7 +37,7 @@ def update_context(job_args: JobArgs):
def run(args):
job_args = new_job_args(args.platform, args.job_name, args.namespace)
job_args.initilize()
- logger.info("Job args : %s", job_args.to_json())
+ logger.info("Job args : %s", job_args.to_json(indent=4))
_dlrover_context.config_master_port(port=args.port)
if job_args.platform == PlatformType.LOCAL:
from dlrover.python.master.local_master import LocalJobMaster
diff --git a/docs/tutorial/fault_tolerations.md b/docs/tutorial/fault_tolerations.md
index b35e0463c..f32d8d1ec 100644
--- a/docs/tutorial/fault_tolerations.md
+++ b/docs/tutorial/fault_tolerations.md
@@ -1,346 +1,492 @@
-# Fault-tolerance examples for workers and PS
+# Fault-tolerance and Elasticity Experiments of DLRover ElasticJob
-## Worker fault-tolerance example
+The tutorial shows experiments to test the fault-tolerance and elasticity
+of DLRover ElasticJob. In the experiments, we use the chaos engineering toolkit
+[chaosblade](https://github.com/chaosblade-io/chaosblade) to simulate fault scenarios.
-During the job run, delete the pod of worker-i; the dlrover master then relaunches a new pod. The name of worker-i's pod changes, but the newly created pod starts with the same command as the killed pod; after starting, it joins the communication group and continues training. Other workers are unaffected in the meantime.
+## Preliminary
-### Launch the job
+- Create a k8s cluster and configure cluster credentials on your local computer.
+- Deploy DLRover ElasticJob on the k8s cluster with the [tutorial](torch_on_cloud.md).
+- Build the image with chaosblade like the [example](../../examples/pytorch/mnist/mnist_chaos.dockerfile).
-First, launch the job. To avoid interference from automatic scaling, configure the scaling policy manually.
+## Experiments of PyTorch Distributed Job
-```shell
-kubectl apply -f deepctr_manual_scale_job.yaml -n dlrover
+We conduct experiments to simulate the following scenarios:
+
+- The Pod is preempted.
+- The Pod is a straggler.
+- The Pod is placed on a fault node.
+- The Pod network breaks down during training.
+- The training process corrupts in the Pod.
+
+### Pod is Preempted
+
+In the experiment, we submit a job with the [example](../../examples/pytorch/mnist/choas_test_job.yaml)
+and the command in the worker spec is
+
+```yaml
+ command:
+ - /bin/bash
+ - -c
+ - "dlrover-run --network-check --exclude-straggler --nnodes=3:$WORKER_NUM \
+ --nproc_per_node=2 --max_restarts=3 --rdzv_conf pend_timeout=600 \
+ examples/pytorch/mnist/cnn_train.py --num_epochs 5 \
+ --training_data /data/mnist_png/training/ \
+ --validation_data /data/mnist_png/testing/"
```
-There are currently 1 PS and 3 workers.
+The Pods of the job are:
-```shell
-NAME READY STATUS RESTARTS AGE
-deepctr-auto-scaling-job-edljob-chief-0 1/1 Running 0 117s
-deepctr-auto-scaling-job-edljob-ps-0 1/1 Running 0 117s
-deepctr-auto-scaling-job-edljob-worker-0 1/1 Running 0 65s
-deepctr-auto-scaling-job-edljob-worker-1 1/1 Running 0 65s
+```text
+chaos-test-edljob-worker-0 1/1 Running 0 85s
+chaos-test-edljob-worker-1 1/1 Running 0 85s
+chaos-test-edljob-worker-2 1/1 Running 0 85s
+chaos-test-edljob-worker-3 1/1 Running 0 85s
+elasticjob-chaos-test-dlrover-master 1/1 Running 0 89s
```
-View the pod information of worker-0
+We kill the worker-0 to simulate that the Pod is preempted, using the command
-```shell
-Name: deepctr-auto-scaling-job-edljob-worker-0
-Namespace: dlrover
-Priority: 0
-Service Account: default
-Node: cn-beijing.192.168.0.13/192.168.0.13
-Start Time: Mon, 20 Mar 2023 10:17:10 +0800
-Labels: app=dlrover
- elasticjob-name=deepctr-auto-scaling-job
- rank-index=0
- replica-index=0
- replica-type=worker
- restart-count=0
-Annotations: k8s.aliyun.com/pod-ips: 192.168.0.65
- kubernetes.io/psp: ack.privileged
-Status: Running
-IP: 192.168.0.65
-IPs:
- IP: 192.168.0.65
-Controlled By: ElasticJob/deepctr-auto-scaling-job
-Containers:
- main:
- Container ID: containerd://b1ad0d4b08efa07ea79bc5af0ea2eca67f2d9e91ad0023ed57e89a933b122ee4
- Image: registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec:v11
- Image ID: registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec@sha256:d0159b59af3dfb9e9ab4384945ef2b3b2a9cf3250dbe0a1bc06c06421ef8c780
- Port: <none>
- Host Port: <none>
- Command:
- /bin/bash
- -c
- pip install pyhocon && cd /usr/local/lib/python3.8/dist-packages/dlrover/trainer/examples/deepfm_deeprec && python -m dlrover.trainer.entry.local_entry --platform=Kubernetes --conf=deepfm_deeprec_conf.TrainConf --enable_auto_scaling=True
- State: Running
- Started: Mon, 20 Mar 2023 10:17:11 +0800
- Ready: True
- Restart Count: 0
- Limits:
- cpu: 500m
- memory: 4Gi
- Requests:
- cpu: 500m
- memory: 4Gi
- Environment:
- DLROVER_MASTER_ADDR: elasticjob-deepctr-auto-scaling-job-dlrover-master:50001
- WORKER_TYPE: worker
- WORKER_ID: 0
- WORKER_RANK: 0
- WORKER_NUM: 1
- TF_CONFIG: {"cluster":{"worker":["deepctr-auto-scaling-job-edljob-worker-0:3333"],"ps":["deepctr-auto-scaling-job-edljob-ps-0.dlrover.svc:2222"],"chief":["deepctr-auto-scaling-job-edljob-chief-0:3333"]},"task":{"type":"worker","index":0}}
- Mounts:
- /nas from pvc-nas (rw)
- /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-jtpfw (ro)
-Conditions:
- Type Status
- Initialized True
- Ready True
- ContainersReady True
- PodScheduled True
-Volumes:
- pvc-nas:
- Type: PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
- ClaimName: pvc-nas
- ReadOnly: false
- kube-api-access-jtpfw:
- Type: Projected (a volume that contains injected data from multiple sources)
- TokenExpirationSeconds: 3607
- ConfigMapName: kube-root-ca.crt
- ConfigMapOptional: <nil>
- DownwardAPI: true
-QoS Class: Guaranteed
-Node-Selectors: <none>
-Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
- node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
-Events:
- Type Reason Age From Message
- ---- ------ ---- ---- -------
- Normal Scheduled 2m14s default-scheduler Successfully assigned dlrover/deepctr-auto-scaling-job-edljob-worker-0 to cn-beijing.192.168.0.13
- Normal AllocIPSucceed 2m14s terway-daemon Alloc IP 192.168.0.65/24
- Normal Pulling 2m14s kubelet Pulling image "registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec:v11"
- Normal Pulled 2m14s kubelet Successfully pulled image "registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec:v11" in 282.030396ms (282.03863ms including waiting)
- Normal Created 2m13s kubelet Created container main
- Normal Started 2m13s kubelet Started container main
-```
-
-### Simulate worker fault tolerance
-
-To simulate fault tolerance, manually delete the pod of worker-0
+```bash
+kubectl -n dlrover delete pod chaos-test-edljob-worker-0
+```
-```shell
-kubectl delete pods -n dlrover deepctr-auto-scaling-job-edljob-worker-0
-pod "deepctr-auto-scaling-job-edljob-worker-0" deleted
+After killing worker-0, job Pods are
+
+```text
+chaos-test-edljob-worker-1 1/1 Running 0 2m3s
+chaos-test-edljob-worker-2 1/1 Running 0 2m3s
+chaos-test-edljob-worker-3 1/1 Running 0 2m3s
+chaos-test-edljob-worker-4 1/1 Running 0 30s
+elasticjob-chaos-test-dlrover-master 1/1 Running 0 2m7s
```
-The new pod of worker-0 starts; after finishing preparation, it begins consuming data and training.
+Then, we can see the log of the worker to check whether the training restores.
-```shell
-deepctr-auto-scaling-job-edljob-chief-0 1/1 Running 0 4m24s
-deepctr-auto-scaling-job-edljob-ps-0 1/1 Running 0 4m24s
-deepctr-auto-scaling-job-edljob-worker-1 1/1 Running 0 3m32s
-deepctr-auto-scaling-job-edljob-worker-2 0/1 ContainerCreating 0 2s
+```bash
+kubectl -n dlrover logs chaos-test-edljob-worker-1
```
-View the pod information of worker-0
+```text
+loss = 2.298487901687622, step = 0
+INFO:torch.nn.parallel.distributed:Reducer buckets have been rebuilt in this iteration.
+INFO:torch.nn.parallel.distributed:Reducer buckets have been rebuilt in this iteration.
+loss = 2.195965051651001, step = 20
+loss = 1.2307546138763428, step = 40
+loss = 0.6579511761665344, step = 60
+loss = 1.0608341693878174, step = 80
+loss = 0.7761049270629883, step = 100
+```
-```shell
-Name: deepctr-auto-scaling-job-edljob-worker-2
-Namespace: dlrover
-Priority: 0
-Service Account: default
-Node: cn-beijing.192.168.0.13/192.168.0.13
-Start Time: Mon, 20 Mar 2023 11:50:34 +0800
-Labels: app=dlrover
- elasticjob-name=deepctr-auto-scaling-job
- rank-index=0
- replica-index=2
- replica-type=worker
- restart-count=0
-Annotations: k8s.aliyun.com/pod-ips: 192.168.0.63
- kubernetes.io/psp: ack.privileged
-Status: Running
-IP: 192.168.0.63
-IPs:
- IP: 192.168.0.63
-Controlled By: ElasticJob/deepctr-auto-scaling-job
-Containers:
- main:
- Container ID: containerd://31b97063042b4f5569be958b79fe28c555ed2802a9fdd3fcbd79b6a1a779fdb0
- Image: registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec:v11
- Image ID: registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec@sha256:d0159b59af3dfb9e9ab4384945ef2b3b2a9cf3250dbe0a1bc06c06421ef8c780
- Port: <none>
- Host Port: <none>
- Command:
- /bin/bash
- -c
- pip install pyhocon && cd /usr/local/lib/python3.8/dist-packages/dlrover/trainer/examples/deepfm_deeprec && python -m dlrover.trainer.entry.local_entry --platform=Kubernetes --conf=deepfm_deeprec_conf.TrainConf --enable_auto_scaling=True
- State: Running
- Started: Mon, 20 Mar 2023 11:50:36 +0800
- Ready: True
- Restart Count: 0
- Limits:
- cpu: 500m
- memory: 4Gi
- Requests:
- cpu: 500m
- memory: 4Gi
- Environment:
- DLROVER_MASTER_ADDR: elasticjob-deepctr-auto-scaling-job-dlrover-master:50001
- WORKER_TYPE: worker
- WORKER_ID: 2
- WORKER_RANK: 0
- WORKER_NUM: 1
- TF_CONFIG: {"cluster":{"worker":["deepctr-auto-scaling-job-edljob-worker-0:3333"],"ps":["deepctr-auto-scaling-job-edljob-ps-0.dlrover.svc:2222"],"chief":["deepctr-auto-scaling-job-edljob-chief-0:3333"]},"task":{"type":"worker","index":0}}
- Mounts:
- /nas from pvc-nas (rw)
- /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-n4lq9 (ro)
-Conditions:
- Type Status
- Initialized True
- Ready True
- ContainersReady True
- PodScheduled True
-Volumes:
- pvc-nas:
- Type: PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
- ClaimName: pvc-nas
- ReadOnly: false
- kube-api-access-n4lq9:
- Type: Projected (a volume that contains injected data from multiple sources)
- TokenExpirationSeconds: 3607
- ConfigMapName: kube-root-ca.crt
- ConfigMapOptional: <nil>
- DownwardAPI: true
-QoS Class: Guaranteed
-Node-Selectors: <none>
-Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
- node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
-Events:
- Type Reason Age From Message
- ---- ------ ---- ---- -------
- Normal Scheduled 93s default-scheduler Successfully assigned dlrover/deepctr-auto-scaling-job-edljob-worker-2 to cn-beijing.192.168.0.13
- Normal AllocIPSucceed 92s terway-daemon Alloc IP 192.168.0.63/24
- Normal Pulling 92s kubelet Pulling image "registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec:v11"
- Normal Pulled 92s kubelet Successfully pulled image "registry.cn-hangzhou.aliyuncs.com/dlrover_deeprec/deeprec:v11" in 314.52769ms (314.541567ms including waiting)
- Normal Created 92s kubelet Created container main
- Normal Started 92s kubelet Started container main
-```
-
-Logs of the pod of worker-0
+### Straggler Pod
-```shell
-[2023-03-20 11:51:10,774] [INFO][session_manager.py:511:_try_run_local_init_op] Running local_init_op.
-[2023-03-20 11:51:11,302] [INFO][session_manager.py:513:_try_run_local_init_op] Done running local_init_op.
-[2023-03-20 11:51:14,279] [INFO][global_step_hook.py:39:before_run] global_step: 10488361
+In the experiment, we set the replicas of worker to 4 in a job and
+use chaosblade to put a 90% CPU load on the `worker-1` with the command
+
+```bash
+chaosblade-1.7.2/blade create cpu load --cpu-percent 90
```
-## PS fault-tolerance example
+If you use the image `registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch201-mnist`,
+you can use chaosblade to create a chaos experiment by
-During the run, delete the pod of ps-i; the dlrover master then relaunches a new pod. The name of ps-i's pod changes, but the newly created pod starts with the same command as the killed pod. Worker training is interrupted from the time the pod is killed until the new pod starts and the PS creates its server.
+```bash
+sh examples/pytorch/mnist/start_chaos.sh cpu-overload
+```
+
+and set the command in the yaml of elasticjob like the [example](../../examples/pytorch/mnist/choas_test_job.yaml).
+
+```yaml
+ command:
+ - /bin/bash
+ - -c
+ - "(bash examples/pytorch/mnist/start_chaos.sh cpu-overload &) && \
+ dlrover-run --network-check --exclude-straggler --nnodes=3:$WORKER_NUM \
+ --nproc_per_node=2 --max_restarts=3 --rdzv_conf pend_timeout=600 \
+ examples/pytorch/mnist/cnn_train.py --num_epochs 5 \
+ --training_data /data/mnist_png/training/ \
+ --validation_data /data/mnist_png/testing/"
+```
-### Launch the job
+After submitting an ElasticJob to the k8s cluster by
+`kubectl -n dlrover apply -f examples/pytorch/mnist/choas_test_job.yaml`,
+we can see that the `worker-1` exits with errors like
+
+```text
+elasticjob-torch-mnist-debug-dlrover-master 0/1 Completed 0 3h17m
+torch-mnist-debug-edljob-worker-0 0/1 Completed 0 3h17m
+torch-mnist-debug-edljob-worker-1 0/1 Error 0 3h17m
+torch-mnist-debug-edljob-worker-2 0/1 Completed 0 3h17m
+torch-mnist-debug-edljob-worker-3 0/1 Completed 0 3h17m
+torch-mnist-debug-edljob-worker-4 0/1 Completed 0 3h10m
+```
-After launching the job, view the currently running workers and PS.
+From the log of worker-1 by `kubectl -n dlrover logs torch-mnist-debug-edljob-worker-1`,
+worker-1 fails because it is a straggler. If we don't want worker-1 to fail when it is a
+straggler, we can remove `--exclude-straggler` from the command, i.e. use
+`dlrover-run --network-check` only.
+
+```text
+[2023-09-26 03:52:20,235] [INFO] [training.py:707:run] Fault nodes are: [] and stragglers are: [1].
+Traceback (most recent call last):
+ File "/usr/local/bin/dlrover-run", line 8, in <module>
+ sys.exit(main())
+ ...
+ File "/usr/local/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/training.py", line 733, in run
+ raise RuntimeError("The node is a straggler and exits.")
+RuntimeError: The node is a straggler and exits.
-```shell
-NAME READY STATUS RESTARTS AGE
-deepctr-auto-scaling-job-edljob-chief-0 1/1 Running 0 4m3s
-deepctr-auto-scaling-job-edljob-ps-0 1/1 Running 0 4m3s
-deepctr-auto-scaling-job-edljob-ps-1 1/1 Running 0 106s
-deepctr-auto-scaling-job-edljob-worker-0 1/1 Running 0 2m30s
-deepctr-auto-scaling-job-edljob-worker-1 1/1 Running 0 2m30s
```
-### Simulate PS fault tolerance
+We can check for stragglers by viewing, in the master log, the elapsed time each node takes to run the task.
-To simulate fault tolerance, manually delete the pod of ps-0; the worker logs after the deletion are
+```bash
+kubectl -n dlrover logs elasticjob-torch-mnist-debug-dlrover-master | grep elapsed
+```
-```shell
-[2023-03-20 15:04:34,350] [INFO][monitored_session.py:1336:run] An error was raised. This may be due to a preemption in a connected worker or parameter server. The current session will be closed and a new session will be created. This error may also occur due to a gRPC failure caused by high memory or network bandwidth usage in the parameter servers. If this error occurs repeatedly, try increasing the number of parameter servers assigned to the job. Error: From /job:ps/replica:0/task:1:
-RecvTensor expects a different device incarnation: 11288349594494262162 vs. 11542130100054943552. Your worker job ("/job:localhost/replica:0/task:0") was probably restarted. Check your worker job for the reason why it was restarted.
+```text
+Round 0: The node elapsed time are {2: 20.307, 3: 20.265, 0: 206.872, 1: 151.752}
+Round 1: The node elapsed time are {2: 20.307, 3: 20.265, 0: 23.174, 1: 135.961}
+Round 2: The node elapsed time are {2: 21.491, 0: 22.685, 3: 20.889, 1: 23.097}
```
-When the PS pod is recreated and the PS server starts
+From the log, the elapsed time of worker-1 is much bigger than the others' in the first 2 rounds.
+After worker-1 fails, the ElasticJob relaunches a new Pod worker-4 to restore the failed Pod.
+Afterwards, the elapsed times of all nodes show no significant difference. Note: the index is the
+`WORKER_RANK` of the node, and the `WORKER_RANK` of worker-4 is the same as worker-1's.
+
+### Fault Node
+
+In the experiment, we set the replicas of worker to 4 in a job and
+use chaosblade to kill the process running `run_network_check.py`
+to simulate a fault node.
+
+We set the command in the yaml of the elasticjob like the [example](../../examples/pytorch/mnist/choas_test_job.yaml).
+
+```yaml
+command:
+ - /bin/bash
+ - -c
+ - "(bash examples/pytorch/mnist/start_chaos.sh kill-process &) && \
+ dlrover-run --network-check --exclude-straggler --nnodes=3:$WORKER_NUM \
+ --nproc_per_node=2 --max_restarts=3 --rdzv_conf pend_timeout=600 \
+ examples/pytorch/mnist/cnn_train.py --num_epochs 5 \
+ --training_data /data/mnist_png/training/ \
+ --validation_data /data/mnist_png/testing/"
+```
-```shell
+```text
+chaos-test-edljob-worker-0 1/1 Running 0 12m
+chaos-test-edljob-worker-1 0/1 Error 0 12m
+chaos-test-edljob-worker-2 1/1 Running 0 12m
+chaos-test-edljob-worker-3 1/1 Running 0 12m
+chaos-test-edljob-worker-4 1/1 Running 0 3m59s
+elasticjob-chaos-test-dlrover-master 1/1 Running 0 12m
+```
+
+The worker-1 fails with the message
+
+```text
+Traceback (most recent call last):
+ ....
+ File "/usr/local/lib/python3.8/site-packages/dlrover/python/elastic_agent/torch/training.py", line 732, in run
+ raise RuntimeError("The node network is breakdown.")
+RuntimeError: The node network is breakdown.
+```
+
+From the master log by `kubectl -n dlrover logs elasticjob-chaos-test-dlrover-master | grep "The node status"`,
+the worker-1 fails in the first 2 rounds of the check. After worker-4 starts to replace worker-1,
+all nodes are normal.
+
+```text
+Round 1: The node status are {1: False, 2: True, 3: True, 0: False}.
+Round 2: The node status are {1: False, 2: True, 3: True, 0: True}.
+Round 3: The node status are {3: True, 0: True, 1: True, 2: True}.
+```
+
+### Network Breakdown
+
+In the experiment, we set the replicas of worker to 4 in a job and
+use chaosblade to set the network loss rate to 100% to simulate
+that the network of the node breaks down.
+
+We watch the log of worker-1 to check whether the training starts.
+The training has started once `loss=..., step=...` appears in the log.
+After the training starts, we set a network loss rate of 100%
+on the worker-1 to simulate that the network of worker-1 breaks down.
+
+```bash
+kubectl -n dlrover exec -it chaos-test-edljob-worker-1 bash
+./chaosblade-1.7.2/blade create network loss --percent 100 --interface eth0
+```
+
+Then, the worker-1 fails and a new worker-4 starts to replace the worker-1.
+
+```text
+chaos-test-edljob-worker-0 1/1 Running 0 4m39s
+chaos-test-edljob-worker-1 0/1 Error 0 4m39s
+chaos-test-edljob-worker-2 1/1 Running 0 4m39s
+chaos-test-edljob-worker-3 1/1 Running 0 4m39s
+chaos-test-edljob-worker-4 1/1 Running 0 17s
+elasticjob-chaos-test-dlrover-master 1/1 Running 0 4m43s
+```
+
+From the log of worker-0, the training also restores after the worker-4 starts.
+
+```text
+loss = 0.24101698398590088, step = 0
+INFO:torch.nn.parallel.distributed:Reducer buckets have been rebuilt in this iteration.
+INFO:torch.nn.parallel.distributed:Reducer buckets have been rebuilt in this iteration.
+loss = 0.4646361768245697, step = 20
+```
+
+### Training Process Corruption
+
+In the experiment, we set the replicas of worker to 4 in a job and
+use chaosblade to kill a training process to simulate a GPU error.
+
+We watch the log of worker-1 to check whether the training starts.
+The training has started once `loss=..., step=...` appears in the log.
+After the training starts, we kill a process in the worker-1.
+
+```bash
+kubectl -n dlrover exec -it chaos-test-edljob-worker-1 bash
+ps -aux | grep cnn_train.py
+```
+
+Then, we can kill a training process by `kill -9 ${PID}`. All the workers
+are still running, and we can see from the log that the training restarts.
+
+```text
+chaos-test-edljob-worker-0 1/1 Running 0 3m4s
+chaos-test-edljob-worker-1 1/1 Running 0 3m4s
+chaos-test-edljob-worker-2 1/1 Running 0 3m4s
+chaos-test-edljob-worker-3 1/1 Running 0 3m4s
+elasticjob-chaos-test-dlrover-master 1/1 Running 0 3m9s
+```
+
+### Scale Up Nodes
+
+In the experiment, we use the [example](../../examples/pytorch/mnist/elastic_job.yaml)
+to submit an elastic training job. In the job, we set `min_node=3` and
+`max_node=$WORKER_NUM` as the number of replicas. The ElasticJob will set the replicas
+into the environment variable `WORKER_NUM`.
+
+At first, there are 3 running workers and 1 pending worker due to insufficient resources.
+
+```text
+elasticjob-torch-mnist-dlrover-master 1/1 Running 0 57s
+torch-mnist-edljob-worker-0 1/1 Running 0 47s
+torch-mnist-edljob-worker-1 1/1 Running 0 47s
+torch-mnist-edljob-worker-2 1/1 Running 0 47s
+torch-mnist-edljob-worker-3 0/1 Pending 0 47s
+```
+
+After about 2 min, we can see from the log that the training starts on the 3 running workers.
+
+```text
+[2023-09-27 02:23:21,097] [INFO] [training.py:344:_rendezvous] [default] Rendezvous complete for workers. Result:
+ restart_count=0
+ master_addr=192.168.0.71
+ master_port=36725
+ group_rank=0
+ group_world_size=3
+ local_ranks=[0, 1]
+ role_ranks=[0, 1]
+ global_ranks=[0, 1]
+ role_world_sizes=[6, 6]
+ global_world_sizes=[6, 6]
+
+rank 1 is initialized local_rank = 1
+loss = 2.3198373317718506, step = 0
+loss = 2.2946105003356934, step = 0
+loss = 1.7543025016784668, step = 20
+```
+
+Then, we kill another job to release resources, and the worker-3 starts.
+
+```text
+elasticjob-torch-mnist-dlrover-master 1/1 Running 0 5m39s
+torch-mnist-edljob-worker-0 1/1 Running 0 5m34s
+torch-mnist-edljob-worker-1 1/1 Running 0 5m34s
+torch-mnist-edljob-worker-2 1/1 Running 0 5m34s
+torch-mnist-edljob-worker-3 1/1 Running 0 5m34s
+```
+
+From the log of worker-0, we can see the training starts with `group_world_size=4`.
+
+```text
+[2023-09-27 02:25:43,362] [INFO] [training.py:344:_rendezvous] [default] Rendezvous complete for workers. Result:
+ restart_count=1
+ master_addr=192.168.0.71
+ master_port=58241
+ group_rank=0
+ group_world_size=4
+ local_ranks=[0, 1]
+ role_ranks=[0, 1]
+ global_ranks=[0, 1]
+ role_world_sizes=[8, 8]
+ global_world_sizes=[8, 8]
+
+rank 1 is initialized local_rank = 1
+rank 0 is initialized local_rank = 0
+
+loss = 2.2984073162078857, step = 0
+loss = 2.1407980918884277, step = 20
+loss = 1.1324385404586792, step = 40
+loss = 0.4783979058265686, step = 60
+loss = 0.5714012384414673, step = 80
+loss = 0.6941334009170532, step = 100
+```
+
+### Scale Down Nodes
+
+In the experiment, we use the [example](../../examples/pytorch/mnist/elastic_job.yaml)
+to submit an elastic training job. In the job, we set `min_node=3` and
+`max_node=$WORKER_NUM` as the number of replicas. The ElasticJob will set the replicas
+into the environment variable `WORKER_NUM`.
+
+At first, there are 4 running workers.
+
+```text
+elasticjob-torch-mnist-dlrover-master 1/1 Running 0 2m43s
+torch-mnist-edljob-worker-0 1/1 Running 0 2m38s
+torch-mnist-edljob-worker-1 1/1 Running 0 2m38s
+torch-mnist-edljob-worker-2 1/1 Running 0 2m38s
+torch-mnist-edljob-worker-3 0/1 Running 0 2m38s
+```
+
+Then, we use chaosblade to make worker-1 fail.
+
+```bash
+kubectl -n dlrover exec -it torch-mnist-edljob-worker-1 bash
+./chaosblade-1.7.2/blade create process kill --process dlrover-run --signal 1
+```
+
+```text
+elasticjob-torch-mnist-dlrover-master 1/1 Running 0 4m43s
+torch-mnist-edljob-worker-0 1/1 Running 0 4m38s
+torch-mnist-edljob-worker-1 0/1 Error 0 4m38s
+torch-mnist-edljob-worker-2 1/1 Running 0 4m38s
+torch-mnist-edljob-worker-3 1/1 Running 0 4m38s
+```
+
+From the log of worker-0, we can see that the training restores the model and data sampler
+from the checkpoint and starts with `group_world_size=3`.
+
+```text
+[2023-09-27 03:18:00,815] [INFO] [training.py:344:_rendezvous] [default] Rendezvous complete for workers. Result:
+ restart_count=1
+ master_addr=192.168.0.66
+ master_port=39705
+ group_rank=0
+ group_world_size=3
+ local_ranks=[0, 1]
+ role_ranks=[0, 1]
+ global_ranks=[0, 1]
+ role_world_sizes=[6, 6]
+ global_world_sizes=[6, 6]
+
+[2023-09-27 03:18:05,957] [INFO] [sampler.py:153:load_state_dict] Load epoch = 0, completed num = 51200, num_samples = 1467
+[2023-09-27 03:18:05,958] [INFO] [sampler.py:153:load_state_dict] Load epoch = 0, completed num = 51200, num_samples = 1467
+loss = 0.2617453336715698, step = 0
+loss = 0.2548859417438507, step = 20
+```
+
+## Experiments of TensorFlow PS Distributed Job
+
+We conduct experiments with the TF distributed job using PS to
+test the fault-tolerance of worker and PS.
+
+### Fault-tolerance of Worker
+
+We can submit a TensorFlow PS job using the [example](../../examples/tensorflow/criteo_deeprec/manual_job.yaml).
+The job will launch 1 chief, 1 worker and 1 PS.
+
+```bash
+kubectl -n dlrover apply -f examples/tensorflow/criteo_deeprec/manual_job.yaml
+```
+
+```text
+deepctr-manual-scale-edljob-chief-0 1/1 Running 0 88s
+deepctr-manual-scale-edljob-ps-0 1/1 Running 0 88s
+deepctr-manual-scale-edljob-worker-0 1/1 Running 0 88s
+elasticjob-deepctr-manual-scale-dlrover-master 1/1 Running 0 99s
+```
+
+We use `kubectl` to kill a worker.
+
+```bash
+kubectl -n dlrover delete pod deepctr-manual-scale-edljob-worker-0
+```
+
+After the worker-0 is killed, the job relaunches the worker-1 to restore the failed node.
+
+```text
NAME READY STATUS RESTARTS AGE
-deepctr-auto-scaling-job-edljob-chief-0 1/1 Running 0 11m
-deepctr-auto-scaling-job-edljob-ps-1 1/1 Running 0 8m55s
-deepctr-auto-scaling-job-edljob-ps-2 1/1 Running 0 6m13s
-deepctr-auto-scaling-job-edljob-worker-0 1/1 Running 0 9m39s
-deepctr-auto-scaling-job-edljob-worker-1 1/1 Running 0 9m39s
+deepctr-manual-scale-edljob-chief-0 1/1 Running 0 2m57s
+deepctr-manual-scale-edljob-ps-0 1/1 Running 0 2m57s
+deepctr-manual-scale-edljob-worker-1 1/1 Running 0 60s
+elasticjob-deepctr-manual-scale-dlrover-master 1/1 Running 0 3m8s
+```
+
+After the job runs for about 4 min, the chief-0 fails with OOM due to the insufficient memory
+configuration. The job relaunches the chief-1 with more memory to restore it.
+
+```text
+deepctr-manual-scale-edljob-chief-0 0/1 OOMKilled 0 4m53s
+deepctr-manual-scale-edljob-chief-1 1/1 Running 0 64s
+deepctr-manual-scale-edljob-ps-0 1/1 Running 0 4m53s
+deepctr-manual-scale-edljob-worker-1 1/1 Running 0 2m56s
```
-The worker loads the latest checkpoint and continues training
+We can view the memory of chief-0 and chief-1 by
+
+```bash
+kubectl -n dlrover get pod deepctr-manual-scale-edljob-chief-0 -o yaml | grep memory
+
+>>>
+ memory: 4Gi
+ memory: 4Gi
+```
+
+```bash
+kubectl -n dlrover get pod deepctr-manual-scale-edljob-chief-1 -o yaml | grep memory
+
+>>>
+ memory: 8Gi
+ memory: 8Gi
+```
+
+We can view the log of chief-1 to check whether the training restores.
```shell
-[2023-03-20 15:04:34,100] [INFO][monitored_session.py:1336:run] An error was raised. This may be due to a preemption in a connected worker or parameter server. The current session will be closed and a new session will be created. This error may also occur due to a gRPC failure caused by high memory or network bandwidth usage in the parameter servers. If this error occurs repeatedly, try increasing the number of parameter servers assigned to the job. Error:
-=====================
-Aborted: From /job:chief/replica:0/task:0:
-RecvTensor expects a different device incarnation: 11288349594494262162 vs. 11542130100054943552. Your worker job ("/job:localhost/replica:0/task:0") was probably restarted. Check your worker job for the reason why it was restarted.
-Additional GRPC error information:
-{"created":"@1679295874.088182934","description":"Error received from peer","file":"external/grpc/src/core/lib/surface/call.cc","file_line":1039,"grpc_message":"RecvTensor expects a different device incarnation: 11288349594494262162 vs. 11542130100054943552. Your worker job ("/job:localhost/replica:0/task:0") was probably restarted. Check your worker job for the reason why it was restarted.","grpc_status":10}
- [[node global_step (defined at /local/lib/python3.8/dist-packages/tensorflow_core/python/framework/ops.py:1748) ]]
-Aborted: From /job:ps/replica:0/task:0:
-Session handle is not found: f8368e3b7d417955. Possibly this worker ("/job:localhost/replica:0/task:0") just restarted.
-=====================
-
-
-Original stack trace for 'global_step':
- File "/lib/python3.8/threading.py", line 890, in _bootstrap
- self._bootstrap_inner()
- File "/lib/python3.8/threading.py", line 932, in _bootstrap_inner
- self.run()
- File "/lib/python3.8/threading.py", line 870, in run
- self._target(*self._args, **self._kwargs)
- File "/local/lib/python3.8/dist-packages/dlrover/trainer/worker/tf_kubernetes_worker.py", line 56, in run_worker
- self.estimator.train_and_evaluate()
- File "/local/lib/python3.8/dist-packages/dlrover/trainer/tensorflow/executor/estimator_executor.py", line 273, in train_and_evaluate
- tf.estimator.train_and_evaluate(
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/training.py", line 473, in train_and_evaluate
- return executor.run()
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/training.py", line 640, in run
- getattr(self, task_to_run)()
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/training.py", line 645, in run_chief
- return self._start_distributed_training()
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/training.py", line 790, in _start_distributed_training
- self._estimator.train(
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/estimator.py", line 370, in train
- loss = self._train_model(input_fn, hooks, saving_listeners)
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/estimator.py", line 1166, in _train_model
- return self._train_model_default(input_fn, hooks, saving_listeners)
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/estimator.py", line 1184, in _train_model_default
- global_step_tensor = self._create_and_assert_global_step(g)
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/estimator.py", line 1082, in _create_and_assert_global_step
- step = self._create_global_step(graph)
- File "/local/lib/python3.8/dist-packages/tensorflow_estimator/python/estimator/estimator.py", line 1071, in _create_global_step
- return training.create_global_step(graph)
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/training/training_util.py", line 137, in create_global_step
- return variable_scope.get_variable(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variable_scope.py", line 1951, in get_variable
- return get_variable_scope().get_variable(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variable_scope.py", line 1509, in get_variable
- return var_store.get_variable(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variable_scope.py", line 786, in get_variable
- return _true_getter(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variable_scope.py", line 731, in _true_getter
- return self._get_single_variable(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variable_scope.py", line 1199, in _get_single_variable
- v = variables.VariableV1(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variables.py", line 460, in __call__
- return cls._variable_v1_call(*args, **kwargs)
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variables.py", line 401, in _variable_v1_call
- return previous_getter(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variables.py", line 394, in <lambda>
- previous_getter = lambda **kwargs: default_variable_creator(None, **kwargs)
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variable_scope.py", line 3389, in default_variable_creator
- return variables.RefVariable(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variables.py", line 464, in __call__
- return super(VariableMetaclass, cls).__call__(*args, **kwargs)
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variables.py", line 1883, in __init__
- self._init_from_args(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/variables.py", line 2030, in _init_from_args
- self._variable = state_ops.variable_op_v2(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/state_ops.py", line 76, in variable_op_v2
- return gen_state_ops.variable_v2(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/ops/gen_state_ops.py", line 1619, in variable_v2
- _, _, _op = _op_def_lib._apply_op_helper(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/framework/op_def_library.py", line 792, in _apply_op_helper
- op = g.create_op(op_type_name, inputs, dtypes=None, name=scope,
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/util/deprecation.py", line 507, in new_func
- return func(*args, **kwargs)
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/framework/ops.py", line 3360, in create_op
- return self._create_op_internal(op_type, inputs, dtypes, input_types, name,
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/framework/ops.py", line 3422, in _create_op_internal
- ret = Operation(
- File "/local/lib/python3.8/dist-packages/tensorflow_core/python/framework/ops.py", line 1748, in __init__
- self._traceback = tf_stack.extract_stack()
-
-[2023-03-20 15:04:34,499] [INFO][monitored_session.py:256:finalize] Graph was finalized.
-[2023-03-20 15:04:34,511] [INFO][session_manager.py:220:_restore_checkpoint] run with loading checkpoint
-[2023-03-20 15:04:34,724] [INFO][saver.py:1531:restore] Restoring parameters from /nas/model.ckpt-10701903
+[2023-03-20 11:51:10,774] [INFO][session_manager.py:511:_try_run_local_init_op] Running local_init_op.
+[2023-03-20 11:51:11,302] [INFO][session_manager.py:513:_try_run_local_init_op] Done running local_init_op.
+[2023-03-20 11:51:14,279] [INFO][global_step_hook.py:39:before_run] global_step: 126
+```
+
+### Fault-tolerance of PS
+
+We kill the ps-0 by `kubectl -n dlrover delete pod deepctr-manual-scale-edljob-ps-0`.
+The job relaunches the ps-1 to restore the killed ps-0.
+
+```text
+deepctr-manual-scale-edljob-chief-0 0/1 OOMKilled 0 10m
+deepctr-manual-scale-edljob-chief-1 1/1 Running 0 7m1s
+deepctr-manual-scale-edljob-ps-1 1/1 Running 0 109s
+deepctr-manual-scale-edljob-worker-1 0/1 OOMKilled 0 8m53s
+deepctr-manual-scale-edljob-worker-2 1/1 Running 0 4m13s
+elasticjob-deepctr-manual-scale-dlrover-master 1/1 Running 0 11m
+```
+
+From the log of the chief, the training job restores the model from the latest checkpoint
+and continues training the model.
+
+```text
+[2023-09-26 19:24:00,861] [INFO][saver.py:1531:restore] Restoring parameters from /nas/deepctr/model.ckpt-126
+[2023-09-26 19:24:03,473] [INFO][session_manager.py:511:_try_run_local_init_op] Running local_init_op.
+[2023-09-26 19:24:03,580] [INFO] [resource.py:164:report_resource] Report Resource CPU : 0.98, Memory 7146, GPU []
+[2023-09-26 19:24:03,670] [INFO][session_manager.py:513:_try_run_local_init_op] Done running local_init_op.
+[2023-09-26 19:24:07,665] [INFO][basic_session_run_hooks.py:627:_save] Saving checkpoints for 126 into /nas/deepctr/model.ckpt.
```
diff --git a/examples/pytorch/mnist/cnn_train.py b/examples/pytorch/mnist/cnn_train.py
index 31fe72ccc..a1f2c4f9e 100644
--- a/examples/pytorch/mnist/cnn_train.py
+++ b/examples/pytorch/mnist/cnn_train.py
@@ -34,7 +34,9 @@
from dlrover.trainer.torch.elastic.sampler import ElasticDistributedSampler
from dlrover.trainer.torch.elastic.trainer import ElasticTrainer
-CHEKPOINT_PATH = "model.pt"
+# Note, we need to set the path of a shared file
+# system like nas, cpfs or hdfs.
+CHEKPOINT_PATH = "./model.pt"
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
diff --git a/examples/pytorch/mnist/elastic_job.yaml b/examples/pytorch/mnist/elastic_job.yaml
index 5de0bb182..ee34cb09a 100644
--- a/examples/pytorch/mnist/elastic_job.yaml
+++ b/examples/pytorch/mnist/elastic_job.yaml
@@ -11,7 +11,6 @@ spec:
replicas: 4
template:
spec:
- restartPolicy: Always
containers:
- name: main
# yamllint disable-line rule:line-length
@@ -20,17 +19,18 @@ spec:
command:
- /bin/bash
- -c
- - "dlrover-run --network-check --nnodes=$WORKER_NUM \
+          # WORKER_NUM is set into the env with the number of replicas as its value.
+ - "dlrover-run --network-check --nnodes=3:$WORKER_NUM \
--nproc_per_node=2 --max_restarts=3 \
- examples/pytorch/mnist/cnn_train.py --num_epochs 2 \
+ examples/pytorch/mnist/cnn_train.py --num_epochs 5 \
--training_data /data/mnist_png/training/ \
--validation_data /data/mnist_png/testing/"
resources:
limits:
- cpu: "1" # turn up when using GPU
- memory: 2Gi # turn up when using GPU
+ cpu: "2" # turn up when using GPU
+ memory: 3Gi # turn up when using GPU
# nvidia.com/gpu: 1 # optional
requests:
- cpu: "1" # turn up when using GPU
- memory: 2Gi # turn up when using GPU
+ cpu: "2" # turn up when using GPU
+ memory: 3Gi # turn up when using GPU
# nvidia.com/gpu: 1 # optional
diff --git a/examples/tensorflow/criteo_deeprec/manual_job.yaml b/examples/tensorflow/criteo_deeprec/manual_job.yaml
index 0e3b8c0a6..343e6a469 100644
--- a/examples/tensorflow/criteo_deeprec/manual_job.yaml
+++ b/examples/tensorflow/criteo_deeprec/manual_job.yaml
@@ -27,10 +27,10 @@ spec:
--enable_auto_scaling=True"
resources:
limits:
- cpu: "1"
+ cpu: "2"
memory: 3Gi
requests:
- cpu: "1"
+ cpu: "2"
memory: 3Gi
volumeMounts:
- name: pvc-nas
@@ -58,10 +58,10 @@ spec:
--enable_auto_scaling=True"
resources:
limits:
- cpu: "1"
+ cpu: "2"
memory: 4Gi
requests:
- cpu: "1"
+ cpu: "2"
memory: 4Gi
volumeMounts:
- name: pvc-nas
| A tech report to test the fault tolerance and elasticity using chaosblade.
We can use chaosblade to create chaos experiments (example blade commands are sketched after this list):
- Pod is preempted.
- The training process hangs or fails.
- The CPU is busy, to test stragglers.
- OOM of Pod.
- The Pod network breaks down.
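For reference, a few of the `blade` commands used in these experiments, collected from the patch (the `chaosblade-1.7.2/` path assumes the release tarball is unpacked in the image):
```bash
# Put a 90% CPU load on the node to create a straggler.
chaosblade-1.7.2/blade create cpu load --cpu-percent 90
# Drop all packets on eth0 to break the node network.
chaosblade-1.7.2/blade create network loss --percent 100 --interface eth0
# Kill the training launcher process.
chaosblade-1.7.2/blade create process kill --process dlrover-run --signal 1
```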
| 2023-09-26T11:40:47 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-736 | b6fac9b5fdfa35691e4372071aa4525ea637bdd8 | diff --git a/dlrover/python/elastic_agent/monitor/resource.py b/dlrover/python/elastic_agent/monitor/resource.py
index ee047880f..3db99def5 100644
--- a/dlrover/python/elastic_agent/monitor/resource.py
+++ b/dlrover/python/elastic_agent/monitor/resource.py
@@ -161,7 +161,7 @@ def report_resource(self):
GlobalMasterClient.MASTER_CLIENT.report_used_resource(
used_mem, current_cpu, self._gpu_stats
)
- logger.info(
+ logger.debug(
"Report Resource CPU : %s, Memory %s, GPU %s",
current_cpu,
used_mem,
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 0a6ea0c3d..f60dd4d95 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -691,6 +691,7 @@ def run(self, role: str = DEFAULT_ROLE) -> bool:
stragglers = []
for i in range(self._check_round):
result, elapsed_time = self._run_network_check()
+ elapsed_time = round(elapsed_time, 3)
logger.info(
f"Network check time of round {i} is {elapsed_time}"
f" and succeed is {result}."
diff --git a/dlrover/python/master/elastic_training/rdzv_manager.py b/dlrover/python/master/elastic_training/rdzv_manager.py
index aecd51e58..3e128518e 100644
--- a/dlrover/python/master/elastic_training/rdzv_manager.py
+++ b/dlrover/python/master/elastic_training/rdzv_manager.py
@@ -288,7 +288,7 @@ def get_comm_world(self, rank_id):
f"node group: {self._node_groups}"
)
if self._rdzv_round % 2 == 0:
- self._node_status = {}
+ self._clear_check_status()
self._reported_nodes = set()
self._rdzv_round += 1
for i, group in enumerate(self._node_groups):
@@ -296,6 +296,10 @@ def get_comm_world(self, rank_id):
return i, group
return 0, self._rdzv_nodes
+ def _clear_check_status(self):
+ self._node_status = {}
+ self._node_times = {}
+
def _group_nodes(self, round):
"""Group nodes into goups.
Round 0: group all nodes into a group like {0:8, 1:8, 2:8, 3:8}.
@@ -365,14 +369,17 @@ def report_network_check_result(
self._node_status.setdefault(node_id, succeed)
self._node_times.setdefault(node_id, elapsed_time)
self._node_status[node_id] = self._node_status[node_id] or succeed
- self._node_times[node_id] = min(
- self._node_times[node_id], elapsed_time
+ self._node_times[node_id] = round(
+ min(self._node_times[node_id], elapsed_time), 3
)
if len(self._reported_nodes) == len(self._rdzv_nodes):
logger.info(
- f"The node status of {self._rdzv_round} check "
- f"is {self._node_status}.\n"
- f"The elapsed time of nodes are {self._node_times}"
+ f"Round {self._rdzv_round}: The node status "
+ f"are {self._node_status}."
+ )
+ logger.info(
+ f"Round {self._rdzv_round}: The node elapsed time "
+ f"are {self._node_times}"
)
def join_rendezvous(
@@ -399,7 +406,6 @@ def check_fault_node(self):
"""
with self._lock:
reason = ""
- fault_nodes = self._fault_nodes
if len(self._reported_nodes) < len(self._rdzv_nodes):
reason = NetworkFailureReason.WAITING_NODE
elif self._fault_nodes:
@@ -419,7 +425,7 @@ def check_fault_node(self):
)
else:
reason = NetworkFailureReason.NODE_FAILURE
- return fault_nodes, reason
+ return self._fault_nodes, reason
def get_straggler(self):
"""Detect whether there is the straggler according to the
diff --git a/dlrover/python/master/scaler/pod_scaler.py b/dlrover/python/master/scaler/pod_scaler.py
index db6f90916..e0bca63b0 100644
--- a/dlrover/python/master/scaler/pod_scaler.py
+++ b/dlrover/python/master/scaler/pod_scaler.py
@@ -215,6 +215,11 @@ def _list_job_pods(self):
status=pod.status.phase,
config_resource=pod_resource,
)
+ if node.type != NodeType.WORKER and node.status not in [
+ NodeStatus.PENDING,
+ NodeStatus.RUNNING,
+ ]:
+ continue
job_pods[pod_type].append(node)
return job_pods
diff --git a/dlrover/trainer/torch/run_network_check.py b/dlrover/trainer/torch/run_network_check.py
index e0984c093..f9496b406 100644
--- a/dlrover/trainer/torch/run_network_check.py
+++ b/dlrover/trainer/torch/run_network_check.py
@@ -82,7 +82,7 @@ def main(task):
shape = 1 << 24
task_time += bm_all_gather(shape, use_cuda)
local_rank = int(os.environ["LOCAL_RANK"])
- elapsed_time = init_time + task_time
+ elapsed_time = round(init_time + task_time, 3)
write_time_to_file(elapsed_time, local_rank)
if local_rank == 0:
logger.info(
diff --git a/examples/pytorch/mnist/elastic_job.yaml b/examples/pytorch/mnist/elastic_job.yaml
index bb17fd52a..5de0bb182 100644
--- a/examples/pytorch/mnist/elastic_job.yaml
+++ b/examples/pytorch/mnist/elastic_job.yaml
@@ -27,8 +27,10 @@ spec:
--validation_data /data/mnist_png/testing/"
resources:
limits:
- cpu: "1"
- memory: 2Gi
+ cpu: "1" # turn up when using GPU
+ memory: 2Gi # turn up when using GPU
+ # nvidia.com/gpu: 1 # optional
requests:
- cpu: "1"
- memory: 2Gi
+ cpu: "1" # turn up when using GPU
+ memory: 2Gi # turn up when using GPU
+ # nvidia.com/gpu: 1 # optional
diff --git a/examples/pytorch/mnist/mnist_chaos.dockerfile b/examples/pytorch/mnist/mnist_chaos.dockerfile
new file mode 100644
index 000000000..8c4f54234
--- /dev/null
+++ b/examples/pytorch/mnist/mnist_chaos.dockerfile
@@ -0,0 +1,25 @@
+FROM easydl/dlrover:ci as builder
+
+WORKDIR /dlrover
+COPY ./ .
+RUN sh scripts/build_wheel.sh
+
+FROM easydl/dlrover-train:torch201-cpu-py38 as base
+
+WORKDIR /dlrover
+
+RUN apt-get update && apt-get install -y sudo iproute2
+
+RUN pip install py-spy
+
+# Install chaosblade to test the fault-node and straggler.
+# Download https://github.com/chaosblade-io/chaosblade/releases/download/v1.7.2/chaosblade-1.7.2-linux-amd64.tar.gz
+COPY chaosblade-1.7.2-linux-amd64.tar.gz ./chaosblade-1.7.2-linux-amd64.tar.gz
+RUN tar -zxvf chaosblade-1.7.2-linux-amd64.tar.gz
+RUN rm chaosblade-1.7.2-linux-amd64.tar.gz
+RUN chmod +x chaosblade-1.7.2/blade
+
+COPY --from=builder /dlrover/dist/dlrover-*.whl /
+RUN pip install /*.whl --extra-index-url=https://pypi.org/simple && rm -f /*.whl
+
+COPY ./examples ./examples
diff --git a/examples/pytorch/mnist/start_chaos.sh b/examples/pytorch/mnist/start_chaos.sh
new file mode 100644
index 000000000..661fda051
--- /dev/null
+++ b/examples/pytorch/mnist/start_chaos.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+exp=$1
+
+if [ "${WORKER_ID}" -eq 1 ] && [ "${exp}" = "cpu-overload" ]
+then
+ chaosblade-1.7.2/blade create cpu load --cpu-percent 90
+elif [ "${WORKER_ID}" -eq 1 ] && [ "${exp}" = "memory-overload" ]
+then
+ chaosblade-1.7.2/blade create mem load --mode ram --mem-percent 80
+elif [ "${WORKER_ID}" -eq 1 ] && [ "${exp}" = "kill-process" ]
+then
+ for _ in {0..1200}
+ do
+ chaosblade-1.7.2/blade create process kill --process run_network_check --signal 1
+ sleep 1
+ done
+else
+ echo "No chaos experiment."
+fi
+
\ No newline at end of file
diff --git a/examples/tensorflow/criteo_deeprec/manual_job.yaml b/examples/tensorflow/criteo_deeprec/manual_job.yaml
new file mode 100644
index 000000000..0e3b8c0a6
--- /dev/null
+++ b/examples/tensorflow/criteo_deeprec/manual_job.yaml
@@ -0,0 +1,79 @@
+---
+apiVersion: elastic.iml.github.io/v1alpha1
+kind: ElasticJob
+metadata:
+ name: deepctr-manual-scale
+ namespace: dlrover
+spec:
+ distributionStrategy: ParameterServerStrategy
+ replicaSpecs:
+ ps:
+ autoScale: False
+ replicas: 1
+ template:
+ spec:
+ restartPolicy: Never
+ containers:
+ - name: main
+ # yamllint disable-line rule:line-length
+ image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:deeprec_criteo_v2
+ imagePullPolicy: Always
+ command:
+ - /bin/bash
+ - -c
+ - "cd ./examples/tensorflow/criteo_deeprec \
+ && python -m dlrover.trainer.entry.local_entry \
+ --platform=Kubernetes --conf=train_conf.TrainConf \
+ --enable_auto_scaling=True"
+ resources:
+ limits:
+ cpu: "1"
+ memory: 3Gi
+ requests:
+ cpu: "1"
+ memory: 3Gi
+ volumeMounts:
+ - name: pvc-nas
+ mountPath: /nas
+ volumes:
+ - name: pvc-nas
+ persistentVolumeClaim:
+ claimName: pvc-nas
+ worker:
+ autoScale: False
+ replicas: 2
+ template:
+ spec:
+ containers:
+ - name: main
+ # yamllint disable-line rule:line-length
+ image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:deeprec_criteo_v2
+ imagePullPolicy: Always
+ command:
+ - /bin/bash
+ - -c
+ - "cd ./examples/tensorflow/criteo_deeprec \
+ && python -m dlrover.trainer.entry.local_entry \
+ --platform=Kubernetes --conf=train_conf.TrainConf \
+ --enable_auto_scaling=True"
+ resources:
+ limits:
+ cpu: "1"
+ memory: 4Gi
+ requests:
+ cpu: "1"
+ memory: 4Gi
+ volumeMounts:
+ - name: pvc-nas
+ mountPath: /nas
+ volumes:
+ - name: pvc-nas
+ persistentVolumeClaim:
+ claimName: pvc-nas
+ dlrover-master:
+ template:
+ spec:
+ restartPolicy: Never
+ containers:
+ - name: main
+ imagePullPolicy: Always
diff --git a/examples/tensorflow/criteo_deeprec/scale_plan.yaml b/examples/tensorflow/criteo_deeprec/scale_plan.yaml
index e7ba6c3b1..0e390fe77 100644
--- a/examples/tensorflow/criteo_deeprec/scale_plan.yaml
+++ b/examples/tensorflow/criteo_deeprec/scale_plan.yaml
@@ -1,12 +1,12 @@
apiVersion: elastic.iml.github.io/v1alpha1
kind: ScalePlan
metadata:
- name: deepctr-auto-scaling-job-1
+ name: deepctr-manual-scale-plan-0
labels:
- elasticjob-name: deepctr-auto-scaling-job
+ elasticjob-name: deepctr-manual-scale
scale-type: manual
spec:
- ownerJob: deepctr-auto-scaling-job
+ ownerJob: deepctr-manual-scale
replicaResourceSpecs:
ps:
replicas: 2
diff --git a/examples/tensorflow/iris/autoscale_job.yaml b/examples/tensorflow/iris/autoscale_job.yaml
index 0ac77a230..2810879f0 100644
--- a/examples/tensorflow/iris/autoscale_job.yaml
+++ b/examples/tensorflow/iris/autoscale_job.yaml
@@ -47,3 +47,4 @@ spec:
containers:
- name: main
imagePullPolicy: Always
+ image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:v0.2.0
| The elapsed time of network check is not changed.
```
The elapsed time of nodes are {2: 20.182805944442748, 3: 18.725976165771485, 0: 181.80825593948364, 1: 143.06098918533326}
The elapsed time of nodes are {2: 20.182805944442748, 3: 18.725976165771485, 0: 19.91257585334778, 1: 143.06098918533326}
The elapsed time of nodes are {2: 20.16765477180481, 3: 18.725976165771485, 0: 19.91257585334778, 1: 20.607787996292114}
```
| 2023-09-26T07:29:07 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-732 | a70582dd53b0c1fee873ecb5db68332d9127d2e3 | diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index 99322e39a..9fa04a146 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -104,7 +104,7 @@ def _report(self, message: grpc.Message):
request.node_id = self._node_id
request.node_type = self._node_type
request.data = message.serialize()
- return self._stub.report(request)
+ return self._stub.report(request, timeout=5)
@retry_grpc_request
def _get(self, message: grpc.Message):
@@ -112,7 +112,7 @@ def _get(self, message: grpc.Message):
request.node_id = self._node_id
request.node_type = self._node_type
request.data = message.serialize()
- response = self._stub.get(request)
+ response = self._stub.get(request, timeout=5)
res_message = grpc.deserialize_message(response.data)
return res_message
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 01e04e5da..2d08af533 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -109,6 +109,26 @@ class MasterRendezvousHandler(RendezvousHandler):
after the handler of all node agents calls `_join_rendezvous`.
    Then, the handler will get the communication world from the master
and assign ranks to the training process.
+
+ Args:
+ name: the name of rendezvous.
+ rank_id: the node rank id.
+        rdzv_params: RendezvousParameters instance. We can set timeouts of
+            the rendezvous in rdzv_params.config. Now we support:
+            join_timeout: the timeout to join the rendezvous. The timeout
+                happens if the number of nodes is still less than min_nodes
+                when join_timeout elapses.
+            lastcall_timeout: the timeout to wait for new nodes after the
+                number of nodes is equal to or greater than min_nodes.
+                The nodes will complete the rendezvous and start training
+                when the timeout happens.
+            pend_timeout: the timeout to wait for the next rendezvous. The
+                timeout happens if a rendezvous completes without the node.
+                For example, the number of nodes must be a multiple of
+                node_unit; if node_unit = 4 and the number of nodes is 5,
+                the 5th node will wait for more nodes for at most
+                pend_timeout.
+ local_world_size: the number of local processes.
"""
def __init__(
@@ -123,6 +143,7 @@ def __init__(
self._rdzv_params = rdzv_params
self._local_world_size = local_world_size
self.join_timeout = int(rdzv_params.get("join_timeout", 600))
+ self.pend_timeout = float(rdzv_params.get("pend_timeout", "inf"))
self._client = GlobalMasterClient.MASTER_CLIENT
self._store = MasterKVStore(self._name, timedelta(seconds=60))
lastcall_timeout = int(rdzv_params.get("lastcall_timeout", 60))
@@ -168,6 +189,7 @@ def next_rendezvous(self):
)
logger.info(msg)
round = self._join_rendezvous()
+ start_pending = 0
while True:
group, world = self._client.get_comm_world(
self._name, self._rank_id
@@ -180,8 +202,14 @@ def next_rendezvous(self):
"The node is not in the world "
"and waits for more nodes."
)
+ if start_pending == 0:
+ start_pending = time.time()
time.sleep(5)
start_join = time.time()
+ if start_join - start_pending > self.pend_timeout:
+ raise TimeoutError(
+ f"Timeout {self.pend_timeout}s to wait more nodes"
+ )
continue
elif time.time() - start_join > self.join_timeout:
timeout = self.join_timeout
diff --git a/dlrover/trainer/torch/elastic_run.py b/dlrover/trainer/torch/elastic_run.py
index 3bc1dcca1..766be29f5 100644
--- a/dlrover/trainer/torch/elastic_run.py
+++ b/dlrover/trainer/torch/elastic_run.py
@@ -11,6 +11,67 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+
+"""
+``dlrover-run`` provides a superset of the functionality of ``torchrun``
+with the following additional functionalities:
+
+1. Checks the network of each node to detect fault nodes or stragglers.
+
+2. ``rdzv-endpoint``, ``rdzv-backend`` and ``rdzv-id`` are not required for
+multi-node multi-worker training.
+
+Usage
+--------
+
+Single-node multi-worker
+++++++++++++++++++++++++++++++
+
+::
+
+ dlrover-run
+ --standalone
+ --nproc-per-node=$NUM_TRAINERS
+ YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+
+Multi-node multi-worker
++++++++++++++++++++++++++++++++++++
+
+::
+
+    dlrover-run
+ --nnodes=$NUM_NODES
+ --nproc-per-node=$NUM_TRAINERS
+ --max-restarts=3
+ YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+
+Elastic (``min=1``, ``max=4``, tolerates up to 3 membership
+changes or failures)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+::
+
+    dlrover-run
+ --nnodes=1:4
+ --nproc-per-node=$NUM_TRAINERS
+ --max-restarts=3
+ YOUR_TRAINING_SCRIPT.py (--arg1 ... train script args...)
+
+Note on rendezvous backend
+------------------------------
+
+For multi-node training you need to specify:
+
+1. ``--network-check``: Bool, whether to check the node network to find the
+ fault node or straggler.
+2. ``--rdzv-conf``: We can set timeouts in rdzv_conf like
+   ``--rdzv-conf join_timeout=600,lastcall_timeout=60,pend_timeout=3600``.
+
+For auto-tuning the parallelism configuration, you need to specify:
+
+1. ``--auto-tunning``: Whether to auto-tune the batch size and learning rate.
+"""
+
import os
import sys
import telnetlib
| New feature issue: if a worker pod's network is abnormal after startup and it has not sent a ready message within a certain time (e.g. 15 min), the master should actively kill the pod and launch a new one.
1. If a worker pod's network is abnormal after startup and it has not sent a ready message within a certain time (e.g. 15 min), the master actively kills the pod and launches a new one.
2. This scenario can be controlled by an argument: when disabled, the job re-builds the communication world and scales down following the current flow; when enabled, the master can actively kill the pod and launch a new one.
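A minimal master-side sketch of the requested watchdog, assuming hypothetical `reported_ready`/`create_time` node fields, a `relaunch_node` helper, and an `enabled` job argument (none of these names exist in DLRover yet):
```python
import time

def check_pending_workers(job_nodes, relaunch_node, enabled, timeout=15 * 60):
    """Kill and relaunch worker pods that have not reported ready in time."""
    if not enabled:
        # Fall back to the current flow: rebuild the communication
        # world and scale down without killing the pod.
        return
    now = time.time()
    for node in job_nodes:
        if not node.reported_ready and now - node.create_time > timeout:
            # Delete the pod and launch a replacement pod.
            relaunch_node(node)
```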
| I used chaos to set the packet loss rate of worker-0's network to 100% with the following command: `./blade create network loss --percent 100 --interface eth0`
The worker reports an error automatically because its network connections fail. If the worker fails automatically and restartPolicy: Always is configured, EDL will restart worker-0 automatically.

| 2023-09-25T04:18:49 | 0.0 | [] | [] |
||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-718 | 596f82173b83624dbf03af6ad275922b327124d0 | diff --git a/dlrover/python/common/constants.py b/dlrover/python/common/constants.py
index f49300130..f8ea63182 100644
--- a/dlrover/python/common/constants.py
+++ b/dlrover/python/common/constants.py
@@ -260,3 +260,4 @@ class ConfigPath(object):
PARAL_CONFIG = "/tmp/dlrover/auto_paral_config.json"
ENV_RUNTIME_METRICS = "RUNTIME_METRICS_PATH"
RUNTIME_METRICS = "/tmp/dlrover/runtime_metrics.json"
+ NETWORK_CHECK_DATA_DIR = "/tmp/dlrover/network_check/"
diff --git a/dlrover/python/common/grpc.py b/dlrover/python/common/grpc.py
index 35a6e009b..11266c968 100644
--- a/dlrover/python/common/grpc.py
+++ b/dlrover/python/common/grpc.py
@@ -271,8 +271,9 @@ class NodeAddress(NodeMeta):
pass
-class NodeStatus(NodeMeta):
- pass
+@dataclass
+class NetworkStatus(NodeMeta):
+ elasped_time: float = 0.0
@dataclass
@@ -320,9 +321,13 @@ class NetworkReadyRequest(Message):
pass
+class StragglerExistRequest(Message):
+ pass
+
+
@dataclass
-class NetworkReady(Message):
- success: bool = False
+class NetworkCheckResult(Message):
+ nodes: List[int] = None # type: ignore
reason: str = ""
diff --git a/dlrover/python/elastic_agent/config/paral_config_tuner.py b/dlrover/python/elastic_agent/config/paral_config_tuner.py
index e0e56a931..6f7f40fbe 100644
--- a/dlrover/python/elastic_agent/config/paral_config_tuner.py
+++ b/dlrover/python/elastic_agent/config/paral_config_tuner.py
@@ -59,6 +59,8 @@ def _periodically_update_paral_config(self):
"""
while True:
local_config = self._read_paral_config(self.config_path)
+ if not self._master_client:
+ break
self._master_client.report_paral_config(local_config)
time.sleep(30)
config: ParallelConfig = self._master_client.get_paral_config()
diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index 9fa04a146..cc1a2360b 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -310,11 +310,11 @@ def get_comm_world(self, rdzv_name, rank_id):
result: grpc.RendezvousState = self._get(request)
return result.group, result.world
- def network_check_success(self, timeout=300):
+ def check_fault_node(self, timeout=300):
request = grpc.NetworkReadyRequest()
start = time.time()
while True:
- result: grpc.NetworkReady = self._get(request)
+ result: grpc.NetworkCheckResult = self._get(request)
if (
result.reason == NetworkFailureReason.WAITING_NODE
and time.time() - start < timeout
@@ -322,7 +322,21 @@ def network_check_success(self, timeout=300):
time.sleep(5)
continue
break
- return result.success
+ return result.nodes
+
+ def check_straggler(self, timeout=300):
+ request = grpc.StragglerExistRequest()
+ start = time.time()
+ while True:
+ result: grpc.NetworkCheckResult = self._get(request)
+ if (
+ result.reason == NetworkFailureReason.WAITING_NODE
+ and time.time() - start < timeout
+ ):
+ time.sleep(5)
+ continue
+ break
+ return result.nodes
def report_rdzv_params(
self, min_nodes, max_nodes, waiting_timeout, node_unit
@@ -336,8 +350,10 @@ def report_rdzv_params(
response = self._report(message)
return response.success
- def report_node_status(self, rank_id, status):
- message = grpc.NodeStatus(rank=rank_id, status=status)
+ def report_network_status(self, rank_id, status, elasped_time):
+ message = grpc.NetworkStatus(
+ rank=rank_id, status=status, elasped_time=elasped_time
+ )
self._report(message)
def report_failures(self, error_data, restart_count=-1, level=""):
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 2d08af533..0a6ea0c3d 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -88,11 +88,14 @@ class ElasticLaunchConfig(LaunchConfig):
         node_unit: the unit for the number of nodes. The number of nodes must be
a multiple of node_unit.
auto_tunning: whether to auto-tune the parallelism configuration.
+        exclude_straggler: the node will exit if it is a straggler in the
+            network check and exclude_straggler is True.
"""
network_check: bool = False
node_unit: int = 1
auto_tunning: bool = False
+ exclude_straggler: bool = False
@dataclass
@@ -673,6 +676,7 @@ def __init__(
)
self._log_dir = log_dir or tempfile.mkdtemp(prefix="network_check_")
self._check_round = 2
+ self._config: ElasticLaunchConfig = config
def run(self, role: str = DEFAULT_ROLE) -> bool:
spec = self._worker_group.spec
@@ -683,38 +687,56 @@ def run(self, role: str = DEFAULT_ROLE) -> bool:
f"{spec.get_entrypoint_name()}"
)
success = False
+ fault_nodes = []
+ stragglers = []
for i in range(self._check_round):
- result = self._run_network_check(spec.monitor_interval)
- logger.info(f"Network check round {i} is {result}")
+ result, elapsed_time = self._run_network_check()
+ logger.info(
+ f"Network check time of round {i} is {elapsed_time}"
+ f" and succeed is {result}."
+ )
status = NodeStatus.SUCCEEDED if result else NodeStatus.FAILED
- self._client.report_node_status(self._rank_id, status)
+ self._client.report_network_status(
+ self._rank_id,
+ status,
+ elapsed_time,
+ )
success = success or result
- network_ready = self._client.network_check_success()
+ fault_nodes = self._client.check_fault_node()
+ stragglers = self._client.check_straggler()
+ logger.info(
+ f"Fault nodes are: {fault_nodes} "
+ f" and stragglers are: {stragglers}."
+ )
self._stop_workers(self._worker_group)
- if network_ready:
- return True
- else:
+ if fault_nodes or stragglers:
total_worker_num = len(self._client.get_running_nodes())
- # If the number of nodes <= 2, we cannot determine which node
- # breakdowns because there is no normal node in the job to
- # execute allgather tasks with the two nodes.
- if total_worker_num <= 2:
- logger.error(
- "Fail to check network when there are only 2 nodes."
- )
+ if total_worker_num <= 3:
+                # If the number of nodes <= 3, we cannot determine which
+                # node is at fault because there are not enough normal
+                # nodes in the job to execute allgather tasks with them.
+ logger.error("Network check needs at least 4 nodes.")
raise RuntimeError("The node network is breakdown.")
- time.sleep(1)
- if not success:
+ else:
+ # Run the next round check to detect the fault node.
+ time.sleep(3)
+ continue
+ else:
+ return True
+ if self._rank_id in fault_nodes:
self._client.report_failures(
NodeErrorMessage.NETWORKER_ERROR,
level=TrainingMsgLevel.NODE_ERROR,
)
raise RuntimeError("The node network is breakdown.")
- return False
+ elif self._config.exclude_straggler and self._rank_id in stragglers:
+ raise RuntimeError("The node is a straggler and exits.")
+ return True
- def _run_network_check(self, monitor_interval, timeout=300):
+ def _run_network_check(self, monitor_interval=3, timeout=300):
self._initialize_workers(self._worker_group)
start = time.time()
+ succeed = False
while True:
assert self._worker_group.state != WorkerState.INIT
time.sleep(monitor_interval)
@@ -724,9 +746,34 @@ def _run_network_check(self, monitor_interval, timeout=300):
if state == WorkerState.HEALTHY:
if time.time() - start > timeout:
logger.error(f"Timeout {timeout} to check network.")
- return False
+ break
continue
- return state == WorkerState.SUCCEEDED
+ elif state == WorkerState.SUCCEEDED:
+ succeed = True
+ break
+ else:
+ break
+
+ if succeed:
+ elapsed_time = self._get_network_check_time()
+ else:
+ elapsed_time = 3600
+ return succeed, elapsed_time
+
+ def _get_network_check_time(self):
+ root = ConfigPath.NETWORK_CHECK_DATA_DIR
+ elapsed_time = 0
+ if not os.path.exists(root):
+ return elapsed_time
+ for filename in os.listdir(root):
+ path = os.path.join(root, filename)
+ with open(path, "r") as f:
+ data = f.read()
+ if not data:
+ continue
+ data = json.loads(data)
+ elapsed_time = max(elapsed_time, data.get("time", 0))
+ return elapsed_time
def network_check(
diff --git a/dlrover/python/master/elastic_training/rdzv_manager.py b/dlrover/python/master/elastic_training/rdzv_manager.py
index feaa463e2..aecd51e58 100644
--- a/dlrover/python/master/elastic_training/rdzv_manager.py
+++ b/dlrover/python/master/elastic_training/rdzv_manager.py
@@ -198,7 +198,9 @@ def get_comm_world(self, rank_id):
pass
@abstractmethod
- def report_network_check_result(self, node_id: int, normal: bool):
+ def report_network_check_result(
+ self, node_id: int, normal: bool, elapsed_time: float
+ ):
"""The node updates its status"""
pass
@@ -243,7 +245,7 @@ def get_comm_world(self, rank_id):
self._rdzv_round += 1
return self._rdzv_round, self._rdzv_nodes
- def report_network_check_result(self, node_id, normal):
+ def report_network_check_result(self, node_id, normal, elapsed_time):
return
@@ -265,9 +267,12 @@ def __init__(self):
super().__init__()
self._name = RendezvousName.NETWORK_CHECK
self._node_status: Dict[int, bool] = {}
+ self._node_times: Dict[int, float] = {}
self._reported_nodes = set()
self._node_groups: List[Dict[int, int]] = []
self._check_round = 2
+ self._fault_nodes: List[int] = []
+ self._straggler_nodes: List[int] = []
def get_comm_world(self, rank_id):
"""Return the communication world if a round rendezvous is completed.
@@ -286,7 +291,6 @@ def get_comm_world(self, rank_id):
self._node_status = {}
self._reported_nodes = set()
self._rdzv_round += 1
-
for i, group in enumerate(self._node_groups):
if rank_id in group:
return i, group
@@ -315,39 +319,60 @@ def _group_nodes(self, round):
else:
node_groups.append(group)
elif round == 1:
- abnormal_nodes = []
- normal_nodes = []
- for node_id, status in self._node_status.items():
- if status:
- normal_nodes.append(node_id)
- else:
- abnormal_nodes.append(node_id)
- logger.info(
- f"Normal nodes: {normal_nodes}.\n"
- f"Abnormal nodes: {abnormal_nodes}"
- )
- if len(abnormal_nodes) > len(normal_nodes):
- return node_groups
- for i, node_id in enumerate(abnormal_nodes):
+ self._check_abnormal_nodes()
+ node_times = sorted(self._node_times.items(), key=lambda x: x[1])
+ cur_nodes = []
+ for node_id, _ in node_times:
+ if node_id in self._rdzv_nodes:
+ cur_nodes.append(node_id)
+ left, right = 0, len(cur_nodes) - 1
+ while True:
group = {}
- group[node_id] = self._rdzv_nodes[node_id]
- group[normal_nodes[i]] = self._rdzv_nodes[node_id]
- node_groups.append(group)
- group = {}
- for node_id in normal_nodes[len(abnormal_nodes) :]: # noqa: E203
- group[node_id] = self._rdzv_nodes[node_id]
- if group:
- node_groups.append(group)
+ node0 = cur_nodes[left]
+ node1 = cur_nodes[right]
+ group[node0] = self._rdzv_nodes[node0]
+ group[node1] = self._rdzv_nodes[node1]
+ if len(group) == 2:
+ node_groups.append(group)
+ left += 1
+ right -= 1
+ if right < left:
+ break
+ if len(group) == 1:
+ if len(node_groups) > 0:
+ node_groups[-1].update(group)
+ else:
+ node_groups.append(group)
return node_groups
- def report_network_check_result(self, node_id: int, succeed):
+ def _check_abnormal_nodes(self):
+ abnormal_nodes = []
+ normal_nodes = []
+ for node_id, status in self._node_status.items():
+ if status:
+ normal_nodes.append(node_id)
+ else:
+ abnormal_nodes.append(node_id)
+ logger.info(
+ f"Normal nodes: {normal_nodes}.\n"
+ f"Abnormal nodes: {abnormal_nodes}"
+ )
+
+ def report_network_check_result(
+ self, node_id: int, succeed: bool, elapsed_time: float
+ ):
self._reported_nodes.add(node_id)
- self._node_status.setdefault(node_id, False)
+ self._node_status.setdefault(node_id, succeed)
+ self._node_times.setdefault(node_id, elapsed_time)
self._node_status[node_id] = self._node_status[node_id] or succeed
+ self._node_times[node_id] = min(
+ self._node_times[node_id], elapsed_time
+ )
if len(self._reported_nodes) == len(self._rdzv_nodes):
logger.info(
- f"The node normal status of {self._rdzv_round} check "
- f"is {self._node_status}."
+ f"The node status of {self._rdzv_round} check "
+ f"is {self._node_status}.\n"
+ f"The elapsed time of nodes are {self._node_times}"
)
def join_rendezvous(
@@ -364,25 +389,71 @@ def join_rendezvous(
int: the number of rendezvous round.
"""
self._node_groups = []
+ self._fault_nodes = []
+ self._straggler_nodes = []
return super().join_rendezvous(rank_id, local_world_size)
- def network_check_success(self):
- """Check the network task is succeed. Each task contains 3 rounds
- allgather. If succeed, the round should be set to the multiples of 3.
+ def check_fault_node(self):
+ """Check whether the job has fault nodes. Each task contains 2 rounds
+ allgather. If succeed, the round should be set to the multiples of 2.
"""
with self._lock:
reason = ""
- success = False
+ fault_nodes = self._fault_nodes
if len(self._reported_nodes) < len(self._rdzv_nodes):
reason = NetworkFailureReason.WAITING_NODE
+ elif self._fault_nodes:
+ reason = NetworkFailureReason.NODE_FAILURE
else:
- if self._node_status:
- success = all(list(self._node_status.values()))
- if success:
+ self._fault_nodes = []
+ for node_id, status in self._node_status.items():
+ if not status:
+ self._fault_nodes.append(node_id)
+ if self._fault_nodes:
+ logger.warning(f"Fault nodes {self._fault_nodes}")
+ stragglers = self._detect_stragglers()
+ if not self._fault_nodes and not stragglers:
self._rdzv_round = (
math.ceil(self._rdzv_round / self._check_round)
* self._check_round
)
else:
reason = NetworkFailureReason.NODE_FAILURE
- return success, reason
+ return fault_nodes, reason
+
+ def get_straggler(self):
+ """Detect whether there is the straggler according to the
+ elapsed time of node to run the test task. If the elapsed
+ time of node is bigger than 2*median_time, the node is
+ a straggler.
+ """
+ with self._lock:
+ reason = ""
+ stragglers: Dict[int, float] = {}
+ if len(self._reported_nodes) < len(self._rdzv_nodes):
+ reason = NetworkFailureReason.WAITING_NODE
+ elif self._straggler_nodes:
+ return self._straggler_nodes, reason
+ else:
+ stragglers = self._detect_stragglers()
+ if stragglers:
+ logger.warning(f"Straggler: {stragglers}.")
+ self._straggler_nodes = list(stragglers.keys())
+ return self._straggler_nodes, reason
+
+ def _detect_stragglers(self):
+ """Detect wether there is the straggler in the job."""
+ stragglers: Dict[int, float] = {}
+ times = sorted(list(self._node_times.values()))
+ if not times:
+ return stragglers
+ if len(times) % 2 == 0:
+ i = len(times) // 2
+ med_time = (times[i] + times[i - 1]) / 2
+ else:
+ i = len(times) // 2
+ med_time = times[i]
+ for node_id, t in self._node_times.items():
+ if t > med_time * 2:
+ stragglers[node_id] = t
+ return stragglers
diff --git a/dlrover/python/master/hyperparams/simple_strategy_generator.py b/dlrover/python/master/hyperparams/simple_strategy_generator.py
index e07706be1..47c2d5988 100644
--- a/dlrover/python/master/hyperparams/simple_strategy_generator.py
+++ b/dlrover/python/master/hyperparams/simple_strategy_generator.py
@@ -70,7 +70,7 @@ def generate_opt_strategy(
)
paral_configs[node.id] = ParallelConfig(dataloader, optimizer)
node.paral_config = paral_configs[node.id]
- if paral_configs == {}:
+ if not paral_configs:
logger.debug("No parallel config.")
return None
else:
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index 8d7def3a5..2fd16b2ca 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -33,6 +33,7 @@
KVStoreService,
)
from dlrover.python.master.elastic_training.rdzv_manager import (
+ NetworkCheckRendezvousManager,
RendezvousManager,
)
from dlrover.python.master.monitor.speed_monitor import SpeedMonitor
@@ -106,7 +107,9 @@ def get(self, request, _):
elif isinstance(req_message, grpc.WaitingNodeNumRequest):
message = self._num_nodes_waiting()
elif isinstance(req_message, grpc.NetworkReadyRequest):
- message = self._network_check_success()
+ message = self._check_fault_node()
+ elif isinstance(req_message, grpc.StragglerExistRequest):
+ message = self._check_straggler()
elif isinstance(req_message, grpc.JoinRendezvousRequest):
message = self._join_rendezvous(req_message)
elif isinstance(req_message, grpc.CommWorldRequest):
@@ -212,10 +215,20 @@ def _get_training_status(self):
res.status = TrainingLoopStatus.PENDING
return res
- def _network_check_success(self):
- net_rdzv_manager = self._rdzv_managers[RendezvousName.NETWORK_CHECK]
- success, reason = net_rdzv_manager.network_check_success()
- res = grpc.NetworkReady(success=success, reason=reason)
+ def _check_fault_node(self):
+ rdzv_manager: NetworkCheckRendezvousManager = self._rdzv_managers[
+ RendezvousName.NETWORK_CHECK
+ ]
+ nodes, reason = rdzv_manager.check_fault_node()
+ res = grpc.NetworkCheckResult(nodes=nodes, reason=reason)
+ return res
+
+ def _check_straggler(self):
+ rdzv_manager: NetworkCheckRendezvousManager = self._rdzv_managers[
+ RendezvousName.NETWORK_CHECK
+ ]
+ nodes, reason = rdzv_manager.get_straggler()
+ res = grpc.NetworkCheckResult(nodes=nodes, reason=reason)
return res
def _join_rendezvous(self, request: grpc.JoinRendezvousRequest):
@@ -280,7 +293,7 @@ def report(self, request, _):
success = self._update_cluster_version(message)
elif isinstance(message, grpc.NodeAddress):
success = self._update_node_address(message)
- elif isinstance(message, grpc.NodeStatus):
+ elif isinstance(message, grpc.NetworkStatus):
success = self._update_node_status(message)
elif isinstance(message, grpc.NodeEvent):
success = self._update_node_event(message)
@@ -446,13 +459,15 @@ def _update_node_address(self, message: grpc.NodeAddress):
)
return True
- def _update_node_status(self, message: grpc.NodeStatus):
+ def _update_node_status(self, message: grpc.NetworkStatus):
net_rdzv_manager = self._rdzv_managers.get(
RendezvousName.NETWORK_CHECK, None
)
if net_rdzv_manager:
succeed = message.status == NodeStatus.SUCCEEDED
- net_rdzv_manager.report_network_check_result(message.rank, succeed)
+ net_rdzv_manager.report_network_check_result(
+ message.rank, succeed, message.elasped_time
+ )
return True
def _update_node_event(self, message: grpc.NodeEvent):
diff --git a/dlrover/trainer/torch/elastic_run.py b/dlrover/trainer/torch/elastic_run.py
index 766be29f5..e75f0cf8d 100644
--- a/dlrover/trainer/torch/elastic_run.py
+++ b/dlrover/trainer/torch/elastic_run.py
@@ -122,6 +122,14 @@ def parse_args(args):
action=check_env,
help="Whether to auto-tune the parallel configuraion.",
)
+ parser.add_argument(
+ "--exclude-straggler",
+ "--exclude_straggler",
+ action=check_env,
+ help="Bool, The node will exit if the node is straggler and "
+ "the argument is True. The argument only works when network-check "
+ "is True.",
+ )
return parser.parse_args(args)
@@ -225,6 +233,7 @@ def _elastic_config_from_args(
elastic_config.network_check = args.network_check
elastic_config.node_unit = args.node_unit
elastic_config.auto_tunning = args.auto_tunning
+ elastic_config.exclude_straggler = args.exclude_straggler
return elastic_config, cmd, cmd_args
diff --git a/dlrover/trainer/torch/run_network_check.py b/dlrover/trainer/torch/run_network_check.py
index 943744a1f..e0984c093 100644
--- a/dlrover/trainer/torch/run_network_check.py
+++ b/dlrover/trainer/torch/run_network_check.py
@@ -11,6 +11,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import argparse
+import json
import os
import time
from datetime import timedelta
@@ -18,8 +20,12 @@
import torch
import torch.distributed as dist
+from dlrover.python.common.constants import ConfigPath
from dlrover.python.common.log import default_logger as logger
+FAULT_CHECK_TASK = "fault-check"
+STRAGGLER_CHECK_TASK = "straggler-check"
+
def bm_all_gather(shape, use_cuda):
world_size = dist.get_world_size()
@@ -29,24 +35,77 @@ def bm_all_gather(shape, use_cuda):
tensor_list = [
torch.zeros_like(data).to(device) for _ in range(world_size)
]
+
start = int(time.time())
for _ in range(10):
dist.all_gather(tensor_list, data)
- end = time.time()
- if local_rank == 0:
- logger.info(f"Test allgather costs {end - start}s")
+ elapsed_time = time.time() - start
+ return elapsed_time
+
+def matmul(use_cuda, round=10):
+ local_rank = int(os.getenv("LOCAL_RANK", 0))
+ device = torch.device(f"cuda:{local_rank}" if use_cuda else "cpu")
+ tensor1 = torch.randn(10, 2048, 1024).to(device)
+ tensor2 = torch.randn(10, 1024, 2048).to(device)
+
+ start = int(time.time())
+ for _ in range(round):
+ torch.matmul(tensor1, tensor2)
+ elapsed_time = time.time() - start
+ return elapsed_time
-def main(use_cuda):
- shape = 1 << 20
- bm_all_gather(shape, use_cuda)
+def write_time_to_file(time, local_rank):
+ data = {"time": time, "local_rank": local_rank}
+ root = ConfigPath.NETWORK_CHECK_DATA_DIR
+ os.makedirs(root, exist_ok=True)
+ path = os.path.join(root, f"{local_rank}.txt")
+ with open(path, "w") as f:
+ f.write(json.dumps(data))
-if __name__ == "__main__":
+
+def main(task):
use_cuda = torch.cuda.is_available()
+ start_init = time.time()
if use_cuda:
dist.init_process_group("nccl", timeout=timedelta(seconds=180))
else:
dist.init_process_group("gloo", timeout=timedelta(seconds=180))
- main(use_cuda)
- logger.info("Finish testing allgather.")
+ init_time = round(time.time() - start_init, 3)
+ task_time = 0
+ if task == FAULT_CHECK_TASK:
+ shape = 1 << 20
+ task_time = bm_all_gather(shape, use_cuda)
+ elif task == STRAGGLER_CHECK_TASK:
+ task_time = matmul(use_cuda)
+ shape = 1 << 24
+ task_time += bm_all_gather(shape, use_cuda)
+ local_rank = int(os.environ["LOCAL_RANK"])
+ elapsed_time = init_time + task_time
+ write_time_to_file(elapsed_time, local_rank)
+ if local_rank == 0:
+ logger.info(
+ f"Init process group costs {init_time}."
+ f"Execution costs {task_time}s"
+ )
+ return elapsed_time
+
+
+def arg_parser():
+ parser = argparse.ArgumentParser(description="Network checker")
+ parser.add_argument(
+ "--task",
+ type=str,
+ default=STRAGGLER_CHECK_TASK,
+ choices=[FAULT_CHECK_TASK, STRAGGLER_CHECK_TASK],
+ required=False,
+ )
+ return parser
+
+
+if __name__ == "__main__":
+ parser = arg_parser()
+ args = parser.parse_args()
+ main(args.task)
+ logger.info("Finish testing machine.")
| Automatically detect the straggler.
In a collective communication job, a straggler will significantly degrade the training performance. If there are many nodes in a cluster, it is difficult to detect which node is the straggler. The cause of a straggler may be a slow RDMA or NVLink link, or an unknown hardware failure.
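The patch detects stragglers with a median rule in `_detect_stragglers`; a minimal standalone sketch of the same logic:
```python
def detect_stragglers(node_times):
    """Nodes whose elapsed time exceeds 2x the median time are stragglers."""
    times = sorted(node_times.values())
    if not times:
        return {}
    i = len(times) // 2
    med = (times[i] + times[i - 1]) / 2 if len(times) % 2 == 0 else times[i]
    return {node: t for node, t in node_times.items() if t > med * 2}

# Node 1 takes 143s while the median is 20s, so it is reported as a straggler.
print(detect_stragglers({0: 19.9, 1: 143.0, 2: 20.1, 3: 18.7}))  # {1: 143.0}
```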
| 2023-09-21T07:51:51 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-712 | 5657dd23f745a56545c9919ba844d3bec3124eb0 | diff --git a/dlrover/trainer/torch/elastic/sampler.py b/dlrover/trainer/torch/elastic/sampler.py
index 138e20698..f85894471 100644
--- a/dlrover/trainer/torch/elastic/sampler.py
+++ b/dlrover/trainer/torch/elastic/sampler.py
@@ -135,12 +135,21 @@ def load_state_dict(self, state: Dict[str, int]):
"""
self.epoch = int(state.get("epoch", 0))
completed_num = int(state.get("completed_num", 0))
- self.num_samples = int(
- (self.total_size - completed_num) / self.num_replicas
- )
- if completed_num > self.total_size:
- completed_num = completed_num % self.total_size
+ dataset_size = len(self.dataset)
+ if completed_num > dataset_size:
+ completed_num = completed_num % dataset_size
+ remaining_samples = dataset_size - completed_num
self._epoch_checkpoint[self.epoch] = completed_num
+ if self.drop_last and remaining_samples % self.num_replicas != 0:
+ # Split to nearest available length that is evenly divisible.
+ # This is to ensure each rank receives the same amount of data when
+ # using this Sampler.
+ self.num_samples = math.ceil(
+ (remaining_samples - self.num_replicas) / self.num_replicas
+ )
+ else:
+ self.num_samples = math.ceil(remaining_samples / self.num_replicas)
+ self.total_size = self.num_samples * self.num_replicas + completed_num
logger.info(
"Load epoch = %s, completed num = %s, num_samples = %s",
self.epoch,
| Bug: Fail to load checkpoint.
Experiment 1: --nnodes=2:4 with 3 replicas.
Start 3 workers and inject a network fault into worker1, recovering it after 5 minutes. While worker1's network is abnormal, worker0 and worker2 only log `Report Resource CPUXXX` and do not train. After the network recovers, all 3 workers fail with: `assert len(indices) == self.num_samples`.
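The fix recomputes `num_samples` from the dataset size instead of the stale `total_size`; a worked example of the new arithmetic with hypothetical numbers:
```python
import math

dataset_size, completed_num, num_replicas, drop_last = 10, 3, 3, True
remaining = dataset_size - completed_num                   # 7 samples left
if drop_last and remaining % num_replicas != 0:
    # Round down so that every rank receives the same amount of data.
    num_samples = math.ceil((remaining - num_replicas) / num_replicas)  # 2
else:
    num_samples = math.ceil(remaining / num_replicas)
total_size = num_samples * num_replicas + completed_num    # 9 <= dataset_size
# Each of the 3 ranks now gets exactly num_samples = 2 indices, so
# `assert len(indices) == self.num_samples` holds after restoring.
```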
| 2023-09-18T09:35:31 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-681 | 05dd93a4a04bc7e3b28ae3df671a66c5b3a36c58 | diff --git a/dlrover/python/common/constants.py b/dlrover/python/common/constants.py
index 761a4d834..f49300130 100644
--- a/dlrover/python/common/constants.py
+++ b/dlrover/python/common/constants.py
@@ -253,3 +253,10 @@ class TrainingMsgLevel(object):
NODE_ERROR = "node_error"
WARNING = "warning"
INFO = "info"
+
+
+class ConfigPath(object):
+ ENV_PARAL_CONFIG = "DLROVER_PARAL_CONFIG_PATH"
+ PARAL_CONFIG = "/tmp/dlrover/auto_paral_config.json"
+ ENV_RUNTIME_METRICS = "RUNTIME_METRICS_PATH"
+ RUNTIME_METRICS = "/tmp/dlrover/runtime_metrics.json"
diff --git a/dlrover/python/elastic_agent/monitor/training.py b/dlrover/python/elastic_agent/monitor/training.py
index 23375070c..c8eb7f4f2 100644
--- a/dlrover/python/elastic_agent/monitor/training.py
+++ b/dlrover/python/elastic_agent/monitor/training.py
@@ -13,6 +13,7 @@
import json
import os
+import threading
import time
from dlrover.python.common.log import default_logger as logger
@@ -35,7 +36,7 @@ def is_tf_chief():
@singleton
-class TrainingProcessReporter(object):
+class TFTrainingProcessReporter(object):
def __init__(self):
self._resource_monitor = ResourceMonitor()
self._last_timestamp = 0
@@ -71,3 +72,57 @@ def report_resource_with_step(self, step):
)
except Exception as e:
logger.warning(e)
+
+
+@singleton
+class TorchTrainingMonitor(object):
+ def __init__(self, metrics_path):
+ self._resource_monitor = ResourceMonitor()
+ self._last_timestamp = 0
+ self._start_time = 0
+ self._group_rank = int(os.getenv("WORKER_RANK", "0"))
+ self._master_client = GlobalMasterClient.MASTER_CLIENT
+ if os.path.exists(metrics_path):
+ os.remove(metrics_path)
+ self._metrics_path = metrics_path
+
+ def start(self):
+ self._resource_monitor.start()
+ self._resource_monitor.start_monitor_cpu()
+ thread = threading.Thread(
+ target=self._periodically_report_step,
+ name="report-step",
+ daemon=True,
+ )
+ thread.start()
+
+ def stop(self):
+ self._resource_monitor.stop()
+
+ def report_resource_with_step(self):
+ if self._group_rank != 0:
+ return
+ try:
+ if not os.path.exists(self._metrics_path):
+ return
+ with open(self._metrics_path, "r") as f:
+ record = json.load(f)
+ step = record.get("step", 0)
+ timestamp = record.get("timestamp", 0)
+ if step > 0 and timestamp - self._last_timestamp > 15:
+ self._resource_monitor.report_resource()
+ logger.info("Report global step = {}".format(step))
+ self._last_timestamp = timestamp
+ self._master_client.report_global_step(
+ step,
+ self._last_timestamp,
+ )
+ except Exception as e:
+ logger.warning(e)
+
+ def _periodically_report_step(self):
+ if self._group_rank != 0:
+ return
+ while True:
+ self.report_resource_with_step()
+ time.sleep(15)
diff --git a/dlrover/python/elastic_agent/sharding/client.py b/dlrover/python/elastic_agent/sharding/client.py
index 2c3656920..6dcd4b08d 100644
--- a/dlrover/python/elastic_agent/sharding/client.py
+++ b/dlrover/python/elastic_agent/sharding/client.py
@@ -22,7 +22,7 @@
from dlrover.python.common.log import default_logger as logger
from dlrover.python.elastic_agent.master_client import GlobalMasterClient
from dlrover.python.elastic_agent.monitor.training import (
- TrainingProcessReporter,
+ TFTrainingProcessReporter,
)
_DEFAULT_MINI_BATCH_NUM_PER_SHARD = 10
@@ -88,7 +88,7 @@ def __init__(
self._max_shard_count = sys.maxsize
self._shard_count = 0
self._report_sharding_params()
- self._training_reporter = TrainingProcessReporter()
+ self._training_reporter = TFTrainingProcessReporter()
def _report_sharding_params(self):
if self._num_epochs and self._dataset_size:
diff --git a/dlrover/python/elastic_agent/tensorflow/hooks.py b/dlrover/python/elastic_agent/tensorflow/hooks.py
index 70cbce0ed..202d5eb17 100644
--- a/dlrover/python/elastic_agent/tensorflow/hooks.py
+++ b/dlrover/python/elastic_agent/tensorflow/hooks.py
@@ -22,7 +22,7 @@
from dlrover.python.common.log import default_logger as logger
from dlrover.python.elastic_agent.master_client import GlobalMasterClient
from dlrover.python.elastic_agent.monitor.training import (
- TrainingProcessReporter,
+ TFTrainingProcessReporter,
is_tf_chief,
)
from dlrover.python.elastic_agent.sharding.client import ShardingClient
@@ -64,7 +64,7 @@ def __init__(self):
the DLRover master.
"""
self._is_chief = False
- self._training_reporter = TrainingProcessReporter()
+ self._training_reporter = TFTrainingProcessReporter()
self._training_reporter.called_in_tf_hook = True
self._global_step = 0
self._op_stats = None
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
index 9ab62d5a3..e339d2c98 100644
--- a/dlrover/python/elastic_agent/torch/training.py
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -51,6 +51,7 @@
from torch.distributed.launcher.api import LaunchConfig, _get_entrypoint_name
from dlrover.python.common.constants import (
+ ConfigPath,
NodeEnv,
NodeErrorMessage,
NodeStatus,
@@ -59,19 +60,17 @@
)
from dlrover.python.common.log import default_logger as logger
from dlrover.python.elastic_agent.master_client import GlobalMasterClient
-from dlrover.python.elastic_agent.monitor.resource import ResourceMonitor
+from dlrover.python.elastic_agent.monitor.training import TorchTrainingMonitor
from dlrover.python.elastic_agent.torch.master_kv_store import MasterKVStore
-from dlrover.trainer.constants.torch import WorkerEnv
__all__ = ["launch_agent"]
def _set_paral_config():
- config_dir = os.path.dirname(WorkerEnv.PARAL_CONFIG_PATH.default)
+ config_dir = os.path.dirname(ConfigPath.PARAL_CONFIG)
os.makedirs(config_dir, exist_ok=True)
- os.environ[
- WorkerEnv.PARAL_CONFIG_PATH.name
- ] = WorkerEnv.PARAL_CONFIG_PATH.default
+ os.environ[ConfigPath.ENV_PARAL_CONFIG] = ConfigPath.PARAL_CONFIG
+ os.environ[ConfigPath.ENV_RUNTIME_METRICS] = ConfigPath.RUNTIME_METRICS
@dataclass
@@ -266,7 +265,7 @@ def __init__(
def _periodically_update_paral_config(self):
while True:
config = self._client.get_paral_config()
- with open(WorkerEnv.PARAL_CONFIG_PATH.default, "w") as f:
+ with open(ConfigPath.PARAL_CONFIG, "w") as f:
f.write(config.to_json())
time.sleep(30)
@@ -516,7 +515,7 @@ def launch_agent(
f" metrics_cfg : {config.metrics_cfg}\n"
)
- monitor = ResourceMonitor()
+ monitor = TorchTrainingMonitor(ConfigPath.RUNTIME_METRICS)
monitor.start()
rdzv_parameters = RendezvousParameters(
backend=config.rdzv_backend,
diff --git a/dlrover/python/master/local_master.py b/dlrover/python/master/local_master.py
index 51d4373f8..c26f7011b 100644
--- a/dlrover/python/master/local_master.py
+++ b/dlrover/python/master/local_master.py
@@ -15,6 +15,7 @@
from typing import Dict
from dlrover.python.common.constants import (
+ NodeType,
OptimizeMode,
RendezvousName,
ReporterType,
@@ -49,6 +50,8 @@ def __init__(self, port, args: JobArgs):
)
self._master_server = self._create_master_grpc_service(port, args)
self._job_args = args
+ self.speed_monitor.add_running_worker(NodeType.WORKER, 0)
+ self.speed_monitor.set_target_worker_num(1)
def _create_master_grpc_service(self, port, params: JobArgs):
return create_master_service(
diff --git a/dlrover/trainer/constants/torch.py b/dlrover/trainer/constants/torch.py
deleted file mode 100644
index 99fa709ae..000000000
--- a/dlrover/trainer/constants/torch.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2023 The DLRover Authors. All rights reserved.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from dlrover.trainer.constants.constants import Constant
-
-
-class WorkerEnv(Constant):
- PARAL_CONFIG_PATH = Constant(
- "DLROVER_PARAL_CONFIG_PATH",
- "/tmp/dlrover/auto_paral_config.json",
- )
diff --git a/dlrover/trainer/torch/elastic/dataloader.py b/dlrover/trainer/torch/elastic/dataloader.py
index 4777aa7db..b0e789d8c 100644
--- a/dlrover/trainer/torch/elastic/dataloader.py
+++ b/dlrover/trainer/torch/elastic/dataloader.py
@@ -17,9 +17,9 @@
from torch.utils.data import DataLoader
-from dlrover.trainer.constants.torch import WorkerEnv
+from dlrover.python.common.constants import ConfigPath
-logging.basicConfig(level=logging.NOTSET)
+logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@@ -68,8 +68,8 @@ def __init__(self, *args, config_file=None, **kwargs):
self.config_file = config_file
if not self.config_file:
self.config_file = os.getenv(
- WorkerEnv.PARAL_CONFIG_PATH.name,
- WorkerEnv.PARAL_CONFIG_PATH.default,
+ ConfigPath.ENV_PARAL_CONFIG,
+ ConfigPath.PARAL_CONFIG,
)
if self.config_file:
self.load_config(self.config_file)
diff --git a/dlrover/trainer/torch/elastic/trainer.py b/dlrover/trainer/torch/elastic/trainer.py
index 915d57f81..3b2c1a3b9 100644
--- a/dlrover/trainer/torch/elastic/trainer.py
+++ b/dlrover/trainer/torch/elastic/trainer.py
@@ -14,13 +14,17 @@
import contextlib
import os
import socket
+import time
from contextlib import contextmanager
+from dataclasses import dataclass
from typing import Any, Dict, Optional
import torch
import torch.distributed as dist
+from dlrover.python.common.constants import ConfigPath
from dlrover.python.common.log import default_logger as logger
+from dlrover.python.common.serialize import JsonSerializable
def find_free_port() -> int:
@@ -39,6 +43,12 @@ def get_rank():
return rank
+@dataclass
+class TrainingRecord(JsonSerializable):
+ step: int = 0
+ timestamp: int = 0
+
+
class GradientState(object):
"""
Singleton class that has information related to gradient
@@ -252,6 +262,8 @@ def __init__(
self.use_fsdp = use_fsdp
self.ckpt_interval = ckpt_interval
self.shared_storage_path = shared_storage_path
+ self._report_step_interval = 15 # 15s
+ self._last_report_time = 0
def prepare(self, optimizer, lr_scheduler=None):
"""
@@ -383,9 +395,13 @@ def _after_step(self):
self._save_fsdp_ckpt()
if self.gradient_state.sync_gradients:
self.gradient_state.num_steps += 1
+ now = time.time()
+ if now - self._last_report_time > self._report_step_interval:
+ self.report_training_step()
+ self._last_report_time = now
def _set_gradient_accumulation_steps(self):
- max_worker_num = int(os.getenv("WORKER_NUM", 0))
+ max_worker_num = int(os.getenv("WORKER_NUM", 1))
if max_worker_num == 0:
self.gradient_accumulation_steps = 1
@@ -405,3 +421,12 @@ def _set_gradient_accumulation_steps(self):
cur_world_size,
self.gradient_accumulation_steps,
)
+
+ def report_training_step(self):
+ timestamp = time.time()
+ record = TrainingRecord(self.gradient_state.num_steps, timestamp)
+ metric_path = os.getenv(ConfigPath.ENV_RUNTIME_METRICS, "")
+ rank = get_rank()
+ if os.path.exists(os.path.dirname(metric_path)) and rank == 0:
+ with open(metric_path, "w") as f:
+ f.write(record.to_json(indent=4))
diff --git a/dlrover/trainer/torch/elastic_run.py b/dlrover/trainer/torch/elastic_run.py
index 8219df82b..e55aacf89 100644
--- a/dlrover/trainer/torch/elastic_run.py
+++ b/dlrover/trainer/torch/elastic_run.py
@@ -26,6 +26,7 @@
from torch.distributed.launcher.api import launch_agent as torch_launch_agent
from torch.distributed.run import config_from_args, get_args_parser
+from dlrover.python.common.constants import NodeEnv
from dlrover.python.common.grpc import find_free_port
from dlrover.python.common.log import default_logger as logger
from dlrover.python.elastic_agent.master_client import (
@@ -108,8 +109,10 @@ def _launch_dlrover_local_master():
cmd = os.getenv("PYTHON_EXEC", sys.executable)
host = "127.0.0.1"
port = find_free_port()
- log_dir = tempfile.mkdtemp(prefix="dlrover_master_")
- job_name = log_dir.split("_")[-1]
+ root_dir = "/tmp/dlrover_master/"
+ os.makedirs(root_dir, exist_ok=True)
+ log_dir = tempfile.mkdtemp(prefix="", dir=root_dir)
+ job_name = log_dir.split("/")[-1]
stdout = os.path.join(log_dir, "stdout.log")
stderr = os.path.join(log_dir, "stderror.log")
logger.info(f"The master log file:\n stdout: {stdout} \n stderr: {stderr}")
@@ -147,10 +150,11 @@ def _check_dlrover_master_available(addr, timeout=60):
def run(args):
master_handler = None
- master_addr = os.getenv("DLROVER_MASTER_ADDR", "")
+ master_addr = os.getenv(NodeEnv.DLROVER_MASTER_ADDR, "")
use_dlrover_launch = False
if args.standalone:
master_handler, master_addr = _launch_dlrover_local_master()
+ os.environ[NodeEnv.DLROVER_MASTER_ADDR] = master_addr
if _check_dlrover_master_available(master_addr):
GlobalMasterClient.MASTER_CLIENT = build_master_client(master_addr)
use_dlrover_launch = True
| [Feature]: Collect the training throughput and the elapsed time of each step.
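A minimal sketch of the file-based protocol this patch introduces: rank 0 of the training process writes a `{step, timestamp}` record to the path in the `RUNTIME_METRICS_PATH` env variable, and the agent-side monitor reads it back to report the global step:
```python
import json
import os
import time

path = os.getenv("RUNTIME_METRICS_PATH", "/tmp/dlrover/runtime_metrics.json")

def report_training_step(step):
    """Trainer side (rank 0): dump the global step with a timestamp."""
    with open(path, "w") as f:
        json.dump({"step": step, "timestamp": time.time()}, f)

def read_training_step():
    """Monitor side: read the record to report the step and throughput."""
    if not os.path.exists(path):
        return 0, 0
    with open(path, "r") as f:
        record = json.load(f)
    return record.get("step", 0), record.get("timestamp", 0)
```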
| 2023-09-11T09:39:05 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-497 | 98c1f61ad4f5119b1a3ae065bcbc6416303840f0 | diff --git a/dlrover/python/master/cluster/quota.py b/dlrover/python/master/cluster/quota.py
index 6f4cd92a6..7a47380e4 100644
--- a/dlrover/python/master/cluster/quota.py
+++ b/dlrover/python/master/cluster/quota.py
@@ -17,13 +17,21 @@
class QuotaChecker(metaclass=ABCMeta):
@abstractmethod
- def get_avaliable_worker_num(self):
+ def get_free_node_num(self):
pass
class UnlimitedQuotaChecker(QuotaChecker):
"""No resource limits."""
- def get_avaliable_worker_num(self):
+ def get_free_node_num(self):
"""Assume there is always enough resource."""
return sys.maxsize
+
+
+class NoFreeQuotaChecker(QuotaChecker):
+ """No any free nodes."""
+
+ def get_free_node_num(self):
+ """Assume there is always enough resource."""
+ return 0
diff --git a/dlrover/python/master/node/job_auto_scaler.py b/dlrover/python/master/node/job_auto_scaler.py
index 8aa1a53e7..ef8e337b6 100644
--- a/dlrover/python/master/node/job_auto_scaler.py
+++ b/dlrover/python/master/node/job_auto_scaler.py
@@ -11,7 +11,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import copy
import threading
import time
from abc import ABCMeta, abstractmethod
@@ -25,7 +24,6 @@
from dlrover.python.common.global_context import Context
from dlrover.python.common.log import default_logger as logger
from dlrover.python.common.node import Node, NodeResource
-from dlrover.python.master.cluster.quota import UnlimitedQuotaChecker
from dlrover.python.master.monitor.speed_monitor import SpeedMonitor
from dlrover.python.master.node.ps import ParameterServerManager
from dlrover.python.master.node.worker import WorkerManager
@@ -249,16 +247,13 @@ def __init__(
self._scaler = node_scaler
self._workers = job_nodes[NodeType.WORKER]
self._autoscaling_started = False
- self._resource_checker = UnlimitedQuotaChecker()
- self._schedule_worker_base2 = False
+ self._scale_interval = 30
def start_auto_scaling(self):
"""Start auto-scaling nodes of a job"""
if not self._autoscaling_started:
self._autoscaling_started = True
- plan = self._job_optimizer.get_job_resource_plan()
- self.execute_job_optimization_plan(plan)
- if self._schedule_worker_base2:
+ if _dlrover_context.auto_worker_enabled:
threading.Thread(
target=self._periodic_adjust_worker,
name="allreduce-autoscaler",
@@ -266,43 +261,17 @@ def start_auto_scaling(self):
).start()
def _periodic_adjust_worker(self):
- """Adjust the number of worker according to the available number
- of workers.
- For example: There are 4 alive workers and the configured number of
- worker is 16. However, the available number of worker in the cluster
- if 5. The auto-scaler will scale worker count to 8 not 9 to collaborate
- with the optimizer to keep the total batch size fixed.
- """
+ """Periodicaly adjust the number of worker."""
+ logger.info("Start to auto scale the number of workers.")
while True:
- time.sleep(30)
- available_num = self._get_available_worker_num()
+ time.sleep(self._scale_interval)
alive_num = self._get_alive_worker_num()
- if available_num <= alive_num:
+ self._job_optimizer.set_alive_node_num(alive_num)
+ plan = self._job_optimizer.get_job_resource_plan()
+ new_worker_num = plan.node_group_resources[NodeType.WORKER].count
+ if new_worker_num <= alive_num:
continue
- worker_resource = copy.deepcopy(
- self._job_resource.node_group_resources[NodeType.WORKER]
- )
- worker_resource.count = available_num
- plan = self._worker_manager.adjust_worker(worker_resource)
- self._scaler.scale(plan)
-
- def _get_available_worker_num(self):
- """
- Get the number of available GPU to scale up.
- """
- worker_resource = self._job_resource.node_group_resources[
- NodeType.WORKER
- ]
- worker_num = worker_resource.count
- alive_worker_num = self._get_alive_worker_num()
- while worker_num > alive_worker_num:
- required_num = worker_num - alive_worker_num
- available_num = self._resource_checker.get_avaliable_worker_num()
- if available_num > required_num:
- return worker_num
- else:
- worker_num = worker_num >> 1
- return alive_worker_num
+ self.execute_job_optimization_plan(plan)
def _get_alive_worker_num(self):
worker_num = 0
diff --git a/dlrover/python/master/node/job_manager.py b/dlrover/python/master/node/job_manager.py
index 3f5eeed99..35f040b21 100644
--- a/dlrover/python/master/node/job_manager.py
+++ b/dlrover/python/master/node/job_manager.py
@@ -677,6 +677,10 @@ def handle_training_failure(
else:
self._error_monitor.handle_node_error(node, error_data)
+ def update_allreduce_node_unit(self, node_unit):
+ if isinstance(self._job_optimizer, AllreduceJobResourceOptimizer):
+ self._job_optimizer.set_node_unit(node_unit)
+
def create_job_manager(args: JobArgs, speed_monitor) -> JobManager:
critical_worker_index = get_critical_worker_index(args)
diff --git a/dlrover/python/master/resource/job.py b/dlrover/python/master/resource/job.py
index 2b51f7b37..890ca7572 100644
--- a/dlrover/python/master/resource/job.py
+++ b/dlrover/python/master/resource/job.py
@@ -32,7 +32,7 @@
from dlrover.python.master.resource.brain_optimizer import (
BrainResoureOptimizer,
)
-from dlrover.python.master.resource.local_optimizer import LocalOptimizer
+from dlrover.python.master.resource.local_optimizer import PSLocalOptimizer
from dlrover.python.master.resource.optimizer import (
ResourcePlan,
SimpleOptimizer,
@@ -44,7 +44,7 @@
_dlrover_context = Context.singleton_instance()
-def new_resource_optimizer(
+def new_ps_resource_optimizer(
optimize_mode: str, job_uuid, resoure_limits: ResourceLimits
):
logger.info(
@@ -57,9 +57,9 @@ def new_resource_optimizer(
logger.warning(
"Brain service is not available, use a local optimizer"
)
- return LocalOptimizer(job_uuid, resoure_limits)
+ return PSLocalOptimizer(job_uuid, resoure_limits)
elif optimize_mode == OptimizeMode.SINGLE_JOB:
- return LocalOptimizer(job_uuid, resoure_limits)
+ return PSLocalOptimizer(job_uuid, resoure_limits)
else:
logger.warning(
"Not support optiimzem mode %s, use a simple optimizer",
@@ -179,7 +179,7 @@ def init_job_resource(self, job_resource: JobResource):
pass
@abstractmethod
- def get_job_resource_plan(self):
+ def get_job_resource_plan(self) -> ResourcePlan:
"""Get resource plan for a job."""
pass
@@ -208,7 +208,7 @@ def __init__(
self._ps_resource = ps_resource
self._original_worker_resource = copy.deepcopy(self._worker_resource)
self._original_ps_resource = copy.deepcopy(self._ps_resource)
- self._resource_optimizer = new_resource_optimizer(
+ self._resource_optimizer = new_ps_resource_optimizer(
optimize_mode, job_uuid, resource_limits
)
self._lock = threading.Lock()
@@ -506,22 +506,32 @@ def __init__(
self._original_worker_resource = copy.deepcopy(self._worker_resource)
self._job_uuid = job_uuid
self._lock = threading.Lock()
+ self._node_unit = 1
+ self._alive_node_num = 0
def update_job_uuid(self, job_uuid):
pass
def init_job_resource(self, job_resource: JobResource):
- """The job only launches the first worker at begining and
- launches workers once the first worker is running"""
- job_resource.node_group_resources[NodeType.WORKER].count = 1
+ pass
- def get_job_resource_plan(self):
- """Get resource plan for a job."""
+ def get_job_resource_plan(self) -> ResourcePlan:
+ """Check wether there are free nodes in the cluster."""
plan = ResourcePlan()
- worker_config = self._original_worker_resource
+ worker_config = copy.deepcopy(self._original_worker_resource)
+ max_node_num = self._original_worker_resource.count
+ request_num = max_node_num - self._alive_node_num
+ free_num = self._get_free_gpu_node()
+ free_num = (free_num // self._node_unit) * self._node_unit
+ new_num = min(free_num, request_num)
+ worker_config.count = self._alive_node_num + new_num
plan.node_group_resources[NodeType.WORKER] = worker_config
return plan
+    # TODO: implement the function to query the number of free GPU nodes.
+ def _get_free_gpu_node(self):
+ return 0
+
def adjust_oom_resource(self, node: Node):
"""Adjust the resource configuration for OOM nodes"""
node.config_resource.memory *= 2
@@ -531,3 +541,9 @@ def get_config_resource(self):
worker_config = self._original_worker_resource
job_config.node_group_resources[NodeType.WORKER] = worker_config
return job_config
+
+ def set_node_unit(self, node_unit):
+ self._node_unit = node_unit
+
+ def set_alive_node_num(self, node_num):
+ self._alive_node_num = node_num
diff --git a/dlrover/python/master/resource/local_optimizer.py b/dlrover/python/master/resource/local_optimizer.py
index c64c662da..a42040eff 100644
--- a/dlrover/python/master/resource/local_optimizer.py
+++ b/dlrover/python/master/resource/local_optimizer.py
@@ -63,8 +63,8 @@ def __init__(self, worker_cpu, ps_cpu, worker_memory) -> None:
self.worker_memory = worker_memory
-class LocalOptimizer(ResourceOptimizer):
- """Local optimizer stores runtime statistics locally and optimize
+class PSLocalOptimizer(ResourceOptimizer):
+ """PS Local optimizer stores runtime statistics locally and optimize
the resource of a training job without cluster information.
"""
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index 940cb2207..986e909c6 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -418,6 +418,10 @@ def num_nodes_waiting(self, request, _):
return res
def report_rdzv_params(self, request, _):
+ # Enable auto-scaling workers if elasticity is enabled.
+ _dlrover_context.auto_worker_enabled = (
+ request.max_nodes > request.min_nodes
+ )
for manager in self._rdzv_managers.values():
manager.update_rdzv_params(
min_nodes=request.min_nodes,
@@ -425,6 +429,7 @@ def report_rdzv_params(self, request, _):
waiting_timeout=request.waiting_timeout,
node_unit=request.node_unit,
)
+ self._job_manager.update_allreduce_node_unit(request.node_unit)
res = elastic_training_pb2.Response()
res.success = True
return res
| Scale down the number of nodes by the node unit if there are not enough free nodes.
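A worked example of the rounding rule in the new `get_job_resource_plan`: the free node count is rounded down to a multiple of `node_unit` before scaling up:
```python
max_node_num, alive_node_num, node_unit = 16, 4, 4
free_num = 7  # free nodes reported by the cluster quota checker

request_num = max_node_num - alive_node_num      # 12 nodes still requested
free_num = (free_num // node_unit) * node_unit   # 7 -> 4, keep the node unit
new_num = min(free_num, request_num)             # scale up by only 4 nodes
worker_count = alive_node_num + new_num          # the plan asks for 8 workers
```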
| 2023-07-12T09:27:37 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-466 | d481dd3b4fae77176b6a325683fe7c48f5b0e38a | diff --git a/dlrover/proto/elastic_training.proto b/dlrover/proto/elastic_training.proto
index 3bdd7c78c..4ecf70a46 100644
--- a/dlrover/proto/elastic_training.proto
+++ b/dlrover/proto/elastic_training.proto
@@ -170,7 +170,7 @@ message NodeMeta {
string gpu_type = 6;
int32 id = 7;
int32 rank = 8;
- bool normal = 9;
+ string status = 9;
}
message NodeEvent {
@@ -288,7 +288,6 @@ service Master {
rpc kv_store_get(KeyValuePair) returns (KeyValuePair);
rpc report_failure(NodeFailure) returns (Response);
rpc network_check_success(RendezvousRequest) returns (Response);
- rpc report_network_check_result(NodeMeta) returns (Response);
rpc report_prestop(ReportPreStopRequest) returns (google.protobuf.Empty);
rpc update_node_status(NodeMeta) returns (Response);
diff --git a/dlrover/python/common/constants.py b/dlrover/python/common/constants.py
index 6e22bf344..38b99eb9a 100644
--- a/dlrover/python/common/constants.py
+++ b/dlrover/python/common/constants.py
@@ -74,6 +74,7 @@ class NodeStatus(object):
INITIAL = "Initial"
DELETED = "Deleted"
UNKNOWN = "Unknown"
+ BREAKDOWN = "Breakdown"
class NodeEventType(object):
diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index ad64d7a59..ec86b0b88 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -328,24 +328,35 @@ def get_running_nodes(self):
return response.nodes
@retry_grpc_request
- def num_nodes_waiting(self):
- request = empty_pb2.Empty()
+ def num_nodes_waiting(self, rdzv_name):
+ request = elastic_training_pb2.RendezvousRequest()
+ request.rdzv_name = rdzv_name
response = self._stub.num_nodes_waiting(request)
return response.waiting_num
@retry_grpc_request
- def join_rendezvous(self, node_id, local_world_size):
- request = elastic_training_pb2.NodeMeta()
- request.id = node_id
+ def join_rendezvous(self, node_id, local_world_size, rdzv_name=""):
+ request = elastic_training_pb2.RendezvousRequest()
+ request.node_id = node_id
request.local_world_size = local_world_size
+ request.rdzv_name = rdzv_name
response = self._stub.join_rendezvous(request)
return response.round
@retry_grpc_request
- def get_comm_world(self):
- request = empty_pb2.Empty()
+ def get_comm_world(self, rdzv_name, node_id):
+ request = elastic_training_pb2.RendezvousRequest()
+ request.node_id = node_id
+ request.rdzv_name = rdzv_name
response = self._stub.get_comm_world(request)
- return response.world
+ return response.group, response.world
+
+ @retry_grpc_request
+ def network_check_success(self, node_id):
+ request = elastic_training_pb2.RendezvousRequest()
+ request.node_id = node_id
+ response = self._stub.network_check_success(request)
+ return response.success
@retry_grpc_request
def report_rdzv_params(self, min_nodes, max_nodes, waiting_timeout):
@@ -372,15 +383,14 @@ def kv_store_get(self, key):
return response.value
@retry_grpc_request
- def report_node_status(self, rank):
- if rank is None:
- return
+ def report_node_status(self, status):
request = elastic_training_pb2.NodeMeta()
request.id = self._node_id
request.type = self._node_type
- request.rank = int(rank)
+ request.status = status
self._stub.update_node_status(request)
+ @retry_grpc_request
def report_failures(self, error_data):
request = elastic_training_pb2.NodeFailure()
request.node_id = self._node_id
@@ -457,8 +467,7 @@ def __init__(self, node_id):
self._datasets: Dict[str, LocalDataset] = {}
self._task_type = None
self._kv_store: Dict[str, str] = {}
- self._rdzv_states: Dict[str, bytes] = {}
- self._rdzv_tokens: Dict[str, int] = {}
+ self._rdzv_nodes: Dict[int, int] = {}
def reset_dataset(self, dataset_name):
"""Reset a dataset
@@ -545,16 +554,37 @@ def report_model_metric(self, *args):
def report_used_resource(self, memory, cpu):
return empty_pb2.Empty()
+ def num_nodes_waiting(self):
+ return 0
+
+ def join_rendezvous(
+ self, node_id, local_world_size, rdzv_name="", round=0
+ ):
+ self._rdzv_nodes[node_id] = local_world_size
+ return 0
+
+ def get_comm_world(self, *args, **kwargs):
+ return 0, self._rdzv_nodes
+
+ def network_check_success(self, node_id):
+ return True
+
+ def report_node_status(self, status):
+ return True
+
+ def report_rdzv_params(self, min_nodes, max_nodes, waiting_timeout):
+ return True
+
def kv_store_set(self, key, value):
self._kv_store[key] = value
+ logger.info(self._kv_store)
return True
def kv_store_get(self, key):
- return self._kv_store.get(key, "")
+ return self._kv_store.get(key, b"")
- def report_node_status(self, rank):
- logger.info(f"Report rank {rank}")
- return
+ def report_failures(self, error_data):
+ return True
def build_master_client(master_addr=None):
diff --git a/dlrover/python/elastic_agent/torch/training.py b/dlrover/python/elastic_agent/torch/training.py
new file mode 100644
index 000000000..42bca816e
--- /dev/null
+++ b/dlrover/python/elastic_agent/torch/training.py
@@ -0,0 +1,704 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import copy
+import functools
+import json
+import os
+import socket
+import tempfile
+import time
+import uuid
+from dataclasses import dataclass
+from datetime import datetime, timedelta
+from typing import Any, Callable, Dict, List, Optional, Union
+
+import torch.distributed.elastic.timer as timer
+from torch.distributed import PrefixStore, Store
+from torch.distributed.elastic import events, metrics
+from torch.distributed.elastic.agent.server.api import (
+ DEFAULT_ROLE,
+ RunResult,
+ Worker,
+ WorkerGroup,
+ WorkerSpec,
+ WorkerState,
+ _get_fq_hostname,
+ _RoleInstanceInfo,
+)
+from torch.distributed.elastic.agent.server.local_elastic_agent import (
+ LocalElasticAgent,
+)
+from torch.distributed.elastic.metrics import put_metric
+from torch.distributed.elastic.metrics.api import prof
+from torch.distributed.elastic.multiprocessing import PContext, SignalException
+from torch.distributed.elastic.multiprocessing.errors import (
+ ChildFailedError,
+ ProcessFailure,
+)
+from torch.distributed.elastic.rendezvous import RendezvousParameters
+from torch.distributed.elastic.rendezvous.api import RendezvousHandler
+from torch.distributed.launcher.api import LaunchConfig, _get_entrypoint_name
+
+from dlrover.python.common.constants import NodeEnv, NodeStatus, RendezvousName
+from dlrover.python.common.log import default_logger as logger
+from dlrover.python.elastic_agent.master_client import GlobalMasterClient
+from dlrover.python.elastic_agent.torch.master_kv_store import MasterKVStore
+
+__all__ = ["launch_agent"]
+
+
+@dataclass
+class ProcessError:
+ local_rank: int
+ exitcode: int
+ message: str
+ datetime: Any
+
+
+class MasterRendezvousHandler(RendezvousHandler):
+ def __init__(self, name, node_id, rdzv_params: RendezvousParameters):
+ self._name = name
+ self._node_id = node_id
+ self._rdzv_params = rdzv_params
+ self.join_timeout = rdzv_params.get("join_timeout", 600)
+ self._client = GlobalMasterClient.MASTER_CLIENT
+ self._store = MasterKVStore(self._name, timedelta(seconds=60))
+ lastcall_timeout = rdzv_params.get("lastcall_timeout", 60)
+ self._client.report_rdzv_params(
+ rdzv_params.min_nodes,
+ rdzv_params.max_nodes,
+ lastcall_timeout,
+ )
+
+ def get_backend(self) -> str:
+ return "dlrover-master"
+
+ def is_closed(self) -> bool:
+ return False
+
+ def set_closed(self):
+ """Marks the rendezvous as closed."""
+ pass
+
+ def join_rendezvous(self, local_world_size):
+ """The node joins a rendezvous by sending its
+ ID and local world size.
+ """
+ round = self._client.join_rendezvous(
+ self._node_id, local_world_size, rdzv_name=self._name
+ )
+ return round
+
+ def next_rendezvous(self, round):
+ """The handler will periodically query the world from the master until
+ the world is not empty. The world is a dictionary
+ like {0: 8, 1: 8, 2: 8} where the key is the node ID and the value is
+ the local world size. The handler can get its rank by the position
+ of its node ID in the world.
+ """
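+ # For example (illustrative values, matching the code below): given
+ # the sorted world {0: 8, 1: 8, 2: 8}, the node with ID 1 gets rank 1
+ # (its position among the sorted keys) and the world size is 3.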
+ start_join = time.time()
+ node_name = os.getenv("POD_NAME", "")
+ msg = (
+ f"The node {node_name} attempts to join the next round of the "
+ f"rendezvous '{self._name}' with timeout {self.join_timeout}."
+ )
+ logger.info(msg)
+ while True:
+ group, world = self._client.get_comm_world(
+ self._name, self._node_id
+ )
+ world = dict(sorted(world.items()))
+ if world:
+ break
+ if time.time() - start_join > self.join_timeout:
+ raise TimeoutError(
+ f"Timeout {self.join_timeout}s to complete next rendezvous."
+ )
+ time.sleep(3)
+ rank = list(world.keys()).index(self._node_id)
+ world_size = len(world)
+ logger.info(
+ f"The node {node_name} has joined round {round} of "
+ f"the {self._name} rendezvous as rank {rank} in a world of size "
+ f"{world_size}."
+ )
+ store = self._get_store(round, group)
+ return store, world
+
+ def _get_store(self, round, group) -> Store:
+ key_prefix = f"torch.rendezvous.{self._name}.{round}.{group}"
+ return PrefixStore(key_prefix, self._store)
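+ # For example (illustrative): round 2 of the "network-check"
+ # rendezvous with group 1 yields the prefix
+ # "torch.rendezvous.network-check.2.1", so keys from different
+ # rounds and groups never collide in the master KV store.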
+
+ def num_nodes_waiting(self) -> int:
+ return self._client.num_nodes_waiting(self._name)
+
+ def get_run_id(self) -> str:
+ """Returns the run id of the rendezvous.
+
+ The run id is a user-defined id that uniquely identifies an instance of
+ a distributed application. It typically maps to a job id and is used to
+ allow nodes to join the correct distributed application.
+ """
+ return self._rdzv_params.run_id
+
+ def shutdown(self) -> bool:
+ """Closes all resources that were open for the rendezvous.
+
+ Example::
+
+ rdzv_handler = ...
+ try:
+ store, rank, world_size = rdzv_handler.next_rendezvous()
+ finally:
+ rdzv_handler.shutdown()
+ """
+ pass
+
+
+class ElasticTrainingAgent(LocalElasticAgent):
+ """
+ An implementation of :py:class:`torchelastic.agent.server.ElasticAgent`
+ that handles host-local workers.
+ This agent is deployed per host and is configured to spawn ``n`` workers.
+ When using GPUs, ``n`` maps to the number of GPUs available on the host.
+
+ The agent selects whether to fail or relaunch subprocesses according
+ to the failure reason of the subprocess. Currently, if the exit code
+ is not 1, the agent fails and DLRover relaunches the node, because we
+ find the exit code is 1 when the hardware breaks down.
+ """
+
+ def __init__(
+ self,
+ node_id,
+ config,
+ entrypoint,
+ spec: WorkerSpec,
+ start_method="spawn",
+ exit_barrier_timeout: float = 300,
+ log_dir: Optional[str] = None,
+ ):
+ super().__init__(spec, exit_barrier_timeout)
+ self._node_id = node_id
+ self._config = config
+ self._entrypoint = entrypoint
+ self._start_method = start_method
+ self._pcontext: Optional[PContext] = None
+ self._log_dir = log_dir or tempfile.mkdtemp(prefix="torchelastic_")
+ self._worker_watchdog: Optional[timer.FileTimerServer] = None
+ self._reamining_fo_count: int = self._remaining_restarts
+ self._client = GlobalMasterClient.MASTER_CLIENT
+
+ @prof
+ def _rendezvous(self, worker_group: WorkerGroup) -> None:
+ r"""
+ Runs rendezvous for the workers specified by worker spec.
+ Assigns workers a new global rank and world size.
+ Updates the rendezvous store for the worker group.
+ """
+
+ spec = worker_group.spec
+ round = spec.rdzv_handler.join_rendezvous(spec.local_world_size)
+ store, world = spec.rdzv_handler.next_rendezvous(round)
+ self._store = store
+ group_world_size = len(world)
+ group_rank = list(world.keys()).index(self._node_id)
+
+ workers = self._assign_worker_ranks(self._node_id, world, spec)
+ worker_group.workers = workers
+ worker_group.store = store
+ worker_group.group_rank = group_rank
+ worker_group.group_world_size = group_world_size
+
+ if group_rank == 0:
+ self._set_master_addr_port(
+ store,
+ spec.master_addr,
+ spec.master_port,
+ spec.local_addr,
+ )
+
+ master_addr, master_port = self._get_master_addr_port(store)
+ restart_count = spec.max_restarts - self._remaining_restarts
+
+ logger.info(
+ f"[{spec.role}] Rendezvous complete for workers. Result:\n"
+ f" restart_count={restart_count}\n"
+ f" master_addr={master_addr}\n"
+ f" master_port={master_port}\n"
+ f" group_rank={group_rank}\n"
+ f" group_world_size={group_world_size}\n"
+ f" local_ranks={[worker.local_rank for worker in workers]}\n"
+ f" role_ranks={[worker.role_rank for worker in workers]}\n"
+ f" global_ranks={[worker.global_rank for worker in workers]}\n"
+ f" role_world_sizes="
+ f"{[worker.role_world_size for worker in workers]}\n"
+ f" global_world_sizes="
+ f"{[worker.world_size for worker in workers]}\n"
+ )
+
+ # pyre-fixme[56]: Pyre was not able to infer the type of the decorator
+ # `torch.distributed.elastic.metrics.prof`.
+ @prof
+ def _assign_worker_ranks(
+ self, node_id, world, spec: WorkerSpec
+ ) -> List[Worker]:
+ """
+ Determines proper ranks for worker processes. The rank assignment
+ is done according to the following algorithm:
+
+ 1. Each agent writes its configuration(group_rank, group_world_size
+ , num_workers) to the common store.
+ 2. Each agent retrieves configuration for all agents
+ and performs two level sort using role and rank.
+ 3. Determine the global rank: the global rank of workers for the
+ current agent is the offset of infos array up to group_rank
+ of the agent. The offset is computed as a sum of local_world_size
+ of all agents that have rank less than the group_rank.
+ The workers would have the ranks: [offset, offset+local_world_size)
+ 4. Determine the role rank: The role rank is determined using the
+ algorithms in the point 3 with the exception that the offset is
+ done from the first agent that has the same role as current one
+ and has the minimum group rank.
+ """
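+ # Illustrative example: with world {0: 8, 1: 8} and node_id 1, the
+ # offset of this node is 8 (the sum of the local world sizes of the
+ # nodes ranked before it), so its workers get global ranks [8, 16)
+ # in a world of size 16.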
+
+ role_infos: List[_RoleInstanceInfo] = []
+ nodes = list(world.keys())
+ for i, local_world_size in world.items():
+ group_rank = nodes.index(i)
+ role_info = _RoleInstanceInfo(
+ spec.role, group_rank, local_world_size
+ )
+ role_infos.append(role_info)
+ group_rank = nodes.index(node_id)
+ my_role_info = role_infos[group_rank]
+ worker_world_size, worker_global_ranks = self._get_ranks(
+ role_infos, group_rank
+ )
+ role_infos = sorted(
+ role_infos, key=functools.cmp_to_key(_RoleInstanceInfo.compare)
+ )
+ role_start_idx, role_end_idx = _RoleInstanceInfo.find_role_boundaries(
+ role_infos, my_role_info.role
+ )
+ role_pos = next(
+ idx
+ for idx, role_info in enumerate(role_infos)
+ if _RoleInstanceInfo.compare(role_info, my_role_info) == 0
+ )
+ role_world_size, role_ranks = self._get_ranks(
+ role_infos, role_pos, role_start_idx, role_end_idx + 1
+ )
+ workers = []
+ for ind in range(spec.local_world_size):
+ worker = Worker(
+ local_rank=ind,
+ global_rank=worker_global_ranks[ind],
+ role_rank=role_ranks[ind],
+ world_size=worker_world_size,
+ role_world_size=role_world_size,
+ )
+ workers.append(worker)
+ return workers
+
+ def _initialize_workers(self, worker_group):
+ if self._config.network_check:
+ run_network_check(self._config, self._entrypoint)
+ super()._initialize_workers(worker_group)
+
+ def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
+ # NOTE: currently only works for a single role
+
+ spec = self._worker_group.spec
+ role = spec.role
+
+ logger.info(
+ f"[{role}] starting workers for entrypoint: "
+ f"{spec.get_entrypoint_name()}"
+ )
+
+ self._initialize_workers(self._worker_group)
+ monitor_interval = spec.monitor_interval
+ rdzv_handler = spec.rdzv_handler
+
+ while True:
+ assert self._worker_group.state != WorkerState.INIT
+ time.sleep(monitor_interval)
+ run_result = self._monitor_workers(self._worker_group)
+ state = run_result.state
+ self._worker_group.state = state
+
+ put_metric(
+ f"workers.{role}.remaining_restarts", self._remaining_restarts
+ )
+ put_metric(f"workers.{role}.{state.name.lower()}", 1)
+
+ if state == WorkerState.SUCCEEDED:
+ logger.info(
+ f"[{role}] worker group successfully finished."
+ f" Waiting {self._exit_barrier_timeout} seconds "
+ "for other agents to finish."
+ )
+ self._exit_barrier()
+ return run_result
+ elif state in {WorkerState.UNHEALTHY, WorkerState.FAILED}:
+ self._report_failure_to_master(run_result.failures)
+ if self._reamining_fo_count > 0:
+ logger.info(
+ f"[{role}] Worker group {state.name}. "
+ f"{self._remaining_restarts}/{spec.max_restarts}"
+ f" attempts left; will restart worker group"
+ )
+ self._reamining_fo_count -= 1
+ self._restart_workers(self._worker_group)
+ else:
+ self._stop_workers(self._worker_group)
+ self._worker_group.state = WorkerState.FAILED
+ return run_result
+ elif state == WorkerState.HEALTHY:
+ # membership changes do not count as retries
+ if self._membership_changed(role, rdzv_handler):
+ self._restart_workers(self._worker_group)
+ else:
+ raise Exception(f"[{role}] Worker group in {state.name} state")
+
+ def _report_failure_to_master(self, failures: Dict[int, ProcessFailure]):
+ errors = {}
+ for rank, failure in failures.items():
+ dt = str(datetime.utcfromtimestamp(int(failure.timestamp)))
+ error = ProcessError(
+ failure.local_rank, failure.exitcode, failure.message, dt
+ )
+ errors[rank] = error.__dict__
+ error_data = json.dumps(errors)
+ self._client.report_failures(error_data)
+
+ def _restart_workers(self, worker_group: WorkerGroup):
+ self._remaining_restarts -= 1
+ super()._restart_workers(worker_group)
+
+ def should_shutdown_rdzv(self):
+ return self._reamining_fo_count == 0
+
+ def _membership_changed(self, role, rdzv_handler: RendezvousHandler):
+ # A timeout may happen when querying the TCPStore.
+ try:
+ num_nodes_waiting = rdzv_handler.num_nodes_waiting()
+ except Exception as e:
+ logger.warning("Failed to call num_nodes_waiting: %s", e)
+ num_nodes_waiting = 0
+
+ group_rank = self._worker_group.group_rank
+ if num_nodes_waiting > 0:
+ logger.info(
+ f"[{role}] Detected {num_nodes_waiting} "
+ f"new nodes from group_rank={group_rank}; "
+ f"will restart worker group"
+ )
+ return True
+ return False
+
+
+def launch_agent(
+ config: LaunchConfig,
+ entrypoint: Union[Callable, str, None],
+ args: List[Any],
+) -> Dict[int, Any]:
+ if not config.run_id:
+ run_id = str(uuid.uuid4().int)
+ logger.warning(
+ f"config has no run_id, generated a random run_id: {run_id}"
+ )
+ config.run_id = run_id
+
+ entrypoint_name = _get_entrypoint_name(entrypoint, args)
+ node_id = int(os.getenv(NodeEnv.WORKER_ID, 0))
+
+ logger.info(
+ f"Starting elastic_operator with launch configs:\n"
+ f" entrypoint : {entrypoint_name}\n"
+ f" min_nodes : {config.min_nodes}\n"
+ f" max_nodes : {config.max_nodes}\n"
+ f" nproc_per_node : {config.nproc_per_node}\n"
+ f" run_id : {config.run_id}\n"
+ f" rdzv_backend : {config.rdzv_backend}\n"
+ f" rdzv_endpoint : {config.rdzv_endpoint}\n"
+ f" rdzv_configs : {config.rdzv_configs}\n"
+ f" max_restarts : {config.max_restarts}\n"
+ f" monitor_interval : {config.monitor_interval}\n"
+ f" log_dir : {config.log_dir}\n"
+ f" metrics_cfg : {config.metrics_cfg}\n"
+ )
+
+ rdzv_parameters = RendezvousParameters(
+ backend=config.rdzv_backend,
+ endpoint=config.rdzv_endpoint,
+ run_id=config.run_id,
+ min_nodes=config.min_nodes,
+ max_nodes=config.max_nodes,
+ local_addr=config.local_addr,
+ **config.rdzv_configs,
+ )
+
+ master_addr = os.environ.get(
+ "MY_POD_IP", socket.gethostbyname(_get_fq_hostname())
+ )
+
+ rdzv_handler = MasterRendezvousHandler(
+ RendezvousName.ELASTIC_TRAINING,
+ node_id,
+ rdzv_parameters,
+ )
+ spec = WorkerSpec(
+ role=config.role,
+ local_world_size=config.nproc_per_node,
+ entrypoint=entrypoint,
+ args=tuple(args),
+ rdzv_handler=rdzv_handler,
+ max_restarts=config.max_restarts,
+ monitor_interval=config.monitor_interval,
+ redirects=config.redirects,
+ tee=config.tee,
+ master_addr=master_addr,
+ local_addr=config.local_addr,
+ )
+
+ agent = ElasticTrainingAgent(
+ node_id=node_id,
+ config=config,
+ entrypoint=entrypoint,
+ spec=spec,
+ start_method=config.start_method,
+ log_dir=config.log_dir,
+ )
+
+ shutdown_rdzv = True
+ try:
+ metrics.initialize_metrics(metrics.MetricsConfig(config.metrics_cfg))
+
+ result = agent.run()
+ # records that agent.run() has succeeded NOT
+ # that workers have succeeded
+ events.record(agent.get_event_succeeded())
+
+ if result.is_failed():
+ # ChildFailedError is treated specially by @record
+ # if the error files for the failed children exist
+ # @record will copy the first error (root cause)
+ # to the error file of the launcher process.
+ raise ChildFailedError(
+ name=entrypoint_name,
+ failures=result.failures,
+ )
+
+ return result.return_values
+ except ChildFailedError:
+ if not agent.should_shutdown_rdzv():
+ shutdown_rdzv = False
+ raise
+ except SignalException:
+ # when the agent dies with a signal do NOT shutdown the rdzv_handler
+ # since this closes the rendezvous on this rdzv_id permanently and
+ # prevents any additional scaling events
+ shutdown_rdzv = False
+ events.record(agent.get_event_failed())
+ raise
+ except Exception:
+ events.record(agent.get_event_failed())
+ raise
+ finally:
+ if shutdown_rdzv:
+ spec.rdzv_handler.shutdown()
+
+
+class NcclCheckElasticAgent(ElasticTrainingAgent):
+ """
+ An implementation of :py:class:`torchelastic.agent.server.ElasticAgent`
+ that handles host-local workers. This agent runs 3 rounds of allgather
+ to check the network. We illustrate the procedure with 4 nodes.
+ Round 1: all nodes join a communication world {0:8, 1:8, 2:8, 3:8}
+ where the key is the node id and the value is the local world size
+ of the node. The check passes if the allgather of all nodes succeeds.
+ Otherwise, round 2 starts.
+ Round 2: the manager splits nodes into groups and each group contains
+ two nodes, like [{0:8, 1:8},{2:8, 3:8}]. The nodes in each group
+ execute allgather independently and report their results to the
+ manager. For example, the result is {0:False, 1:False, 2:True, 3:True}.
+ Round 3: the manager groups each abnormal node with a normal node, like
+ [{0:8, 2:8}, {1:8, 2:8}]. Then, each node executes allgather again.
+ If the result is {0:True, 1:False, 2:False, 3:True}, the network of
+ node-1 is not available.
+ """
+
+ def __init__(
+ self,
+ node_id,
+ config,
+ entrypoint,
+ spec: WorkerSpec,
+ start_method="spawn",
+ exit_barrier_timeout: float = 300,
+ log_dir: Optional[str] = None,
+ ):
+ super().__init__(
+ node_id,
+ config,
+ entrypoint,
+ spec,
+ start_method,
+ exit_barrier_timeout,
+ log_dir,
+ )
+ self._start_method = start_method
+ self._pcontext: Optional[PContext] = None
+ self._log_dir = log_dir or tempfile.mkdtemp(prefix="torchelastic_")
+ self._worker_watchdog: Optional[timer.FileTimerServer] = None
+ self._reamining_fo_count: int = self._remaining_restarts
+ self._node_id = node_id
+ self._client = GlobalMasterClient.MASTER_CLIENT
+ self._max_check_round = 3
+
+ def run(self, role: str = DEFAULT_ROLE) -> bool:
+ spec = self._worker_group.spec
+ role = spec.role
+
+ logger.info(
+ f"[{role}] starting workers for entrypoint: "
+ f"{spec.get_entrypoint_name()}"
+ )
+ success = False
+ for i in range(self._max_check_round):
+ result = self._run_network_check(spec.monitor_interval)
+ logger.info(f"Network check round {i} is {result}")
+ status = NodeStatus.SUCCEEDED if result else NodeStatus.FAILED
+ self._client.report_node_status(status)
+ success = success or result
+ network_ready = self._client.network_check_success(self._node_id)
+ self._stop_workers(self._worker_group)
+ if network_ready:
+ return True
+ time.sleep(1)
+ if not success:
+ self._client.report_node_status(NodeStatus.BREAKDOWN)
+ raise RuntimeError("The node network is broken.")
+ return False
+
+ def _run_network_check(self, monitor_interval):
+ self._initialize_workers(self._worker_group)
+
+ while True:
+ assert self._worker_group.state != WorkerState.INIT
+ time.sleep(monitor_interval)
+ run_result = self._monitor_workers(self._worker_group)
+ state = run_result.state
+ self._worker_group.state = state
+ if state == WorkerState.HEALTHY:
+ continue
+ return state == WorkerState.SUCCEEDED
+
+
+def network_check(
+ config: LaunchConfig,
+ entrypoint: Union[Callable, str, None],
+ args: List[Any],
+) -> bool:
+ config = copy.deepcopy(config)
+ config.network_check = False
+ if not config.run_id:
+ run_id = str(uuid.uuid4().int)
+ logger.warning(
+ f"config has no run_id, generated a random run_id: {run_id}"
+ )
+ config.run_id = run_id
+
+ entrypoint_name = _get_entrypoint_name(entrypoint, args)
+ node_id = int(os.getenv(NodeEnv.WORKER_ID, 0))
+
+ logger.info(
+ f"Starting elastic_operator with launch configs:\n"
+ f" entrypoint : {entrypoint_name}\n"
+ f" min_nodes : {config.min_nodes}\n"
+ f" max_nodes : {config.max_nodes}\n"
+ f" nproc_per_node : {config.nproc_per_node}\n"
+ f" run_id : {config.run_id}\n"
+ f" rdzv_backend : {config.rdzv_backend}\n"
+ f" rdzv_endpoint : {config.rdzv_endpoint}\n"
+ f" rdzv_configs : {config.rdzv_configs}\n"
+ f" max_restarts : {config.max_restarts}\n"
+ f" monitor_interval : {config.monitor_interval}\n"
+ f" log_dir : {config.log_dir}\n"
+ f" metrics_cfg : {config.metrics_cfg}\n"
+ )
+
+ rdzv_parameters = RendezvousParameters(
+ backend=config.rdzv_backend,
+ endpoint=config.rdzv_endpoint,
+ run_id=config.run_id,
+ min_nodes=config.min_nodes,
+ max_nodes=config.max_nodes,
+ local_addr=config.local_addr,
+ **config.rdzv_configs,
+ )
+
+ master_addr = os.environ.get(
+ "MY_POD_IP", socket.gethostbyname(_get_fq_hostname())
+ )
+ rdzv_handler = MasterRendezvousHandler(
+ RendezvousName.NETWORK_CHECK,
+ node_id,
+ rdzv_parameters,
+ )
+ spec = WorkerSpec(
+ role=config.role,
+ local_world_size=config.nproc_per_node,
+ entrypoint=entrypoint,
+ args=tuple(args),
+ rdzv_handler=rdzv_handler,
+ max_restarts=0,
+ monitor_interval=config.monitor_interval,
+ master_addr=master_addr,
+ )
+
+ agent = NcclCheckElasticAgent(
+ node_id=node_id,
+ config=config,
+ entrypoint=entrypoint,
+ spec=spec,
+ start_method=config.start_method,
+ log_dir=config.log_dir,
+ )
+
+ metrics.initialize_metrics(metrics.MetricsConfig(config.metrics_cfg))
+ result = agent.run()
+ logger.info("Network check result is %s", result)
+ return result
+
+
+def run_network_check(config, entrypoint):
+ cmd_args = ["-m", "dlrover.trainer.torch.run_network_check"]
+ for _ in range(config.max_restarts):
+ # If the network check fails because of another abnormal node, we
+ # will retry the check after the new node starts.
+ # DLRover will replace the abnormal node with a new node.
+ success = network_check(
+ config=config, entrypoint=entrypoint, args=cmd_args
+ )
+ if success:
+ logger.info("Network check passed.")
+ return success
+ else:
+ logger.error(
+ "Network of the cluster is not available "
+ "because of an abnormal node."
+ )
+ return success
diff --git a/dlrover/python/master/elastic_training/rdzv_manager.py b/dlrover/python/master/elastic_training/rdzv_manager.py
index 39a67a5c5..2ef3bba1d 100644
--- a/dlrover/python/master/elastic_training/rdzv_manager.py
+++ b/dlrover/python/master/elastic_training/rdzv_manager.py
@@ -331,9 +331,9 @@ def _group_nodes(self, round):
node_groups.append(group)
return node_groups
- def report_network_check_result(self, node_id: int, normal: bool):
+ def report_network_check_result(self, node_id: int, succeed):
self._node_status.setdefault(node_id, False)
- self._node_status[node_id] = self._node_status[node_id] or normal
+ self._node_status[node_id] = self._node_status[node_id] or succeed
def join_rendezvous(
self,
diff --git a/dlrover/python/master/node/job_manager.py b/dlrover/python/master/node/job_manager.py
index 3e1be355a..aae69d1d7 100644
--- a/dlrover/python/master/node/job_manager.py
+++ b/dlrover/python/master/node/job_manager.py
@@ -657,6 +657,10 @@ def pend_without_workers(self):
else:
return False
+ def remove_breakdown_node(self, node_type, node_id):
+ node = self._job_nodes[node_type][node_id]
+ logger.warning(f"Node {node.name} has broken down.")
+
def create_job_manager(args: JobArgs, speed_monitor) -> JobManager:
critical_worker_index = get_critical_worker_index(args)
diff --git a/dlrover/python/master/scaler/pod_scaler.py b/dlrover/python/master/scaler/pod_scaler.py
index c5c6121fa..c70ce8c74 100644
--- a/dlrover/python/master/scaler/pod_scaler.py
+++ b/dlrover/python/master/scaler/pod_scaler.py
@@ -330,7 +330,7 @@ def _check_cluster_ready_for_pod(self, node: Node):
def _create_pod(self, node: Node, pod_stats: Dict[str, int], ps_addrs):
# Find that master pod that will be used as the owner reference
# for the ps or worker pod.
- node.update_priority(pod_stats[node.type])
+ node.update_priority(pod_stats.get(node.type, 0))
pod_name = get_pod_name(self._job_name, node.type, node.id)
logger.info(
"Create Pod %s with resource %s",
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index ea37bb16e..5154e2d5d 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -22,6 +22,7 @@
from dlrover.proto import elastic_training_pb2, elastic_training_pb2_grpc
from dlrover.python.common.constants import (
GRPC,
+ NodeStatus,
NodeType,
RendezvousName,
TrainingLoopStatus,
@@ -286,9 +287,22 @@ def update_node_status(self, request, _):
node_id = request.id
server_addr = request.addr
- self._job_manager.update_node_service_addr(
- node_type, node_id, server_addr
- )
+ if server_addr:
+ self._job_manager.update_node_service_addr(
+ node_type, node_id, server_addr
+ )
+ node_status = request.status
+ if node_status in [NodeStatus.SUCCEEDED, NodeStatus.FAILED]:
+ net_rdzv_manager = self._rdzv_managers.get(
+ RendezvousName.NETWORK_CHECK, None
+ )
+ if net_rdzv_manager:
+ succeed = request.status == NodeStatus.SUCCEEDED
+ net_rdzv_manager.report_network_check_result(node_id, succeed)
+
+ if request.status == NodeStatus.BREAKDOWN:
+ self._job_manager.remove_breakdown_node(node_type, node_id)
+
response = elastic_training_pb2.Response()
response.success = True
return response
@@ -441,14 +455,6 @@ def network_check_success(self, request, _):
res.success = success
return res
- def report_network_check_result(self, request, _):
- res = elastic_training_pb2.Response()
- node_id = request.id
- net_rdzv_manager = self._rdzv_managers[RendezvousName.NETWORK_CHECK]
- net_rdzv_manager.report_network_check_result(node_id, request.normal)
- res.success = True
- return res
-
def create_master_service(
port,
diff --git a/dlrover/trainer/torch/elastic_run.py b/dlrover/trainer/torch/elastic_run.py
index 619293f4b..08dbc0a58 100644
--- a/dlrover/trainer/torch/elastic_run.py
+++ b/dlrover/trainer/torch/elastic_run.py
@@ -11,495 +11,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import functools
-import json
-import os
-import socket
-import tempfile
-import time
import uuid
-from dataclasses import dataclass
-from datetime import datetime, timedelta
-from typing import Any, Callable, Dict, List, Optional, Union
+from typing import Callable, Union
-import torch.distributed.elastic.timer as timer
-from torch.distributed import PrefixStore, Store
-from torch.distributed.elastic import events, metrics
-from torch.distributed.elastic.agent.server.api import (
- DEFAULT_ROLE,
- RunResult,
- Worker,
- WorkerGroup,
- WorkerSpec,
- WorkerState,
- _get_fq_hostname,
- _RoleInstanceInfo,
-)
-from torch.distributed.elastic.agent.server.local_elastic_agent import (
- LocalElasticAgent,
-)
-from torch.distributed.elastic.metrics import put_metric
-from torch.distributed.elastic.metrics.api import prof
-from torch.distributed.elastic.multiprocessing import PContext, SignalException
-from torch.distributed.elastic.multiprocessing.errors import (
- ChildFailedError,
- ProcessFailure,
- record,
-)
-from torch.distributed.elastic.rendezvous import RendezvousParameters
-from torch.distributed.elastic.rendezvous.api import RendezvousHandler
-from torch.distributed.launcher.api import LaunchConfig, _get_entrypoint_name
-from torch.distributed.run import config_from_args, parse_args
+from torch.distributed.argparse_util import check_env
+from torch.distributed.elastic.multiprocessing.errors import record
+from torch.distributed.launcher.api import LaunchConfig
+from torch.distributed.run import config_from_args, get_args_parser
-from dlrover.python.common.constants import NodeEnv
from dlrover.python.common.log import default_logger as logger
-from dlrover.python.elastic_agent.master_client import GlobalMasterClient
-from dlrover.python.elastic_agent.torch.master_kv_store import MasterKVStore
+from dlrover.python.elastic_agent.torch.training import launch_agent
-__all__ = ["LaunchConfig", "elastic_launch", "launch_agent"]
-
-class MasterRendezvousHandler(RendezvousHandler):
- def __init__(self, node_id, rdzv_params: RendezvousParameters):
- self._node_id = node_id
- self._rdzv_params = rdzv_params
- self.join_timeout = rdzv_params.get("join_timeout", 600)
- self._client = GlobalMasterClient.MASTER_CLIENT
- self._store = MasterKVStore("dlrover-elastic", timedelta(seconds=60))
- lastcall_timeout = rdzv_params.get("lastcall_timeout", 60)
- self._client.report_rdzv_params(
- rdzv_params.min_nodes,
- rdzv_params.max_nodes,
- lastcall_timeout,
- )
-
- def get_backend(self) -> str:
- return "dlrover-master"
-
- def is_closed(self) -> bool:
- return False
-
- def set_closed(self):
- """Marks the rendezvous as closed."""
- pass
-
- def join_rendezvous(self, local_world_size):
- """The node join a rendezvous by sending its
- ID and local world size.
- """
- round = self._client.join_rendezvous(self._node_id, local_world_size)
- return round
-
- def next_rendezvous(self, round):
- """The handler will peroidically query the world from the master until
- the world is not empty. The world is a dictionary like
- like {0: 8, 1: 8, 2: 8} where the key is the node ID and the value is
- the local world size. The handler can get its rank by the position
- of it node ID in the world.
- """
- start_join = time.time()
- node_name = os.getenv("POD_NAME", "")
- msg = (
- f"The node node_name attempts to join the next round "
- f"of the rendezvous '{self._rdzv_params.run_id}'."
- )
- logger.info(msg)
- while True:
- world = self._client.get_comm_world()
- world = dict(sorted(world.items()))
- if world:
- break
- if time.time() - start_join > self.join_timeout:
- raise TimeoutError(
- f"Timeout {self.join_timeout}s to complete next rendezous."
- )
- time.sleep(3)
- rank = list(world.keys()).index(self._node_id)
- world_size = len(world)
- logger.info(
- f"The node{node_name} has joined round {round} of "
- f"the rendezvous as rank {rank} in a world of size "
- f"{world_size}."
- )
- store = self._get_store(round)
- return store, world
-
- def _get_store(self, round) -> Store:
- key_prefix = f"torch.rendezvous.{self._rdzv_params.run_id}.{round}"
- return PrefixStore(key_prefix, self._store)
-
- def num_nodes_waiting(self) -> int:
- return self._client.num_nodes_waiting()
-
- def get_run_id(self) -> str:
- """Returns the run id of the rendezvous.
-
- The run id is a user-defined id that uniquely identifies an instance of
- a distributed application. It typically maps to a job id and is used to
- allow nodes to join the correct distributed application.
- """
- return self._rdzv_params.run_id
-
- def shutdown(self) -> bool:
- """Closes all resources that were open for the rendezvous.
-
- Example::
-
- rdzv_handler = ...
- try:
- store, rank, world_size = rdzv_handler.next_rendezvous()
- finally:
- rdzv_handler.shutdown()
- """
- pass
-
-
-@dataclass
-class ProcessError:
- local_rank: int
- exitcode: int
- message: str
- datetime: Any
-
-
-class DLRoverElasticAgent(LocalElasticAgent):
- """
- An implementation of :py:class:`torchelastic.agent.server.ElasticAgent`
- that handles host-local workers.
- This agent is deployed per host and is configured to spawn ``n`` workers.
- When using GPUs, ``n`` maps to the number of GPUs available on the host.
-
- The agent select to fail or relaunch subprocesses according to the
- failed reason of subprocess. Now, if the exitcode is not 1, the agent
- will fail and the DLRover will relaunch the node. Because, we find
- the exitcode is 1 if the hardware breakdowns.
- """
-
- def __init__(
- self,
- node_id,
- spec: WorkerSpec,
- start_method="spawn",
- exit_barrier_timeout: float = 300,
- log_dir: Optional[str] = None,
- ):
- super().__init__(spec, exit_barrier_timeout)
- self._start_method = start_method
- self._pcontext: Optional[PContext] = None
- self._log_dir = log_dir or tempfile.mkdtemp(prefix="torchelastic_")
- self._worker_watchdog: Optional[timer.FileTimerServer] = None
- self._reamining_fo_count: int = self._remaining_restarts
- self._node_id = node_id
- self._client = GlobalMasterClient.MASTER_CLIENT
-
- @prof
- def _rendezvous(self, worker_group: WorkerGroup) -> None:
- r"""
- Runs rendezvous for the workers specified by worker spec.
- Assigns workers a new global rank and world size.
- Updates the rendezvous store for the worker group.
- """
-
- spec = worker_group.spec
- round = spec.rdzv_handler.join_rendezvous(spec.local_world_size)
- store, world = spec.rdzv_handler.next_rendezvous(round)
- group_world_size = len(world)
- group_rank = list(world.keys()).index(self._node_id)
- self._store = store
-
- workers = self._assign_worker_ranks(self._node_id, world, spec)
- worker_group.workers = workers
- worker_group.store = store
- worker_group.group_rank = group_rank
- worker_group.group_world_size = group_world_size
-
- if group_rank == 0:
- self._set_master_addr_port(
- store,
- spec.master_addr,
- spec.master_port,
- spec.local_addr,
- )
-
- master_addr, master_port = self._get_master_addr_port(store)
- restart_count = spec.max_restarts - self._remaining_restarts
-
- logger.info(
- f"[{spec.role}] Rendezvous complete for workers. Result:\n"
- f" restart_count={restart_count}\n"
- f" master_addr={master_addr}\n"
- f" master_port={master_port}\n"
- f" group_rank={group_rank}\n"
- f" group_world_size={group_world_size}\n"
- f" local_ranks={[worker.local_rank for worker in workers]}\n"
- f" role_ranks={[worker.role_rank for worker in workers]}\n"
- f" global_ranks={[worker.global_rank for worker in workers]}\n"
- f" role_world_sizes="
- f"{[worker.role_world_size for worker in workers]}\n"
- f" global_world_sizes="
- f"{[worker.world_size for worker in workers]}\n"
- )
-
- # pyre-fixme[56]: Pyre was not able to infer the type of the decorator
- # `torch.distributed.elastic.metrics.prof`.
- @prof
- def _assign_worker_ranks(
- self, node_id, world, spec: WorkerSpec
- ) -> List[Worker]:
- """
- Determines proper ranks for worker processes. The rank assignment
- is done according to the following algorithm:
-
- 1. Each agent writes its configuration(group_rank, group_world_size
- , num_workers) to the common store.
- 2. Each agent retrieves configuration for all agents
- and performs two level sort using role and rank.
- 3. Determine the global rank: the global rank of workers for the
- current agent is the offset of infos array up to group_rank
- of the agent. The offset is computed as a sum of local_world_size
- of all agents that have rank less than the group_rank.
- The workers would have the ranks: [offset, offset+local_world_size)
- 4. Determine the role rank: The role rank is determined using the
- algorithms in the point 3 with the exception that the offset is
- done from the first agent that has the same role as current one
- and has the minimum group rank.
- """
-
- role_infos: List[_RoleInstanceInfo] = []
- nodes = list(world.keys())
- for i, local_world_size in world.items():
- group_rank = nodes.index(i)
- role_info = _RoleInstanceInfo(
- spec.role, group_rank, local_world_size
- )
- role_infos.append(role_info)
- group_rank = nodes.index(node_id)
- my_role_info = role_infos[group_rank]
- worker_world_size, worker_global_ranks = self._get_ranks(
- role_infos, group_rank
- )
- role_infos = sorted(
- role_infos, key=functools.cmp_to_key(_RoleInstanceInfo.compare)
- )
- role_start_idx, role_end_idx = _RoleInstanceInfo.find_role_boundaries(
- role_infos, my_role_info.role
- )
- role_pos = next(
- idx
- for idx, role_info in enumerate(role_infos)
- if _RoleInstanceInfo.compare(role_info, my_role_info) == 0
- )
- role_world_size, role_ranks = self._get_ranks(
- role_infos, role_pos, role_start_idx, role_end_idx + 1
- )
- workers = []
- for ind in range(spec.local_world_size):
- worker = Worker(
- local_rank=ind,
- global_rank=worker_global_ranks[ind],
- role_rank=role_ranks[ind],
- world_size=worker_world_size,
- role_world_size=role_world_size,
- )
- workers.append(worker)
- return workers
-
- def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
- # NOTE: currently only works for a single role
-
- spec = self._worker_group.spec
- role = spec.role
-
- logger.info(
- f"[{role}] starting workers for entrypoint: "
- f"{spec.get_entrypoint_name()}"
- )
-
- self._initialize_workers(self._worker_group)
- monitor_interval = spec.monitor_interval
- rdzv_handler = spec.rdzv_handler
-
- while True:
- assert self._worker_group.state != WorkerState.INIT
- time.sleep(monitor_interval)
- run_result = self._monitor_workers(self._worker_group)
- state = run_result.state
- self._worker_group.state = state
-
- put_metric(
- f"workers.{role}.remaining_restarts", self._remaining_restarts
- )
- put_metric(f"workers.{role}.{state.name.lower()}", 1)
-
- if state == WorkerState.SUCCEEDED:
- logger.info(
- f"[{role}] worker group successfully finished."
- f" Waiting {self._exit_barrier_timeout} seconds "
- "for other agents to finish."
- )
- self._exit_barrier()
- return run_result
- elif state in {WorkerState.UNHEALTHY, WorkerState.FAILED}:
- self._report_failure_to_master(run_result.failures)
- if self._reamining_fo_count > 0:
- logger.info(
- f"[{role}] Worker group {state.name}. "
- f"{self._remaining_restarts}/{spec.max_restarts}"
- f" attempts left; will restart worker group"
- )
- self._reamining_fo_count -= 1
- self._restart_workers(self._worker_group)
- else:
- self._stop_workers(self._worker_group)
- self._worker_group.state = WorkerState.FAILED
- return run_result
- elif state == WorkerState.HEALTHY:
- # membership changes do not count as retries
- if self._membership_changed(role, rdzv_handler):
- self._restart_workers(self._worker_group)
- else:
- raise Exception(f"[{role}] Worker group in {state.name} state")
-
- def _report_failure_to_master(self, failures: Dict[int, ProcessFailure]):
- errors = {}
- for rank, failure in failures.items():
- dt = str(datetime.utcfromtimestamp(int(failure.timestamp)))
- error = ProcessError(
- failure.local_rank, failure.exitcode, failure.message, dt
- )
- errors[rank] = error.__dict__
- error_data = json.dumps(errors)
- self._client.report_failures(error_data)
-
- def _restart_workers(self, worker_group: WorkerGroup):
- self._remaining_restarts -= 1
- super()._restart_workers(worker_group)
-
- def should_shutdown_rdzv(self):
- return self._reamining_fo_count == 0
-
- def _membership_changed(self, role, rdzv_handler: RendezvousHandler):
- # Timeout may happen when to query TCPStore.
- try:
- num_nodes_waiting = rdzv_handler.num_nodes_waiting()
- except Exception as e:
- logger.warning("Fail to call num_node_waiting.", e)
- num_nodes_waiting = 0
-
- group_rank = self._worker_group.group_rank
- if num_nodes_waiting > 0:
- logger.info(
- f"[{role}] Detected {num_nodes_waiting} "
- f"new nodes from group_rank={group_rank}; "
- f"will restart worker group"
- )
- return True
- return False
-
-
-def launch_agent(
- config: LaunchConfig,
- entrypoint: Union[Callable, str, None],
- args: List[Any],
-) -> Dict[int, Any]:
- if not config.run_id:
- run_id = str(uuid.uuid4().int)
- logger.warning(
- f"config has no run_id, generated a random run_id: {run_id}"
- )
- config.run_id = run_id
-
- entrypoint_name = _get_entrypoint_name(entrypoint, args)
- node_id = int(os.getenv(NodeEnv.WORKER_ID, 0))
-
- logger.info(
- f"Starting elastic_operator with launch configs:\n"
- f" entrypoint : {entrypoint_name}\n"
- f" min_nodes : {config.min_nodes}\n"
- f" max_nodes : {config.max_nodes}\n"
- f" nproc_per_node : {config.nproc_per_node}\n"
- f" run_id : {config.run_id}\n"
- f" rdzv_backend : {config.rdzv_backend}\n"
- f" rdzv_endpoint : {config.rdzv_endpoint}\n"
- f" rdzv_configs : {config.rdzv_configs}\n"
- f" max_restarts : {config.max_restarts}\n"
- f" monitor_interval : {config.monitor_interval}\n"
- f" log_dir : {config.log_dir}\n"
- f" metrics_cfg : {config.metrics_cfg}\n"
+def parse_args(args):
+ parser = get_args_parser()
+ parser.add_argument(
+ "--network-check",
+ action=check_env,
+ help="Whether to check network before starting training process.",
)
-
- rdzv_parameters = RendezvousParameters(
- backend=config.rdzv_backend,
- endpoint=config.rdzv_endpoint,
- run_id=config.run_id,
- min_nodes=config.min_nodes,
- max_nodes=config.max_nodes,
- local_addr=config.local_addr,
- **config.rdzv_configs,
- )
-
- master_addr = os.environ.get(
- "MY_POD_IP", socket.gethostbyname(_get_fq_hostname())
- )
-
- spec = WorkerSpec(
- role=config.role,
- local_world_size=config.nproc_per_node,
- entrypoint=entrypoint,
- args=tuple(args),
- rdzv_handler=MasterRendezvousHandler(node_id, rdzv_parameters),
- max_restarts=config.max_restarts,
- monitor_interval=config.monitor_interval,
- redirects=config.redirects,
- tee=config.tee,
- master_addr=master_addr,
- local_addr=config.local_addr,
- )
-
- agent = DLRoverElasticAgent(
- node_id=node_id,
- spec=spec,
- start_method=config.start_method,
- log_dir=config.log_dir,
- )
-
- shutdown_rdzv = True
- try:
- metrics.initialize_metrics(metrics.MetricsConfig(config.metrics_cfg))
-
- result = agent.run()
- # records that agent.run() has succeeded NOT
- # that workers have succeeded
- events.record(agent.get_event_succeeded())
-
- if result.is_failed():
- # ChildFailedError is treated specially by @record
- # if the error files for the failed children exist
- # @record will copy the first error (root cause)
- # to the error file of the launcher process.
- raise ChildFailedError(
- name=entrypoint_name,
- failures=result.failures,
- )
-
- return result.return_values
- except ChildFailedError:
- if not agent.should_shutdown_rdzv():
- shutdown_rdzv = False
- raise
- except SignalException:
- # when the agent dies with a signal do NOT shutdown the rdzv_handler
- # since this closes the rendezvous on this rdzv_id permanently and
- # prevents any additional scaling events
- shutdown_rdzv = False
- events.record(agent.get_event_failed())
- raise
- except Exception:
- events.record(agent.get_event_failed())
- raise
- finally:
- if shutdown_rdzv:
- spec.rdzv_handler.shutdown()
+ return parser.parse_args(args)
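+ # Illustrative usage (the flags other than --network-check come from
+ # torch.distributed.run's parser; values here are hypothetical):
+ # args = parse_args(["--network-check", "--nnodes=1:2",
+ # "--nproc_per_node=8", "train.py"])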
class elastic_launch:
@@ -558,6 +89,7 @@ def run(args):
)
config, cmd, cmd_args = config_from_args(args)
+ config.network_check = args.network_check
elastic_launch(
config=config,
entrypoint=cmd,
@@ -568,3 +100,7 @@ def run(args):
def main(args=None):
args = parse_args(args)
run(args)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/dlrover/trainer/torch/run_network_check.py b/dlrover/trainer/torch/run_network_check.py
new file mode 100644
index 000000000..b30fca541
--- /dev/null
+++ b/dlrover/trainer/torch/run_network_check.py
@@ -0,0 +1,54 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import time
+from datetime import timedelta
+
+import torch
+import torch.distributed as dist
+
+from dlrover.python.common.log import default_logger as logger
+
+
+def bm_all_gather(shape, use_cuda):
+ world_size = dist.get_world_size()
+ local_rank = int(os.environ["LOCAL_RANK"])
+ device = torch.device(f"cuda:{local_rank}" if use_cuda else "cpu")
+ data = torch.randn(shape, dtype=torch.float32).to(device)
+ tensor_list = [
+ torch.zeros_like(data).to(device) for _ in range(world_size)
+ ]
+ start = time.time()
+ for _ in range(10):
+ dist.all_gather(tensor_list, data)
+ end = time.time()
+ if local_rank == 0:
+ logger.info(f"Network check costs {end - start}s")
+
+
+def main(use_cuda):
+ shape = 1 << 20
+ bm_all_gather(shape, use_cuda)
+
+
+if __name__ == "__main__":
+ try:
+ use_cuda = torch.cuda.is_available()
+ if use_cuda:
+ dist.init_process_group("nccl", timeout=timedelta(seconds=60))
+ else:
+ dist.init_process_group("gloo", timeout=timedelta(seconds=60))
+ main(use_cuda)
+ finally:
+ dist.destroy_process_group()
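+# This script is launched by the elastic agent as a worker process,
+# e.g. via "python -m dlrover.trainer.torch.run_network_check", with
+# LOCAL_RANK and the torch.distributed env vars set by the agent
+# (see run_network_check in training.py); noted here for illustration.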
diff --git a/docker/ci.dockerfile b/docker/ci.dockerfile
index 283480576..eb10ce47a 100644
--- a/docker/ci.dockerfile
+++ b/docker/ci.dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.6.15
+FROM python:3.8.14
ARG EXTRA_PYPI_INDEX=https://pypi.org/simple
# Allows for log messages by `print` in Python to be immediately dumped
| Rendezvous manager to help the node check the network.
Fix #463
| 2023-06-30T02:44:51 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-465 | 704cb09ade12e0b79918e78776f1e5287af6b1eb | diff --git a/dlrover/proto/elastic_training.proto b/dlrover/proto/elastic_training.proto
index f55bccc74..3bdd7c78c 100644
--- a/dlrover/proto/elastic_training.proto
+++ b/dlrover/proto/elastic_training.proto
@@ -170,7 +170,7 @@ message NodeMeta {
string gpu_type = 6;
int32 id = 7;
int32 rank = 8;
- int32 local_world_size = 9;
+ bool normal = 9;
}
message NodeEvent {
@@ -206,6 +206,13 @@ message RendezvousState {
map<int32, int32> world = 1;
int32 waiting_num = 2;
int32 round = 3;
+ int32 group = 4;
+}
+
+message RendezvousRequest {
+ int32 node_id = 1;
+ int32 local_world_size = 2;
+ string rdzv_name = 4;
}
message RendezvousParams {
@@ -273,13 +280,15 @@ service Master {
returns (google.protobuf.Empty);
// rpc for torch elastic
- rpc get_comm_world(google.protobuf.Empty) returns (RendezvousState);
- rpc join_rendezvous(NodeMeta) returns (RendezvousState);
- rpc num_nodes_waiting(google.protobuf.Empty) returns (RendezvousState);
+ rpc get_comm_world(RendezvousRequest) returns (RendezvousState);
+ rpc join_rendezvous(RendezvousRequest) returns (RendezvousState);
+ rpc num_nodes_waiting(RendezvousRequest) returns (RendezvousState);
rpc report_rdzv_params(RendezvousParams) returns (Response);
rpc kv_store_set(KeyValuePair) returns (Response);
rpc kv_store_get(KeyValuePair) returns (KeyValuePair);
rpc report_failure(NodeFailure) returns (Response);
+ rpc network_check_success(RendezvousRequest) returns (Response);
+ rpc report_network_check_result(NodeMeta) returns (Response);
rpc report_prestop(ReportPreStopRequest) returns (google.protobuf.Empty);
rpc update_node_status(NodeMeta) returns (Response);
diff --git a/dlrover/python/common/constants.py b/dlrover/python/common/constants.py
index d8f015fa9..6e22bf344 100644
--- a/dlrover/python/common/constants.py
+++ b/dlrover/python/common/constants.py
@@ -223,3 +223,8 @@ class MemoryUnit(object):
class k8sAPIExceptionReason(object):
NOT_FOUND = "Not Found"
+
+
+class RendezvousName(object):
+ ELASTIC_TRAINING = "elastic-training"
+ NETWORK_CHECK = "network-check"
diff --git a/dlrover/python/master/elastic_training/rdzv_manager.py b/dlrover/python/master/elastic_training/rdzv_manager.py
index cbdd407cb..39a67a5c5 100644
--- a/dlrover/python/master/elastic_training/rdzv_manager.py
+++ b/dlrover/python/master/elastic_training/rdzv_manager.py
@@ -11,14 +11,61 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import math
import time
+from abc import ABCMeta, abstractmethod
from threading import Lock
-from typing import Dict
+from typing import Dict, List
from dlrover.python.common.log import default_logger as logger
from dlrover.python.common.node import Node
+class RendezvousManager(metaclass=ABCMeta):
+ def __init__(self):
+ self._lock = Lock()
+ self._alive_nodes = set()
+ self._released_workers = []
+ self._waiting_nodes: Dict[int, int] = {}
+ self._rdzv_nodes = {}
+ self._lastcall_time = 0
+ self._rdzv_params = RendezvousParameters(0, 0)
+ self._rdzv_round = 0
+
+ def update_rdzv_params(self, min_nodes, max_ndoes, waiting_timeout):
+ """Update rendezvous parameters"""
+
+ @abstractmethod
+ def add_alive_node(self, node: Node):
+ """When a node is running, the master will add it to alive list."""
+ pass
+
+ @abstractmethod
+ def remove_alive_node(self, node: Node):
+ """When a node is exited, the master will remove it from alive list."""
+ pass
+
+ @abstractmethod
+ def get_comm_world(self, node_id):
+ """Get communication world of all alive nodes."""
+ pass
+
+ @abstractmethod
+ def join_rendezvous(self, node_id, local_world_size):
+ """The node joins a round of rendezvous."""
+ pass
+
+ @abstractmethod
+ def report_network_check_result(self, node_id: int, normal: bool):
+ """The node updates its status"""
+ pass
+
+ @abstractmethod
+ def num_nodes_waiting(self):
+ """Get the number of waiting nodes."""
+ pass
+
+
class RendezvousParameters(object):
"""Holds the parameters to construct rendezvous.
Args:
@@ -43,8 +90,8 @@ def __init__(
self.waiting_timeout = waiting_timeout
-class RendezvousManager(object):
- """RendezvousManager runs on the DLRover master. The manager
+class ElasticTrainingRendezvousManager(RendezvousManager):
+ """ElasticTrainingRendezvousManager runs on the DLRover master. The manager
add workers into a waiting list and completes a rendezvous
if the number of workers in the wait list is beyond the minimum
nodes.
@@ -60,15 +107,7 @@ class RendezvousManager(object):
"""
def __init__(self):
- self._lock = Lock()
- self._alive_nodes = set()
- self._scale_down_ts = 0
- self._released_workers = []
- self._waiting_nodes: Dict[int, int] = {}
- self._rdzv_nodes = {}
- self._lastcall_time = 0
- self._rdzv_params = RendezvousParameters(0, 0)
- self._rdzv_round = 0
+ super().__init__()
def update_rdzv_params(self, min_nodes, max_ndoes, waiting_timeout):
"""Update rendezvous parameters"""
@@ -90,7 +129,7 @@ def remove_alive_node(self, node: Node):
def get_released_workers(self):
return []
- def get_comm_world(self):
+ def get_comm_world(self, node_id):
"""Return the communication world if a round rendezvous is completed.
The rendezvous is completed if one of the following conditions
is satisfied:
@@ -106,7 +145,7 @@ def get_comm_world(self):
with self._lock:
rdzv_completed = False
if self._rdzv_nodes:
- return self._rdzv_nodes
+ return 0, self._rdzv_nodes
if len(self._waiting_nodes) == self._rdzv_params.max_nodes:
rdzv_completed = True
else:
@@ -129,7 +168,7 @@ def get_comm_world(self):
)
self._rdzv_round += 1
- return self._rdzv_nodes
+ return 0, self._rdzv_nodes
def join_rendezvous(self, node_id, local_world_size):
"""The node joins the current rond rendezvous.
@@ -156,3 +195,182 @@ def num_nodes_waiting(self):
"""
with self._lock:
return len(self._waiting_nodes)
+
+ def report_network_check_result(self, node_id, normal):
+ return
+
+
+class NetworkCheckRendezvousManager(RendezvousManager):
+ """NetworkCheckRendezvousManager runs on the DLRover master. The task
+ to check the network contains 3 rounds of allgather on all nodes.
+ We illustrate the procedure assuming there are 4 nodes.
+ Round 1: all nodes join a communication world {0:8, 1:8, 2:8, 3:8}
+ where the key is the node id and the value is the local world size
+ of the node. The check passes if the allgather of all nodes succeeds.
+ Otherwise, round 2 starts.
+ Round 2: the manager splits nodes into groups and each group contains
+ two nodes, like [{0:8, 1:8},{2:8, 3:8}]. The nodes in each group
+ execute allgather independently and report their results to the
+ manager. For example, the result is {0:False, 1:False, 2:True, 3:True}.
+ Round 3: the manager groups each abnormal node with a normal node, like
+ [{0:8, 2:8}, {1:8, 2:8}]. Then, each node executes allgather again.
+ If the result is {0:True, 1:False, 2:False, 3:True}, the network of
+ node-1 is not available.
+ """
+
+ def __init__(self):
+ super().__init__()
+ self._node_status: Dict[int, bool] = {}
+ self._node_groups: List[Dict[int, int]] = []
+
+ def update_rdzv_params(self, min_nodes, max_ndoes, waiting_timeout):
+ """Update rendezvous parameters"""
+ self._rdzv_params.min_nodes = min_nodes
+ self._rdzv_params.max_nodes = max_ndoes
+ self._rdzv_params.waiting_timeout = waiting_timeout
+
+ def add_alive_node(self, node: Node):
+ """When a node is running, the master will add it to alive list."""
+ self._alive_nodes.add(node.id)
+ logger.info(f"Add alive worker {node.name} to Rendezvous.")
+
+ def remove_alive_node(self, node: Node):
+ """When a node is exited, the master will remove it from alive list."""
+ if node.id in self._alive_nodes:
+ self._alive_nodes.remove(node.id)
+ logger.info(f"Remove exited worker {node.name} from Rendezvous.")
+
+ def get_released_workers(self):
+ return []
+
+ def get_comm_world(self, node_id):
+ """Return the communication world if a round of rendezvous is completed.
+ The rendezvous is completed when all max_nodes have joined, or when
+ all alive nodes (at least min_nodes) have joined and the waiting
+ timeout has elapsed.
+ """
+ with self._lock:
+ rdzv_completed = False
+ if not self._node_groups:
+ if len(self._waiting_nodes) == self._rdzv_params.max_nodes:
+ rdzv_completed = True
+ else:
+ waiting_num = len(self._waiting_nodes)
+ alive_num = len(self._alive_nodes)
+ waiting_time = time.time() - self._lastcall_time
+ rdzv_completed = (
+ waiting_num >= self._rdzv_params.min_nodes
+ and waiting_num == alive_num
+ and waiting_time >= self._rdzv_params.waiting_timeout
+ )
+
+ if rdzv_completed:
+ self._rdzv_nodes = dict(
+ sorted(self._waiting_nodes.items())
+ )
+ self._waiting_nodes = dict()
+ self._lastcall_time = 0
+ logger.info(
+ f"Completed {self._rdzv_round} round "
+ f"rendezvous {self._rdzv_nodes}"
+ )
+ self._node_groups = self._group_nodes(self._rdzv_round)
+ logger.info(
+ f"Round {self._rdzv_round} "
+ f"node group: {self._node_groups}"
+ )
+ if self._rdzv_round % 3 == 0:
+ self._node_status = {}
+ self._rdzv_round += 1
+
+ for i, group in enumerate(self._node_groups):
+ if node_id in group:
+ return i, group
+ return 0, {}
+
+ def _group_nodes(self, round):
+ """Group nodes into groups.
+ Round 0: group all nodes into one group like {0:8, 1:8, 2:8, 3:8}.
+ Round 1: split nodes into groups where each group contains
+ two nodes, like [{0:8, 1:8},{2:8, 3:8}].
+ Round 2: group each abnormal node with a normal node like
+ [{0:8, 2:8}, {1:8, 2:8}].
+ """
+ round = round % 3
+ node_groups = []
+ if round == 0:
+ node_groups.append(self._rdzv_nodes)
+ elif round == 1:
+ group = {}
+ for node_id, local_world_size in self._rdzv_nodes.items():
+ group[node_id] = local_world_size
+ if len(group) == 2:
+ node_groups.append(group)
+ group = {}
+ elif round == 2:
+ abnormal_nodes = []
+ normal_nodes = []
+ for node_id, status in self._node_status.items():
+ if status:
+ normal_nodes.append(node_id)
+ else:
+ abnormal_nodes.append(node_id)
+ logger.info(
+ f"Normal nodes: {normal_nodes}.\n"
+ f"Abnormal nodes: {abnormal_nodes}"
+ )
+ if len(abnormal_nodes) > len(normal_nodes):
+ return node_groups
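+        # Walk-through (illustrative): with _rdzv_nodes = {0:8, 1:8, 2:8, 3:8}
+        # and _node_status = {0: False, 1: True, 2: True, 3: True}, the loop
+        # below pairs node 0 with node 1 into {0: 8, 1: 8}, and the leftover
+        # normal nodes form {2: 8, 3: 8}.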
+ for i, node_id in enumerate(abnormal_nodes):
+ group = {}
+ group[node_id] = self._rdzv_nodes[node_id]
+            group[normal_nodes[i]] = self._rdzv_nodes[normal_nodes[i]]
+ node_groups.append(group)
+ group = {}
+ for node_id in normal_nodes[len(abnormal_nodes) :]: # noqa: E203
+ group[node_id] = self._rdzv_nodes[node_id]
+ if group:
+ node_groups.append(group)
+ return node_groups
+
+ def report_network_check_result(self, node_id: int, normal: bool):
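+        # Accumulate with `or`: a node stays marked normal once any of its
+        # groups' allgather succeeds, so a later pairing with a broken peer
+        # does not flip it back to abnormal.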
+ self._node_status.setdefault(node_id, False)
+ self._node_status[node_id] = self._node_status[node_id] or normal
+
+ def join_rendezvous(
+ self,
+ node_id,
+ local_world_size,
+ ):
+ """The node joins the current rond rendezvous.
+ Args:
+ node_id: the node ID which is unique in an ElasticJob of DLrover.
+ local_world_size: the local world size of a node.
+
+ Returns:
+ int: the number of rendezvous round.
+ """
+ with self._lock:
+ if node_id in self._waiting_nodes:
+ return
+ self._waiting_nodes[node_id] = local_world_size
+ self._rdzv_nodes = {}
+ self._node_groups = []
+ if len(self._waiting_nodes) >= self._rdzv_params.min_nodes:
+ if self._lastcall_time == 0:
+ self._lastcall_time = time.time()
+ return self._rdzv_round
+
+ def num_nodes_waiting(self):
+ with self._lock:
+ return len(self._waiting_nodes)
+
+ def network_check_success(self):
+ """Check the network task is succeed. Each task contains 3 rounds
+ allgather. If succeed, the round should be set to the multiples of 3.
+ """
+ with self._lock:
+ success = self._node_status and all(
+ list(self._node_status.values())
+ )
+ if success:
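+            # e.g. a success reported at round 1 advances _rdzv_round to
+            # math.ceil(1 / 3) * 3 == 3, skipping the remaining check
+            # rounds of this task.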
+ self._rdzv_round = math.ceil(self._rdzv_round / 3) * 3
+ return success
diff --git a/dlrover/python/master/main.py b/dlrover/python/master/main.py
index 09bf77da2..08a6013cc 100644
--- a/dlrover/python/master/main.py
+++ b/dlrover/python/master/main.py
@@ -38,7 +38,7 @@ def run(args):
job_args.initilize()
logger.info("Job args : %s", job_args.toJSON())
_dlrover_context.config_master_port(port=args.port)
- _dlrover_context.relaunch_error = args.relaunch_error
+ _dlrover_context.relaunch_error = args.relaunch_error or True
update_context(job_args)
master = Master(_dlrover_context.master_port, job_args)
master.prepare()
diff --git a/dlrover/python/master/master.py b/dlrover/python/master/master.py
index 40170d6be..9a41b1c8a 100644
--- a/dlrover/python/master/master.py
+++ b/dlrover/python/master/master.py
@@ -18,12 +18,14 @@
JobExitReason,
NodeType,
OptimizeMode,
+ RendezvousName,
ReporterType,
)
from dlrover.python.common.log import default_logger as logger
from dlrover.python.master.elastic_training.elastic_ps import ElasticPsService
from dlrover.python.master.elastic_training.rdzv_manager import (
- RendezvousManager,
+ ElasticTrainingRendezvousManager,
+ NetworkCheckRendezvousManager,
)
from dlrover.python.master.elastic_training.sync_service import SyncService
from dlrover.python.master.monitor.speed_monitor import SpeedMonitor
@@ -61,7 +63,11 @@ def __init__(self, port, args: JobArgs):
if args.enable_dynamic_sharding
else None
)
- self.rdzv_manager = RendezvousManager()
+ elastic_training = RendezvousName.ELASTIC_TRAINING
+ self.rdzv_managers = {
+ elastic_training: ElasticTrainingRendezvousManager(),
+ RendezvousName.NETWORK_CHECK: NetworkCheckRendezvousManager(),
+ }
self.job_metric_collector = self._create_metric_collector_if_needed(
args
)
@@ -79,7 +85,7 @@ def _create_master_grpc_service(self, port, params: JobArgs):
self.task_manager,
self.job_manager,
self.speed_monitor,
- self.rdzv_manager,
+ self.rdzv_managers,
self.job_metric_collector,
self.elastic_ps_service,
self.sync_service,
@@ -192,7 +198,8 @@ def run(self):
def _remove_not_participated_workers(self):
"""Remove workers who do not participate training."""
- workers = self.rdzv_manager.get_released_workers()
+ et_manager = self.rdzv_managers[RendezvousName.ELASTIC_TRAINING]
+ workers = et_manager.get_released_workers()
if workers:
self.job_manager.remove_not_participated_workers(workers)
diff --git a/dlrover/python/master/node/event_callback.py b/dlrover/python/master/node/event_callback.py
index 4c5679dc2..314645766 100644
--- a/dlrover/python/master/node/event_callback.py
+++ b/dlrover/python/master/node/event_callback.py
@@ -13,6 +13,7 @@
import abc
from datetime import datetime
+from typing import Dict
from dlrover.python.common.constants import (
JobExitReason,
@@ -210,7 +211,9 @@ def __init__(self, master):
super(AllReduceNodeHandlingCallback, self).__init__()
self._master = master
self._speed_monitor: SpeedMonitor = self._master.speed_monitor
- self._rdzv_manager: RendezvousManager = self._master.rdzv_manager
+ self._rdzv_managers: Dict[
+ str, RendezvousManager
+ ] = self._master.rdzv_managers
def get_job_exit_reason(self, node: Node):
if self._master.task_manager.training_started():
@@ -228,7 +231,8 @@ def get_job_exit_reason(self, node: Node):
def on_node_started(self, node: Node, cluster_context):
if node.type == NodeType.WORKER and node.id == 0:
self._master.job_manager.start_auto_scaling()
- self._rdzv_manager.add_alive_node(node)
+ for manager in self._rdzv_managers.values():
+ manager.add_alive_node(node)
@NodeEventCallback.log_callback_exception
def on_node_succeeded(self, node: Node, cluster_context: ClusterContext):
@@ -253,14 +257,16 @@ def on_node_failed(self, node: Node, cluster_context):
[(node.type, node.id)]
)
self._speed_monitor.remove_running_worker(node.type, node.id)
- self._rdzv_manager.remove_alive_node(node)
+ for manager in self._rdzv_managers.values():
+            manager.remove_alive_node(node)
@NodeEventCallback.log_callback_exception
def on_node_deleted(self, node, cluster_context):
node.finish_time = datetime.now() # type: ignore
self._stop_job_if_needed(node)
self._speed_monitor.remove_running_worker(node.type, node.id)
- self._rdzv_manager.remove_alive_node(node)
+ for manager in self._rdzv_managers.values():
+            manager.remove_alive_node(node)
def _stop_job_if_needed(self, node: Node):
if node.critical and node.is_unrecoverable_failure():
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index 121d9c6d4..ea37bb16e 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -14,13 +14,18 @@
import threading
import time
from concurrent import futures
-from typing import List, Optional
+from typing import Dict, List
import grpc
from google.protobuf import empty_pb2
from dlrover.proto import elastic_training_pb2, elastic_training_pb2_grpc
-from dlrover.python.common.constants import GRPC, NodeType, TrainingLoopStatus
+from dlrover.python.common.constants import (
+ GRPC,
+ NodeType,
+ RendezvousName,
+ TrainingLoopStatus,
+)
from dlrover.python.common.global_context import Context
from dlrover.python.common.log import default_logger as logger
from dlrover.python.master.elastic_training.elastic_ps import ElasticPsService
@@ -53,7 +58,7 @@ def __init__(
task_manager: TaskManager,
job_manager: JobManager,
speed_monitor: SpeedMonitor,
- rdzv_manager: Optional[RendezvousManager],
+ rdzv_managers: Dict[str, RendezvousManager],
job_metric_collector=None,
elastic_ps_service=None,
sync_service=None,
@@ -62,7 +67,7 @@ def __init__(
self._task_manager = task_manager
self._job_manager = job_manager
self._speed_monitor = speed_monitor
- self._rdzv_manager = rdzv_manager
+ self._rdzv_managers = rdzv_managers
self._kv_store = KVStoreService()
self._job_metric_collector: JobMetricCollector = job_metric_collector
self._elastic_ps_service: ElasticPsService = elastic_ps_service
@@ -123,18 +128,7 @@ def get_task(self, request, _):
res.shard.end = task.shard.end
res.shard.indices.extend(task.shard.record_indices)
elif not dataset.completed():
- # If the todo and doing tasks are not empty,
- # Otherwise if the callback list is not empty,
- # we are trying to pop and invoke the callback.
- # Then the master tells the worker to wait
- # in case of new tasks later.
- if self._rdzv_manager:
- # If there is no more task, master only send wait task to
- # the last worker and other workers exit.
- if len(self._job_manager.get_running_workers()) == 1:
- res.type = elastic_training_pb2.WAIT
- else:
- res.type = elastic_training_pb2.WAIT
+ res.type = elastic_training_pb2.WAIT
with self._lock:
self._task_manager.reset_worker_start_task_time(request.worker_id)
return res
@@ -361,8 +355,6 @@ def get_dataset_shard_num(self, request, _):
return res
def report_prestop(self, request, _):
- worker_host = request.worker_host
- self._rdzv_manager.report_prestop(worker_host)
return empty_pb2.Empty()
def join_sync(self, request, _):
@@ -388,32 +380,37 @@ def barrier(self, request, _):
return res
def get_comm_world(self, request, _):
- nodes = self._rdzv_manager.get_comm_world()
+ rdzv_manager = self._rdzv_managers[request.rdzv_name]
+ group, nodes = rdzv_manager.get_comm_world(request.node_id)
res = elastic_training_pb2.RendezvousState()
+ res.group = group
for node_id, worker_num in nodes.items():
res.world[node_id] = worker_num
return res
def join_rendezvous(self, request, _):
- round = self._rdzv_manager.join_rendezvous(
- request.id, request.local_world_size
+ rdzv_manager = self._rdzv_managers[request.rdzv_name]
+ round = rdzv_manager.join_rendezvous(
+ request.node_id, request.local_world_size
)
res = elastic_training_pb2.RendezvousState()
res.round = round
return res
def num_nodes_waiting(self, request, _):
- waiting_num = self._rdzv_manager.num_nodes_waiting()
+ rdzv_manager = self._rdzv_managers[request.rdzv_name]
+ waiting_num = rdzv_manager.num_nodes_waiting()
res = elastic_training_pb2.RendezvousState()
res.waiting_num = waiting_num
return res
def report_rdzv_params(self, request, _):
- self._rdzv_manager.update_rdzv_params(
- min_nodes=request.min_nodes,
-            max_nodes=request.max_nodes,
- waiting_timeout=request.waiting_timeout,
- )
+ for manager in self._rdzv_managers.values():
+ manager.update_rdzv_params(
+ min_nodes=request.min_nodes,
+                max_nodes=request.max_nodes,
+ waiting_timeout=request.waiting_timeout,
+ )
res = elastic_training_pb2.Response()
res.success = True
return res
@@ -437,13 +434,28 @@ def report_failure(self, request, _):
res.success = True
return res
+ def network_check_success(self, request, _):
+ res = elastic_training_pb2.Response()
+ net_rdzv_manager = self._rdzv_managers[RendezvousName.NETWORK_CHECK]
+ success = net_rdzv_manager.network_check_success()
+ res.success = success
+ return res
+
+ def report_network_check_result(self, request, _):
+ res = elastic_training_pb2.Response()
+ node_id = request.id
+ net_rdzv_manager = self._rdzv_managers[RendezvousName.NETWORK_CHECK]
+ net_rdzv_manager.report_network_check_result(node_id, request.normal)
+ res.success = True
+ return res
+
def create_master_service(
port,
task_manager,
job_manager,
speed_monitor,
- rdzv_service,
+ rdzv_managers,
job_metric_collector,
elastic_ps_service,
sync_service,
@@ -464,7 +476,7 @@ def create_master_service(
task_manager=task_manager,
job_manager=job_manager,
speed_monitor=speed_monitor,
- rdzv_manager=rdzv_service,
+ rdzv_managers=rdzv_managers,
job_metric_collector=job_metric_collector,
elastic_ps_service=elastic_ps_service,
sync_service=sync_service,
| Network check before starting training processes.
Execute allgather to test the network of all nodes and find the bad nodes, as sketched below.
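A minimal per-node sketch of such a check, assuming torch.distributed is
already initialized for the group under test (the tensor size and the helper
name are illustrative, not the exact kernel DLRover runs):

    import torch
    import torch.distributed as dist

    def check_network(world_size: int) -> bool:
        # One allgather across the group; a broken peer surfaces as an
        # exception (or a hang bounded by the process-group timeout).
        try:
            local = torch.ones(1024)
            outputs = [torch.zeros(1024) for _ in range(world_size)]
            dist.all_gather(outputs, local)
            return True
        except Exception:
            return False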
| 2023-06-30T02:25:46 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-458 | 3cb888fe231fe0ce8f75cce774a5b47a961d9252 | diff --git a/dlrover/examples/torch_mnist_master_backend_job.yaml b/dlrover/examples/torch_mnist_master_backend_job.yaml
index 8632ca006..87914c524 100644
--- a/dlrover/examples/torch_mnist_master_backend_job.yaml
+++ b/dlrover/examples/torch_mnist_master_backend_job.yaml
@@ -8,20 +8,20 @@ spec:
optimizeMode: single-job
replicaSpecs:
worker:
- replicas: 2
+ replicas: 4
template:
spec:
restartPolicy: Never
containers:
- name: main
# yamllint disable-line rule:line-length
- image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch113-mnist
+ image: registry.cn-hangzhou.aliyuncs.com/intell-ai/dlrover:torch201-mnist-test
imagePullPolicy: Always
command:
- /bin/bash
- -c
- - "torchrun --nnodes=$WORKER_NUM --nproc_per_node=1 \
- --max_restarts=3 --rdzv_backend=dlrover_master \
+ - "dlrover-run --nnodes=$WORKER_NUM \
+ --nproc_per_node=2 --max_restarts=3 \
model_zoo/pytorch/mnist_cnn.py --num_epochs 2 \
--training_data /data/mnist_png/training/ \
--validation_data /data/mnist_png/testing/"
diff --git a/dlrover/proto/elastic_training.proto b/dlrover/proto/elastic_training.proto
index 2aeb558d5..f55bccc74 100644
--- a/dlrover/proto/elastic_training.proto
+++ b/dlrover/proto/elastic_training.proto
@@ -170,6 +170,7 @@ message NodeMeta {
string gpu_type = 6;
int32 id = 7;
int32 rank = 8;
+ int32 local_world_size = 9;
}
message NodeEvent {
@@ -202,12 +203,15 @@ message BarrierRequest {
}
message RendezvousState {
- string rdzv_key = 1;
- bytes state_bits = 2;
- int32 token = 3;
- map<string, int32> participants = 4;
- repeated string wait_list = 5;
- string host_name = 6;
+ map<int32, int32> world = 1;
+ int32 waiting_num = 2;
+ int32 round = 3;
+}
+
+message RendezvousParams {
+ int32 min_nodes = 1;
+ int32 max_nodes = 2;
+ int32 waiting_timeout = 3;
}
message KeyValuePair {
@@ -269,8 +273,10 @@ service Master {
returns (google.protobuf.Empty);
// rpc for torch elastic
- rpc get_rdzv_state(RendezvousState) returns (RendezvousState);
- rpc set_rdzv_state(RendezvousState) returns (Response);
+ rpc get_comm_world(google.protobuf.Empty) returns (RendezvousState);
+ rpc join_rendezvous(NodeMeta) returns (RendezvousState);
+ rpc num_nodes_waiting(google.protobuf.Empty) returns (RendezvousState);
+ rpc report_rdzv_params(RendezvousParams) returns (Response);
rpc kv_store_set(KeyValuePair) returns (Response);
rpc kv_store_get(KeyValuePair) returns (KeyValuePair);
rpc report_failure(NodeFailure) returns (Response);
diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index 2b441b74b..ad64d7a59 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -328,36 +328,32 @@ def get_running_nodes(self):
return response.nodes
@retry_grpc_request
- def get_rdzv_state(self, key):
- request = elastic_training_pb2.RendezvousState()
- request.rdzv_key = key
- request.host_name = self._host_name
- res = self._stub.get_rdzv_state(request)
- return res.state_bits, res.token
+ def num_nodes_waiting(self):
+ request = empty_pb2.Empty()
+ response = self._stub.num_nodes_waiting(request)
+ return response.waiting_num
@retry_grpc_request
- def set_rdzv_state(self, key, state_bits, token, participants, wait_list):
- """Set RendezvousState into the master store.
+ def join_rendezvous(self, node_id, local_world_size):
+ request = elastic_training_pb2.NodeMeta()
+ request.id = node_id
+ request.local_world_size = local_world_size
+ response = self._stub.join_rendezvous(request)
+ return response.round
- Args:
- The aguments are same as
- `torch.distributed.elastic.rendezvous.
- dynamic_rendezvous._RendezvousState`
- """
- request = elastic_training_pb2.RendezvousState()
- request.rdzv_key = key
- request.state_bits = state_bits
- request.token = token
- request.host_name = self._host_name
- for node, rank in participants.items():
- node_name = "{}".format(node)
- request.participants[node_name] = rank
-
- for node in wait_list:
- node_name = "{}".format(node)
- request.wait_list.append(node_name)
-
- response = self._stub.set_rdzv_state(request)
+ @retry_grpc_request
+ def get_comm_world(self):
+ request = empty_pb2.Empty()
+ response = self._stub.get_comm_world(request)
+ return response.world
+
+ @retry_grpc_request
+ def report_rdzv_params(self, min_nodes, max_nodes, waiting_timeout):
+ request = elastic_training_pb2.RendezvousParams()
+ request.min_nodes = min_nodes
+ request.max_nodes = max_nodes
+ request.waiting_timeout = waiting_timeout
+ response = self._stub.report_rdzv_params(request)
return response.success
@retry_grpc_request
@@ -549,18 +545,6 @@ def report_model_metric(self, *args):
def report_used_resource(self, memory, cpu):
return empty_pb2.Empty()
- def get_rdzv_state(self, key):
- state_bits = self._rdzv_states[key]
- token = self._rdzv_tokens[key]
- return state_bits, token
-
- def set_rdzv_state(self, key, state_bits, token, paricipant_num, wait_num):
- if state_bits == self._rdzv_states.get(key, None):
- return False
- self._rdzv_states[key] = state_bits
- self._rdzv_tokens[key] = token
- return True
-
def kv_store_set(self, key, value):
self._kv_store[key] = value
return True
diff --git a/dlrover/python/elastic_agent/torch/rdzv_backend.py b/dlrover/python/elastic_agent/torch/rdzv_backend.py
deleted file mode 100644
index 389852157..000000000
--- a/dlrover/python/elastic_agent/torch/rdzv_backend.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2023 The DLRover Authors. All rights reserved.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pickle
-from typing import Optional, Tuple
-
-import grpc
-from torch.distributed import Store
-from torch.distributed.elastic.rendezvous.api import (
- RendezvousConnectionError,
- RendezvousHandler,
- RendezvousParameters,
-)
-from torch.distributed.elastic.rendezvous.dynamic_rendezvous import (
- RendezvousBackend,
- Token,
- create_handler,
-)
-from torch.distributed.elastic.rendezvous.registry import handler_registry
-
-from dlrover.python.elastic_agent.master_client import (
- GlobalMasterClient,
- MasterClient,
-)
-from dlrover.python.elastic_agent.torch.master_kv_store import MasterKVStore
-
-
-class DlroverRendezvousBackend(RendezvousBackend):
- """Represents an etcd-based rendezvous backend.
-
- Args:
- client:
- The ``master_client.MasterClient`` instance to use
- to communicate with the master server.
- run_id:
- The run id of the rendezvous.
- """
-
- _client: MasterClient
- _key: str
-
- def __init__(self, run_id: str, key_prefix) -> None:
- if not run_id:
- raise ValueError("The run id must be a non-empty string.")
-
- self._client = GlobalMasterClient.MASTER_CLIENT
- self._key = key_prefix + run_id
-
- @property
- def name(self) -> str:
- """See base class."""
- return "dlrover_master"
-
- def get_state(self) -> Optional[Tuple[bytes, Token]]:
- """See base class."""
- try:
- result = self._client.get_rdzv_state(self._key)
- except grpc.RpcError as exc:
- raise RendezvousConnectionError(
- "The connection to job master has failed."
- "See inner exception for details."
- ) from exc
-
- new_state_bits = result[0]
- token = result[1]
- if new_state_bits == b"":
- return None
- return new_state_bits, token
-
- def set_state(
- self, state: bytes, token: Optional[Token] = None
- ) -> Optional[Tuple[bytes, Token, bool]]:
- """See base class."""
-
- def get_state():
- result = self.get_state()
- if result is not None:
- tmp = *result, False
- return tmp
- return None
-
- if token:
- try:
- token = int(token)
- except ValueError:
- return get_state()
- else:
- token = 0
- try:
- rdzv_state = pickle.loads(state)
- succeed = self._client.set_rdzv_state(
- self._key,
- state,
- token,
- rdzv_state.participants,
- rdzv_state.wait_list,
- )
-
- except grpc.RpcError as exc:
- succeed = False
- raise RendezvousConnectionError(
- "The connection to job master has failed. "
- "See inner exception for details."
- ) from exc
-
- if not succeed:
- return get_state()
-
- return state, token, succeed
-
-
-def create_backend(
- params: RendezvousParameters,
-) -> Tuple[DlroverRendezvousBackend, Store]:
- """Creates a new :py:class:`DlroverRendezvousBackend` from the specified
- parameters.
- """
-
- backend = DlroverRendezvousBackend(
- params.run_id, key_prefix="torch.elastic.rendezvous."
- )
-
- store = MasterKVStore("/torch/elastic/store")
-
- return backend, store
-
-
-def _create_dlrover_master_handler(
- params: RendezvousParameters,
-) -> RendezvousHandler:
-
- backend, store = create_backend(params)
- return create_handler(store, backend, params)
-
-
-def register_dlrover_backend():
- handler_registry.register(
- "dlrover_master",
- _create_dlrover_master_handler,
- )
diff --git a/dlrover/python/master/elastic_training/rdzv_manager.py b/dlrover/python/master/elastic_training/rdzv_manager.py
new file mode 100644
index 000000000..cbdd407cb
--- /dev/null
+++ b/dlrover/python/master/elastic_training/rdzv_manager.py
@@ -0,0 +1,158 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from threading import Lock
+from typing import Dict
+
+from dlrover.python.common.log import default_logger as logger
+from dlrover.python.common.node import Node
+
+
+class RendezvousParameters(object):
+ """Holds the parameters to construct rendezvous.
+ Args:
+ min_nodes:
+ The minimum number of nodes to admit to the rendezvous.
+ max_nodes:
+ The maximum number of nodes to admit to the rendezvous.
+ waiting_timeout:
+ An additional wait amount before completing the rendezvous once
+ the rendezvous has the minimum number of required participants.
+            Defaults to 30s.
+ """
+
+ def __init__(
+ self,
+ min_nodes: int,
+ max_nodes: int,
+ waiting_timeout=30,
+ ):
+ self.min_nodes = min_nodes
+ self.max_nodes = max_nodes
+ self.waiting_timeout = waiting_timeout
+
+
+class RendezvousManager(object):
+ """RendezvousManager runs on the DLRover master. The manager
+ add workers into a waiting list and completes a rendezvous
+ if the number of workers in the wait list is beyond the minimum
+ nodes.
+
+ The node report its ID and local_world_size to the manager.
+ The manager will add the node into a waiting list to join the rendezvous
+ and freeze the rendezvous if the size of waiting list is equal
+ the max nodes or is bigger than the min nodes. Then the node will
+ periodically query the world which contains
+ all nodes like {0: 8, 1: 8, 2:8}. The key in the world dictionary
+ is the node ID and the value is the local world size. In an
+ Elasticjob of DLRover, the node has an unique node ID.
+ """
+
+ def __init__(self):
+ self._lock = Lock()
+ self._alive_nodes = set()
+ self._scale_down_ts = 0
+ self._released_workers = []
+ self._waiting_nodes: Dict[int, int] = {}
+ self._rdzv_nodes = {}
+ self._lastcall_time = 0
+ self._rdzv_params = RendezvousParameters(0, 0)
+ self._rdzv_round = 0
+
+    def update_rdzv_params(self, min_nodes, max_nodes, waiting_timeout):
+        """Update rendezvous parameters."""
+        self._rdzv_params.min_nodes = min_nodes
+        self._rdzv_params.max_nodes = max_nodes
+ self._rdzv_params.waiting_timeout = waiting_timeout
+
+ def add_alive_node(self, node: Node):
+ """When a node is running, the master will add it to alive list."""
+ self._alive_nodes.add(node.id)
+ logger.info(f"Add alive worker {node.name} to Rendezvous.")
+
+ def remove_alive_node(self, node: Node):
+ """When a node is exited, the master will remove it from alive list."""
+ if node.id in self._alive_nodes:
+ self._alive_nodes.remove(node.id)
+ logger.info(f"Remove exited worker {node.name} from Rendezvous.")
+
+ def get_released_workers(self):
+ return []
+
+ def get_comm_world(self):
+ """Return the communication world if a round rendezvous is completed.
+ The rendezvous is completed if one of the following conditions
+ is satisfied:
+ 1. The size of waiting node list is equal to the max_nodes.
+ 2. The size of waiting node list is bigger than the min_nodes and
+        equal to the size of alive node list. What's more, no more workers
+        join the rendezvous within waiting_timeout.
+
+ Returns:
+ world: Dict like {0: 8, 1: 8, 2: 8} where the key is the node ID
+ and the value is the local world size of the node.
+ """
+ with self._lock:
+ rdzv_completed = False
+ if self._rdzv_nodes:
+ return self._rdzv_nodes
+ if len(self._waiting_nodes) == self._rdzv_params.max_nodes:
+ rdzv_completed = True
+ else:
+ waiting_num = len(self._waiting_nodes)
+ alive_num = len(self._alive_nodes)
+ waiting_time = time.time() - self._lastcall_time
+ rdzv_completed = (
+ waiting_num >= self._rdzv_params.min_nodes
+ and waiting_num == alive_num
+ and waiting_time >= self._rdzv_params.waiting_timeout
+ )
+
+ if rdzv_completed:
+ self._rdzv_nodes = dict(sorted(self._waiting_nodes.items()))
+ self._waiting_nodes = dict()
+ self._lastcall_time = 0
+ logger.info(
+ f"Completed {self._rdzv_round} round "
+ f"rendezvous {self._rdzv_nodes}"
+ )
+ self._rdzv_round += 1
+
+ return self._rdzv_nodes
+
+ def join_rendezvous(self, node_id, local_world_size):
+ """The node joins the current rond rendezvous.
+ Args:
+ node_id: the node ID which is unique in an ElasticJob of DLrover.
+ local_world_size: the local world size of a node.
+
+ Returns:
+ int: the number of rendezvous round.
+ """
+ with self._lock:
+ if node_id in self._waiting_nodes:
+ return
+ self._waiting_nodes[node_id] = local_world_size
+ self._rdzv_nodes = {}
+ if len(self._waiting_nodes) >= self._rdzv_params.min_nodes:
+ if self._lastcall_time == 0:
+ self._lastcall_time = time.time()
+ return self._rdzv_round
+
+ def num_nodes_waiting(self):
+ """The number of waiting nodes. The agent of a node will re-join
+ a rendezvous if it finds there are waiting nodes.
+ """
+ with self._lock:
+ return len(self._waiting_nodes)
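+
+
+# Usage sketch (illustrative values; in a job the gRPC servicer drives
+# these calls on behalf of the agents):
+#   manager = RendezvousManager()
+#   manager.update_rdzv_params(min_nodes=2, max_nodes=2, waiting_timeout=30)
+#   manager.join_rendezvous(0, local_world_size=8)
+#   manager.join_rendezvous(1, local_world_size=8)
+#   manager.get_comm_world()  # -> {0: 8, 1: 8} once both nodes joined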
diff --git a/dlrover/python/master/elastic_training/rdzv_service.py b/dlrover/python/master/elastic_training/rdzv_service.py
deleted file mode 100644
index aa744f2de..000000000
--- a/dlrover/python/master/elastic_training/rdzv_service.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2023 The DLRover Authors. All rights reserved.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from threading import Lock
-from typing import Any, Dict, List, Optional, Tuple
-
-from dlrover.python.common.log import default_logger as logger
-from dlrover.python.common.node import Node
-from dlrover.python.master.elastic_training.kv_store_service import (
- KVStoreService,
-)
-
-
-class RendezvousState(object):
- def __init__(self) -> None:
- self.latest_state_bits = b""
- self.completed_state_bits = b""
- self.participants: Dict[str, int] = {}
- self.wait_list: List[str] = []
-
-
-class TorchRendezvousService(object):
- """TorchRendezvousService runs on the DLRover master.
- The service can update the rendezvous states according to
- the node status.
- """
-
- def __init__(self):
- self.kv_store = KVStoreService()
- self._lock = Lock()
- self._rdzv_states: Dict[str, RendezvousState] = {}
- self._token = -1
- self._alive_workers = []
- self._scale_down_ts = 0
- self._released_workers = []
-
- def add_alive_worker(self, worker: Node):
- self._alive_workers.append(worker.name)
- self._alive_workers = sorted(self._alive_workers)
- logger.info(f"Add alive worker {worker.name} to Rendezvous.")
- self.kv_store.clear()
-
- def remove_alive_worker(self, worker: Node):
- if worker.name in self._alive_workers:
- self._alive_workers.remove(worker.name)
- self._scale_down_ts = int(time.time())
- self.kv_store.clear()
-
- def get_released_workers(self):
- released_workers = self._released_workers
- self._released_workers = []
- return released_workers
-
- def start(self):
- pass
-
- def set_state(
- self,
- key,
- state_bits: bytes,
- token: Optional[Any],
- participants,
- wait_list,
- host_name,
- ):
- """Set the _RendezvousState into the store in the master.
- Returns:
- A tuple of the serialized rendezvous state, its fencing token, and
- a boolean value indicating whether our set attempt succeeded.
- """
- if host_name not in self._alive_workers:
- logger.info(
- "Host %s is not in alive worker %s",
- host_name,
- self._alive_workers,
- )
- return False
- with self._lock:
- self._rdzv_states.setdefault(key, RendezvousState())
- if self._rdzv_states[key].latest_state_bits == state_bits:
- return True
- rdzv_state = self._rdzv_states[key]
- rdzv_state.latest_state_bits = state_bits
- rdzv_state.participants = participants
- rdzv_state.wait_list = wait_list
- self._token += 1
- return True
-
- def get_state(self, worker_name, key) -> Optional[Tuple[bytes, Any]]:
- """Return a new state only if len(_RendezvousState.participants)
- + len(_RendezvousState.wait_list) is base 2. Then, we can
- keep the fixed batch size by setting backward_passes_per_step
- in the worker.
- Returns:
- A tuple of the encoded rendezvous state and its fencing token or
- `None` if no state is found in the backend.
- """
- with self._lock:
- completed_state_bits = b""
- if key not in self._rdzv_states:
- return completed_state_bits, self._token
-
- rdzv_state = self._rdzv_states[key]
- rdzv_state.completed_state_bits = rdzv_state.latest_state_bits
- completed_state_bits = rdzv_state.completed_state_bits
- return completed_state_bits, self._token
diff --git a/dlrover/python/master/master.py b/dlrover/python/master/master.py
index 7aa3435b6..40170d6be 100644
--- a/dlrover/python/master/master.py
+++ b/dlrover/python/master/master.py
@@ -22,8 +22,8 @@
)
from dlrover.python.common.log import default_logger as logger
from dlrover.python.master.elastic_training.elastic_ps import ElasticPsService
-from dlrover.python.master.elastic_training.rdzv_service import (
- TorchRendezvousService,
+from dlrover.python.master.elastic_training.rdzv_manager import (
+ RendezvousManager,
)
from dlrover.python.master.elastic_training.sync_service import SyncService
from dlrover.python.master.monitor.speed_monitor import SpeedMonitor
@@ -61,7 +61,7 @@ def __init__(self, port, args: JobArgs):
if args.enable_dynamic_sharding
else None
)
- self.rdzv_service = TorchRendezvousService()
+ self.rdzv_manager = RendezvousManager()
self.job_metric_collector = self._create_metric_collector_if_needed(
args
)
@@ -79,7 +79,7 @@ def _create_master_grpc_service(self, port, params: JobArgs):
self.task_manager,
self.job_manager,
self.speed_monitor,
- self.rdzv_service,
+ self.rdzv_manager,
self.job_metric_collector,
self.elastic_ps_service,
self.sync_service,
@@ -115,8 +115,6 @@ def prepare(self):
# Start the components one by one
if self.task_manager:
self.task_manager.start()
- if self.rdzv_service:
- self.rdzv_service.start()
if self.job_manager:
self.job_manager.start()
@@ -194,7 +192,7 @@ def run(self):
def _remove_not_participated_workers(self):
"""Remove workers who do not participate training."""
- workers = self.rdzv_service.get_released_workers()
+ workers = self.rdzv_manager.get_released_workers()
if workers:
self.job_manager.remove_not_participated_workers(workers)
diff --git a/dlrover/python/master/node/event_callback.py b/dlrover/python/master/node/event_callback.py
index ede5d04a6..4c5679dc2 100644
--- a/dlrover/python/master/node/event_callback.py
+++ b/dlrover/python/master/node/event_callback.py
@@ -20,8 +20,8 @@
NodeType,
)
from dlrover.python.common.log import default_logger as logger
-from dlrover.python.master.elastic_training.rdzv_service import (
- TorchRendezvousService,
+from dlrover.python.master.elastic_training.rdzv_manager import (
+ RendezvousManager,
)
from dlrover.python.master.monitor.speed_monitor import SpeedMonitor
from dlrover.python.master.watcher.base_watcher import Node
@@ -210,7 +210,7 @@ def __init__(self, master):
super(AllReduceNodeHandlingCallback, self).__init__()
self._master = master
self._speed_monitor: SpeedMonitor = self._master.speed_monitor
- self._rdzv_service: TorchRendezvousService = self._master.rdzv_service
+ self._rdzv_manager: RendezvousManager = self._master.rdzv_manager
def get_job_exit_reason(self, node: Node):
if self._master.task_manager.training_started():
@@ -228,7 +228,7 @@ def get_job_exit_reason(self, node: Node):
def on_node_started(self, node: Node, cluster_context):
if node.type == NodeType.WORKER and node.id == 0:
self._master.job_manager.start_auto_scaling()
- self._rdzv_service.add_alive_worker(node)
+ self._rdzv_manager.add_alive_node(node)
@NodeEventCallback.log_callback_exception
def on_node_succeeded(self, node: Node, cluster_context: ClusterContext):
@@ -253,14 +253,14 @@ def on_node_failed(self, node: Node, cluster_context):
[(node.type, node.id)]
)
self._speed_monitor.remove_running_worker(node.type, node.id)
- self._rdzv_service.remove_alive_worker(node)
+ self._rdzv_manager.remove_alive_node(node)
@NodeEventCallback.log_callback_exception
def on_node_deleted(self, node, cluster_context):
node.finish_time = datetime.now() # type: ignore
self._stop_job_if_needed(node)
self._speed_monitor.remove_running_worker(node.type, node.id)
- self._rdzv_service.remove_alive_worker(node)
+ self._rdzv_manager.remove_alive_node(node)
def _stop_job_if_needed(self, node: Node):
if node.critical and node.is_unrecoverable_failure():
diff --git a/dlrover/python/master/node/job_manager.py b/dlrover/python/master/node/job_manager.py
index 973412b77..3e1be355a 100644
--- a/dlrover/python/master/node/job_manager.py
+++ b/dlrover/python/master/node/job_manager.py
@@ -590,10 +590,6 @@ def update_node_service_addr(self, node_type, node_id, service_addr):
node.is_released = False
self._job_nodes[node_type][node_id] = node
- def log_rank_zero_node(self, node_type, node_id, node_rank):
- node = self._job_nodes[node_type][node_id]
- logger.info("Rank %s: %s", node_rank, node.name)
-
def get_cur_cluster_ps(self):
"""Get PS nodes in the current training cluster."""
logger.info("job nodes are {}".format(self._job_nodes))
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index efa54d41f..121d9c6d4 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -24,8 +24,11 @@
from dlrover.python.common.global_context import Context
from dlrover.python.common.log import default_logger as logger
from dlrover.python.master.elastic_training.elastic_ps import ElasticPsService
-from dlrover.python.master.elastic_training.rdzv_service import (
- TorchRendezvousService,
+from dlrover.python.master.elastic_training.kv_store_service import (
+ KVStoreService,
+)
+from dlrover.python.master.elastic_training.rdzv_manager import (
+ RendezvousManager,
)
from dlrover.python.master.elastic_training.sync_service import SyncService
from dlrover.python.master.monitor.speed_monitor import SpeedMonitor
@@ -50,7 +53,7 @@ def __init__(
task_manager: TaskManager,
job_manager: JobManager,
speed_monitor: SpeedMonitor,
- rdzv_service: Optional[TorchRendezvousService],
+ rdzv_manager: Optional[RendezvousManager],
job_metric_collector=None,
elastic_ps_service=None,
sync_service=None,
@@ -59,7 +62,8 @@ def __init__(
self._task_manager = task_manager
self._job_manager = job_manager
self._speed_monitor = speed_monitor
- self._rdzv_serivce = rdzv_service
+ self._rdzv_manager = rdzv_manager
+ self._kv_store = KVStoreService()
self._job_metric_collector: JobMetricCollector = job_metric_collector
self._elastic_ps_service: ElasticPsService = elastic_ps_service
self._sync_service: SyncService = sync_service
@@ -124,7 +128,7 @@ def get_task(self, request, _):
# we are trying to pop and invoke the callback.
# Then the master tells the worker to wait
# in case of new tasks later.
- if self._rdzv_serivce:
+ if self._rdzv_manager:
# If there is no more task, master only send wait task to
# the last worker and other workers exit.
if len(self._job_manager.get_running_workers()) == 1:
@@ -291,11 +295,6 @@ def update_node_status(self, request, _):
self._job_manager.update_node_service_addr(
node_type, node_id, server_addr
)
-
- node_rank = request.rank
- if node_rank >= 0:
- self._job_manager.log_rank_zero_node(node_type, node_id, node_rank)
-
response = elastic_training_pb2.Response()
response.success = True
return response
@@ -363,7 +362,7 @@ def get_dataset_shard_num(self, request, _):
def report_prestop(self, request, _):
worker_host = request.worker_host
- self._rdzv_serivce.report_prestop(worker_host)
+ self._rdzv_manager.report_prestop(worker_host)
return empty_pb2.Empty()
def join_sync(self, request, _):
@@ -388,31 +387,39 @@ def barrier(self, request, _):
res.success = self._sync_service.barrier(request.barrier_name)
return res
- def get_rdzv_state(self, request, _):
- rdzv_key = request.rdzv_key
- worker_name = request.host_name
- state_bits, token = self._rdzv_serivce.get_state(worker_name, rdzv_key)
+ def get_comm_world(self, request, _):
+ nodes = self._rdzv_manager.get_comm_world()
res = elastic_training_pb2.RendezvousState()
- res.rdzv_key = rdzv_key
- res.state_bits = state_bits
- res.token = token
+ for node_id, worker_num in nodes.items():
+ res.world[node_id] = worker_num
return res
- def set_rdzv_state(self, request, _):
- succeed = self._rdzv_serivce.set_state(
- request.rdzv_key,
- request.state_bits,
- request.token,
- request.participants,
- request.wait_list,
- request.host_name,
+ def join_rendezvous(self, request, _):
+ round = self._rdzv_manager.join_rendezvous(
+ request.id, request.local_world_size
+ )
+ res = elastic_training_pb2.RendezvousState()
+ res.round = round
+ return res
+
+ def num_nodes_waiting(self, request, _):
+ waiting_num = self._rdzv_manager.num_nodes_waiting()
+ res = elastic_training_pb2.RendezvousState()
+ res.waiting_num = waiting_num
+ return res
+
+ def report_rdzv_params(self, request, _):
+ self._rdzv_manager.update_rdzv_params(
+ min_nodes=request.min_nodes,
+            max_nodes=request.max_nodes,
+ waiting_timeout=request.waiting_timeout,
)
res = elastic_training_pb2.Response()
- res.success = succeed
+ res.success = True
return res
def kv_store_set(self, request, _):
- self._rdzv_serivce.kv_store.set(request.key, request.value)
+ self._kv_store.set(request.key, request.value)
res = elastic_training_pb2.Response()
res.success = True
return res
@@ -420,7 +427,7 @@ def kv_store_set(self, request, _):
def kv_store_get(self, request, _):
res = elastic_training_pb2.KeyValuePair()
res.key = request.key
- res.value = self._rdzv_serivce.kv_store.get(request.key)
+ res.value = self._kv_store.get(request.key)
return res
def report_failure(self, request, _):
@@ -457,7 +464,7 @@ def create_master_service(
task_manager=task_manager,
job_manager=job_manager,
speed_monitor=speed_monitor,
- rdzv_service=rdzv_service,
+ rdzv_manager=rdzv_service,
job_metric_collector=job_metric_collector,
elastic_ps_service=elastic_ps_service,
sync_service=sync_service,
diff --git a/dlrover/trainer/torch/elastic.py b/dlrover/trainer/torch/elastic.py
index ca8e8cd7f..0550b4c50 100644
--- a/dlrover/trainer/torch/elastic.py
+++ b/dlrover/trainer/torch/elastic.py
@@ -14,20 +14,13 @@
import contextlib
import os
import socket
-import time
from contextlib import contextmanager
from typing import Any, Dict
import torch
import torch.distributed as dist
-from dlrover.python.common.constants import NodeEnv
from dlrover.python.common.log import default_logger as logger
-from dlrover.python.elastic_agent.master_client import GlobalMasterClient
-
-_MASTER_ADDR_KEY = "MASTER_ADDR"
-_MASTER_PORT_KEY = "MASTER_PORT"
-_MASTER_ENDPOINT_KEY = "MASTER_ENDPOINT"
def find_free_port() -> int:
@@ -39,55 +32,6 @@ def find_free_port() -> int:
return sockname[1]
-def set_master_addr(timeout=120):
- """Dynamically setup MASTER_ADDR as the ip of pod with rank=0 because
- the pod with rank-0 may change in an elastic training job.
- Args:
- timeout: timeout to wait the rank-0 node broadcase MASTER_ADDR,
- default 120s.
- """
- if NodeEnv.DLROVER_MASTER_ADDR not in os.environ:
- return
- master_client = GlobalMasterClient.MASTER_CLIENT
- rank = os.getenv("RANK", None)
- rdzv_endpoint = os.getenv("RDZV_ENDPOINT", "")
- if rank is not None:
- if rank == "0":
- host_name = socket.gethostname()
- local_ip = socket.gethostbyname(host_name)
- master_client.kv_store_set(_MASTER_ENDPOINT_KEY, local_ip.encode())
- logger.info("Broadcast master endpoint %s", local_ip)
-
- start_time = time.time()
- while True:
- endpoint = master_client.kv_store_get(_MASTER_ENDPOINT_KEY)
- if endpoint:
- endpoint = endpoint.decode()
- break
- if time.time() - start_time > timeout:
- logger.warning(
- "Timeout %s to wait rank 0 to broadcast MASTER_ADDR",
- timeout,
- )
- break
- logger.info("Wait rank 0 to broadcast the master endpoint.")
- time.sleep(3)
- if endpoint:
- os.environ[_MASTER_ADDR_KEY] = endpoint
- elif rdzv_endpoint:
- os.environ[_MASTER_ADDR_KEY] = rdzv_endpoint
- group_rank = os.getenv("GROUP_RANK", None)
- local_rank = os.getenv("LOCAL_RANK", None)
- if local_rank == "0" and group_rank is not None:
- # Only one process to report node status.
- master_client.report_node_status(group_rank)
- logger.info(
- "MASTER_ADDR=%s MASTER_PORT=%s",
- os.environ[_MASTER_ADDR_KEY],
- os.environ[_MASTER_PORT_KEY],
- )
-
-
def get_rank():
rank = 0
if dist.is_initialized():
diff --git a/dlrover/trainer/torch/elastic_run.py b/dlrover/trainer/torch/elastic_run.py
index 06ef44666..619293f4b 100644
--- a/dlrover/trainer/torch/elastic_run.py
+++ b/dlrover/trainer/torch/elastic_run.py
@@ -11,27 +11,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import functools
import json
+import os
+import socket
+import tempfile
import time
import uuid
from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timedelta
from typing import Any, Callable, Dict, List, Optional, Union
-import torch.distributed.elastic.rendezvous.registry as rdzv_registry
import torch.distributed.elastic.timer as timer
+from torch.distributed import PrefixStore, Store
from torch.distributed.elastic import events, metrics
from torch.distributed.elastic.agent.server.api import (
DEFAULT_ROLE,
RunResult,
+ Worker,
WorkerGroup,
WorkerSpec,
WorkerState,
+ _get_fq_hostname,
+ _RoleInstanceInfo,
)
from torch.distributed.elastic.agent.server.local_elastic_agent import (
LocalElasticAgent,
)
from torch.distributed.elastic.metrics import put_metric
+from torch.distributed.elastic.metrics.api import prof
from torch.distributed.elastic.multiprocessing import PContext, SignalException
from torch.distributed.elastic.multiprocessing.errors import (
ChildFailedError,
@@ -40,20 +48,110 @@
)
from torch.distributed.elastic.rendezvous import RendezvousParameters
from torch.distributed.elastic.rendezvous.api import RendezvousHandler
-from torch.distributed.elastic.utils.logging import get_logger
-from torch.distributed.launcher.api import (
- LaunchConfig,
- _get_addr_and_port,
- _get_entrypoint_name,
-)
+from torch.distributed.launcher.api import LaunchConfig, _get_entrypoint_name
from torch.distributed.run import config_from_args, parse_args
+from dlrover.python.common.constants import NodeEnv
+from dlrover.python.common.log import default_logger as logger
from dlrover.python.elastic_agent.master_client import GlobalMasterClient
+from dlrover.python.elastic_agent.torch.master_kv_store import MasterKVStore
__all__ = ["LaunchConfig", "elastic_launch", "launch_agent"]
-logger = get_logger()
+class MasterRendezvousHandler(RendezvousHandler):
+ def __init__(self, node_id, rdzv_params: RendezvousParameters):
+ self._node_id = node_id
+ self._rdzv_params = rdzv_params
+ self.join_timeout = rdzv_params.get("join_timeout", 600)
+ self._client = GlobalMasterClient.MASTER_CLIENT
+ self._store = MasterKVStore("dlrover-elastic", timedelta(seconds=60))
+ lastcall_timeout = rdzv_params.get("lastcall_timeout", 60)
+ self._client.report_rdzv_params(
+ rdzv_params.min_nodes,
+ rdzv_params.max_nodes,
+ lastcall_timeout,
+ )
+
+ def get_backend(self) -> str:
+ return "dlrover-master"
+
+ def is_closed(self) -> bool:
+ return False
+
+ def set_closed(self):
+ """Marks the rendezvous as closed."""
+ pass
+
+ def join_rendezvous(self, local_world_size):
+ """The node join a rendezvous by sending its
+ ID and local world size.
+ """
+ round = self._client.join_rendezvous(self._node_id, local_world_size)
+ return round
+
+ def next_rendezvous(self, round):
+ """The handler will peroidically query the world from the master until
+ the world is not empty. The world is a dictionary like
+ like {0: 8, 1: 8, 2: 8} where the key is the node ID and the value is
+ the local world size. The handler can get its rank by the position
+ of it node ID in the world.
+ """
+ start_join = time.time()
+ node_name = os.getenv("POD_NAME", "")
+ msg = (
+ f"The node node_name attempts to join the next round "
+ f"of the rendezvous '{self._rdzv_params.run_id}'."
+ )
+ logger.info(msg)
+ while True:
+ world = self._client.get_comm_world()
+ world = dict(sorted(world.items()))
+ if world:
+ break
+ if time.time() - start_join > self.join_timeout:
+ raise TimeoutError(
+ f"Timeout {self.join_timeout}s to complete next rendezous."
+ )
+ time.sleep(3)
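+        # e.g. with world == {3: 8, 7: 8, 9: 8}, the node whose ID is 7 gets
+        # rank list(world.keys()).index(7) == 1; keying ranks off sorted node
+        # IDs keeps them stable across rendezvous rounds.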
+ rank = list(world.keys()).index(self._node_id)
+ world_size = len(world)
+ logger.info(
+ f"The node{node_name} has joined round {round} of "
+ f"the rendezvous as rank {rank} in a world of size "
+ f"{world_size}."
+ )
+ store = self._get_store(round)
+ return store, world
+
+ def _get_store(self, round) -> Store:
+ key_prefix = f"torch.rendezvous.{self._rdzv_params.run_id}.{round}"
+ return PrefixStore(key_prefix, self._store)
+
+ def num_nodes_waiting(self) -> int:
+ return self._client.num_nodes_waiting()
+
+ def get_run_id(self) -> str:
+ """Returns the run id of the rendezvous.
+
+ The run id is a user-defined id that uniquely identifies an instance of
+ a distributed application. It typically maps to a job id and is used to
+ allow nodes to join the correct distributed application.
+ """
+ return self._rdzv_params.run_id
+
+ def shutdown(self) -> bool:
+ """Closes all resources that were open for the rendezvous.
+
+ Example::
+
+ rdzv_handler = ...
+ try:
+ store, rank, world_size = rdzv_handler.next_rendezvous()
+ finally:
+ rdzv_handler.shutdown()
+ """
+ pass
@dataclass
@@ -79,6 +177,7 @@ class DLRoverElasticAgent(LocalElasticAgent):
def __init__(
self,
+ node_id,
spec: WorkerSpec,
start_method="spawn",
exit_barrier_timeout: float = 300,
@@ -87,11 +186,124 @@ def __init__(
super().__init__(spec, exit_barrier_timeout)
self._start_method = start_method
self._pcontext: Optional[PContext] = None
- self._log_dir = log_dir
+ self._log_dir = log_dir or tempfile.mkdtemp(prefix="torchelastic_")
self._worker_watchdog: Optional[timer.FileTimerServer] = None
self._reamining_fo_count: int = self._remaining_restarts
+ self._node_id = node_id
self._client = GlobalMasterClient.MASTER_CLIENT
+ @prof
+ def _rendezvous(self, worker_group: WorkerGroup) -> None:
+ r"""
+ Runs rendezvous for the workers specified by worker spec.
+ Assigns workers a new global rank and world size.
+ Updates the rendezvous store for the worker group.
+ """
+
+ spec = worker_group.spec
+ round = spec.rdzv_handler.join_rendezvous(spec.local_world_size)
+ store, world = spec.rdzv_handler.next_rendezvous(round)
+ group_world_size = len(world)
+ group_rank = list(world.keys()).index(self._node_id)
+ self._store = store
+
+ workers = self._assign_worker_ranks(self._node_id, world, spec)
+ worker_group.workers = workers
+ worker_group.store = store
+ worker_group.group_rank = group_rank
+ worker_group.group_world_size = group_world_size
+
+ if group_rank == 0:
+ self._set_master_addr_port(
+ store,
+ spec.master_addr,
+ spec.master_port,
+ spec.local_addr,
+ )
+
+ master_addr, master_port = self._get_master_addr_port(store)
+ restart_count = spec.max_restarts - self._remaining_restarts
+
+ logger.info(
+ f"[{spec.role}] Rendezvous complete for workers. Result:\n"
+ f" restart_count={restart_count}\n"
+ f" master_addr={master_addr}\n"
+ f" master_port={master_port}\n"
+ f" group_rank={group_rank}\n"
+ f" group_world_size={group_world_size}\n"
+ f" local_ranks={[worker.local_rank for worker in workers]}\n"
+ f" role_ranks={[worker.role_rank for worker in workers]}\n"
+ f" global_ranks={[worker.global_rank for worker in workers]}\n"
+ f" role_world_sizes="
+ f"{[worker.role_world_size for worker in workers]}\n"
+ f" global_world_sizes="
+ f"{[worker.world_size for worker in workers]}\n"
+ )
+
+ # pyre-fixme[56]: Pyre was not able to infer the type of the decorator
+ # `torch.distributed.elastic.metrics.prof`.
+ @prof
+ def _assign_worker_ranks(
+ self, node_id, world, spec: WorkerSpec
+ ) -> List[Worker]:
+ """
+ Determines proper ranks for worker processes. The rank assignment
+ is done according to the following algorithm:
+
+        1. Each agent builds the configuration (group_rank,
+           local_world_size) of every node from the world returned
+           by the rendezvous.
+        2. The agent performs a two-level sort of the configurations
+           using role and rank.
+        3. Determine the global rank: the global rank of the workers of
+           the current agent is the offset of the infos array up to the
+           group_rank of the agent. The offset is computed as the sum of
+           the local_world_size of all agents that have a rank less than
+           the group_rank. The workers get the ranks
+           [offset, offset + local_world_size).
+        4. Determine the role rank: the role rank is determined using the
+           algorithm in point 3, with the exception that the offset is
+           computed from the first agent that has the same role as the
+           current one and has the minimum group rank.
+ """
+
+ role_infos: List[_RoleInstanceInfo] = []
+ nodes = list(world.keys())
+ for i, local_world_size in world.items():
+ group_rank = nodes.index(i)
+ role_info = _RoleInstanceInfo(
+ spec.role, group_rank, local_world_size
+ )
+ role_infos.append(role_info)
+ group_rank = nodes.index(node_id)
+ my_role_info = role_infos[group_rank]
+ worker_world_size, worker_global_ranks = self._get_ranks(
+ role_infos, group_rank
+ )
+ role_infos = sorted(
+ role_infos, key=functools.cmp_to_key(_RoleInstanceInfo.compare)
+ )
+ role_start_idx, role_end_idx = _RoleInstanceInfo.find_role_boundaries(
+ role_infos, my_role_info.role
+ )
+ role_pos = next(
+ idx
+ for idx, role_info in enumerate(role_infos)
+ if _RoleInstanceInfo.compare(role_info, my_role_info) == 0
+ )
+ role_world_size, role_ranks = self._get_ranks(
+ role_infos, role_pos, role_start_idx, role_end_idx + 1
+ )
+ workers = []
+ for ind in range(spec.local_world_size):
+ worker = Worker(
+ local_rank=ind,
+ global_rank=worker_global_ranks[ind],
+ role_rank=role_ranks[ind],
+ world_size=worker_world_size,
+ role_world_size=role_world_size,
+ )
+ workers.append(worker)
+ return workers
+
def _invoke_run(self, role: str = DEFAULT_ROLE) -> RunResult:
# NOTE: currently only works for a single role
@@ -198,6 +410,7 @@ def launch_agent(
config.run_id = run_id
entrypoint_name = _get_entrypoint_name(entrypoint, args)
+ node_id = int(os.getenv(NodeEnv.WORKER_ID, 0))
logger.info(
f"Starting elastic_operator with launch configs:\n"
@@ -225,25 +438,29 @@ def launch_agent(
**config.rdzv_configs,
)
- master_addr, master_port = _get_addr_and_port(rdzv_parameters)
+ master_addr = os.environ.get(
+ "MY_POD_IP", socket.gethostbyname(_get_fq_hostname())
+ )
spec = WorkerSpec(
role=config.role,
local_world_size=config.nproc_per_node,
entrypoint=entrypoint,
args=tuple(args),
- rdzv_handler=rdzv_registry.get_rendezvous_handler(rdzv_parameters),
+ rdzv_handler=MasterRendezvousHandler(node_id, rdzv_parameters),
max_restarts=config.max_restarts,
monitor_interval=config.monitor_interval,
redirects=config.redirects,
tee=config.tee,
master_addr=master_addr,
- master_port=master_port,
local_addr=config.local_addr,
)
agent = DLRoverElasticAgent(
- spec=spec, start_method=config.start_method, log_dir=config.log_dir
+ node_id=node_id,
+ spec=spec,
+ start_method=config.start_method,
+ log_dir=config.log_dir,
)
shutdown_rdzv = True
diff --git a/dlrover/trainer/torch/main.py b/dlrover/trainer/torch/main.py
index 6e1e19b18..762629460 100644
--- a/dlrover/trainer/torch/main.py
+++ b/dlrover/trainer/torch/main.py
@@ -11,13 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from dlrover.python.elastic_agent.torch.rdzv_backend import (
- register_dlrover_backend,
-)
from dlrover.trainer.torch.elastic_run import main
-register_dlrover_backend()
-
-
if __name__ == "__main__":
main()
diff --git a/model_zoo/pytorch/mnist_cnn.py b/model_zoo/pytorch/mnist_cnn.py
index 5a4094b08..6aca54547 100644
--- a/model_zoo/pytorch/mnist_cnn.py
+++ b/model_zoo/pytorch/mnist_cnn.py
@@ -13,6 +13,7 @@
import argparse
import os
+from datetime import timedelta
import torch
import torch.distributed as dist
@@ -20,12 +21,13 @@
import torch.nn.functional as F
import torch.optim as optim
import torchvision
+from torch.distributed.elastic.multiprocessing.errors import record
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.optim.lr_scheduler import StepLR
from torch.utils.data import DataLoader
from torchvision import transforms
-from dlrover.trainer.torch.elastic import ElasticTrainer, set_master_addr
+from dlrover.trainer.torch.elastic import ElasticTrainer
from dlrover.trainer.torch.elastic_sampler import ElasticDistributedSampler
CHEKPOINT_PATH = "model.pt"
@@ -69,17 +71,17 @@ def cleanup():
def setup():
use_cuda = torch.cuda.is_available()
- set_master_addr()
if use_cuda:
- dist.init_process_group("nccl")
+ dist.init_process_group("nccl", timeout=timedelta(seconds=120))
torch.cuda.set_device(int(os.environ["LOCAL_RANK"]))
else:
- dist.init_process_group("gloo")
+ dist.init_process_group("gloo", timeout=timedelta(seconds=120))
rank = dist.get_rank()
local_rank = os.environ["LOCAL_RANK"]
print(f"rank {rank} is initialized local_rank = {local_rank}")
+@record
def train(args):
"""The function to run the training loop.
Args:
diff --git a/model_zoo/pytorch/mnist_lightning.py b/model_zoo/pytorch/mnist_lightning.py
index 3e5898ab5..a5502f8c4 100644
--- a/model_zoo/pytorch/mnist_lightning.py
+++ b/model_zoo/pytorch/mnist_lightning.py
@@ -21,7 +21,6 @@
from torch.utils.data import DataLoader
from torchvision import transforms
-from dlrover.trainer.torch.elastic import set_master_addr
from dlrover.trainer.torch.elastic_sampler import ElasticDistributedSampler
@@ -139,7 +138,6 @@ def load_state_dict(self, state_dict) -> None:
def train(args):
- set_master_addr()
data_module = MNISTDataModule(
args.training_data, args.validation_data, args.batch_size, args.shuffle
)
diff --git a/setup.py b/setup.py
index 487096b83..671198ba3 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@
setup(
name="dlrover",
- version="0.2.0rc0.dev0",
+ version="0.3.0",
description="An Automatic Distributed Deep Learning Framework",
long_description="DLRover helps model developers focus on model algorithm"
" itself, without taking care of any engineering stuff,"
@@ -56,6 +56,6 @@
]
},
entry_points={
- "console_scripts": ["torchrun=dlrover.trainer.torch.main:main"]
+ "console_scripts": ["dlrover-run=dlrover.trainer.torch.main:main"]
},
)
| Rendezvous is closed if some agents fail.
We found some drawbacks when using TorchElastic to train LLM models on a k8s cluster.
- The TorchElastic agent closes the rendezvous when it exits, so the agents of the remaining running Pods cannot join the next round of rendezvous because it is already closed (see the sketch after this list).
- The rendezvous state is stored on a GPU node when the rendezvous backend is c10d, so all agents fail if that node breaks down.
- The ranks of the nodes are random after a round of rendezvous completes, so users may have trouble finding rank-0, which holds some of the training logs.
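The first drawback can be illustrated with a deliberately simplified sketch. The classes below are invented stand-ins, not torchelastic's or DLRover's real APIs: a shared "closed" flag, set by any exiting agent, blocks the surviving agents from starting the next round.
```python
class RendezvousClosedError(Exception):
    pass


class SharedRendezvousState:
    """Stand-in for the shared rendezvous store (e.g. the c10d backend)."""

    def __init__(self):
        self.closed = False
        self.round = 0


class Agent:
    def __init__(self, name, state):
        self.name = name
        self.state = state

    def next_rendezvous(self):
        if self.state.closed:
            # Surviving agents end up here once any peer has exited.
            raise RendezvousClosedError(f"{self.name} cannot re-join")
        self.state.round += 1
        return self.state.round

    def shutdown(self):
        # TorchElastic agents close the shared rendezvous on the way out.
        self.state.closed = True


state = SharedRendezvousState()
agent0, agent1 = Agent("agent-0", state), Agent("agent-1", state)
agent0.next_rendezvous()
agent1.next_rendezvous()
agent0.shutdown()  # agent-0's Pod fails and its agent exits
try:
    agent1.next_rendezvous()  # agent-1 cannot start the next round
except RendezvousClosedError as err:
    print(err)
```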
| 2023-06-27T02:06:12 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-445 | 5751c13130c0668cd34971cf18351febe7a42978 | diff --git a/dlrover/proto/elastic_training.proto b/dlrover/proto/elastic_training.proto
index 5820f421a..a3376596a 100644
--- a/dlrover/proto/elastic_training.proto
+++ b/dlrover/proto/elastic_training.proto
@@ -169,6 +169,7 @@ message NodeMeta {
int32 gpu = 5;
string gpu_type = 6;
int32 id = 7;
+ int32 rank = 8;
}
message NodeEvent {
@@ -269,6 +270,6 @@ service Master {
rpc kv_store_get(KeyValuePair) returns (KeyValuePair);
rpc report_prestop(ReportPreStopRequest) returns (google.protobuf.Empty);
- rpc update_node_addr(NodeMeta) returns (Response);
+ rpc update_node_status(NodeMeta) returns (Response);
rpc update_node_event(NodeEvent) returns (google.protobuf.Empty);
}
diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index e0ef9eff4..0f56c5a0f 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -248,7 +248,8 @@ def update_node_addr(self, task_type, task_id, node_addr):
request.id = task_id
request.type = task_type
request.addr = node_addr
- res = self._stub.update_node_addr(request)
+ request.rank = -1
+ res = self._stub.update_node_status(request)
return res
@retry_grpc_request
@@ -374,6 +375,16 @@ def kv_store_get(self, key):
response = self._stub.kv_store_get(request)
return response.value
+ @retry_grpc_request
+ def report_node_status(self, rank):
+ if rank is None:
+ return
+ request = elastic_training_pb2.NodeMeta()
+ request.id = self._node_id
+ request.type = self._node_type
+ request.rank = int(rank)
+ self._stub.update_node_status(request)
+
class LocalDataset(object):
def __init__(
@@ -551,6 +562,10 @@ def kv_store_set(self, key, value):
def kv_store_get(self, key):
return self._kv_store.get(key, "")
+ def report_node_status(self, rank):
+ logger.info(f"Report rank {rank}")
+ return
+
def build_master_client(master_addr=None):
if master_addr is None:
diff --git a/dlrover/python/master/node/job_manager.py b/dlrover/python/master/node/job_manager.py
index 214c9903c..26265a8b8 100644
--- a/dlrover/python/master/node/job_manager.py
+++ b/dlrover/python/master/node/job_manager.py
@@ -578,18 +578,15 @@ def update_node_resource_usage(self, node_type, node_id, cpu, memory):
node.update_resource_usage(cpu, memory)
def update_node_service_addr(self, node_type, node_id, service_addr):
- logger.info("job nodes are {}".format(self._job_nodes))
- logger.info(node_id)
node = self._job_nodes[node_type][node_id]
- logger.info(
- "update_node_service_addr id of node is {}".format(id(node))
- )
node.update_service_address(service_addr)
node.status = NodeStatus.RUNNING
node.is_released = False
- logger.info("node status {}".format(node.status))
self._job_nodes[node_type][node_id] = node
- logger.info("job nodes are {}".format(self._job_nodes))
+
+ def log_rank_zero_node(self, node_type, node_id, node_rank):
+ node = self._job_nodes[node_type][node_id]
+ logger.info("Rank %s: %s", node_rank, node.name)
def get_cur_cluster_ps(self):
"""Get PS nodes in the current training cluster."""
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index e73c6bef2..06f209c32 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -283,20 +283,21 @@ def update_cluster_version(self, request, _):
)
return empty_pb2.Empty()
- def update_node_addr(self, reqeuest, _):
+ def update_node_status(self, request, _):
+ node_type = request.type
+ node_id = request.id
+ server_addr = request.addr
- task_type = reqeuest.type
- task_id = reqeuest.id
- server_addr = reqeuest.addr
-
- logger.info("update node addr")
self._job_manager.update_node_service_addr(
- task_type, task_id, server_addr
+ node_type, node_id, server_addr
)
+ node_rank = request.rank
+ if node_rank >= 0:
+ self._job_manager.log_rank_zero_node(node_type, node_id, node_rank)
+
response = elastic_training_pb2.Response()
response.success = True
- logger.info(response)
return response
def update_node_event(self, request, _):
diff --git a/dlrover/trainer/torch/elastic.py b/dlrover/trainer/torch/elastic.py
index dec8427ca..f05fa5d34 100644
--- a/dlrover/trainer/torch/elastic.py
+++ b/dlrover/trainer/torch/elastic.py
@@ -76,6 +76,7 @@ def set_master_addr(timeout=120):
os.environ[_MASTER_ADDR_KEY] = endpoint
elif rdzv_endpoint:
os.environ[_MASTER_ADDR_KEY] = rdzv_endpoint
+ master_client.report_node_status(rank)
logger.info(
"MASTER_ADDR=%s MASTER_PORT=%s",
os.environ[_MASTER_ADDR_KEY],
| Master log should show which node is rank 0.
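The patch wires this through three layers: `set_master_addr` in the trainer calls `report_node_status(rank)`, the master's `update_node_status` forwards any non-negative rank to `JobManager.log_rank_zero_node`, and the master log records the node name for each rank. A minimal sketch of the resulting master-side log line; the `Node` stub and job-node table here are invented for illustration:
```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("dlrover.master")


class Node:  # stand-in for dlrover.python.common.node.Node
    def __init__(self, name):
        self.name = name


_job_nodes = {"worker": {0: Node("train-job-worker-0")}}


def log_rank_zero_node(node_type, node_id, node_rank):
    # Mirrors JobManager.log_rank_zero_node in the patch: the master log
    # records which pod holds which rank, e.g. "Rank 0: train-job-worker-0".
    node = _job_nodes[node_type][node_id]
    logger.info("Rank %s: %s", node_rank, node.name)


log_rank_zero_node("worker", 0, 0)
```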
| 2023-06-14T03:27:31 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-419 | c663e88b646a2a577b5dbec025cd690a9f706b5d | diff --git a/dlrover/python/master/scaler/pod_scaler.py b/dlrover/python/master/scaler/pod_scaler.py
index ff57bf812..5dec53f01 100644
--- a/dlrover/python/master/scaler/pod_scaler.py
+++ b/dlrover/python/master/scaler/pod_scaler.py
@@ -350,7 +350,7 @@ def _create_pod(self, node: Node, pod_stats: Dict[str, int], ps_addrs):
env.append(V1EnvVar(name=NodeEnv.GRPC_ENABLE_FORK, value="False"))
worker_num = self._config_worker_num
- if pod_stats[node.type] > worker_num:
+ if worker_num == 0:
worker_num = pod_stats[node.type]
env.append(V1EnvVar(name=NodeEnv.WORKER_NUM, value=str(worker_num)))
env.append(
| Error when a worker is deleted.
```
Traceback (most recent call last):
File "/usr/local/bin/torchrun", line 8, in <module>
sys.exit(main())
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/multiprocessing/errors/__init__.py", line 346, in wrapper
return f(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/run.py", line 762, in main
run(args)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/run.py", line 753, in run
elastic_launch(
File "/usr/local/lib/python3.8/site-packages/torch/distributed/launcher/api.py", line 132, in __call__
return launch_agent(self._config, self._entrypoint, list(args))
File "/usr/local/lib/python3.8/site-packages/torch/distributed/launcher/api.py", line 237, in launch_agent
result = agent.run()
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/metrics/api.py", line 129, in wrapper
result = f(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/agent/server/api.py", line 709, in run
result = self._invoke_run(role)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/agent/server/api.py", line 873, in _invoke_run
self._restart_workers(self._worker_group)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/metrics/api.py", line 129, in wrapper
result = f(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/agent/server/api.py", line 700, in _restart_workers
self._initialize_workers(worker_group)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/metrics/api.py", line 129, in wrapper
result = f(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/agent/server/api.py", line 678, in _initialize_workers
self._rendezvous(worker_group)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/metrics/api.py", line 129, in wrapper
result = f(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/agent/server/api.py", line 538, in _rendezvous
store, group_rank, group_world_size = spec.rdzv_handler.next_rendezvous()
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 1025, in next_rendezvous
self._op_executor.run(join_op, deadline)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 629, in run
action = state_handler(ctx, deadline)
File "/usr/local/lib/python3.8/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 839, in __call__
if cast(datetime, state.deadline) < datetime.utcnow():
```
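For context, the one-line fix in the patch above changes when the master falls back to counting live pods while setting the `WORKER_NUM` environment variable. A minimal before/after sketch; the pod counts and configured values here are made up:
```python
def worker_num_env(config_worker_num, pod_stats, node_type="worker"):
    # After the fix: only fall back to counting live pods when no worker
    # number was configured at all (worker_num == 0). Before the fix the
    # condition was `pod_stats[node_type] > worker_num`, so the configured
    # value was overwritten whenever live pods outnumbered it.
    worker_num = config_worker_num
    if worker_num == 0:
        worker_num = pod_stats[node_type]
    return str(worker_num)


print(worker_num_env(4, {"worker": 5}))  # "4": the configured value is kept
print(worker_num_env(0, {"worker": 5}))  # "5": fall back to the live pod count
```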
| 2023-05-31T10:38:21 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-396 | 09bf04f7e6d3667dcdb0ce7fe70642e5d76b383e | diff --git a/dlrover/python/master/master.py b/dlrover/python/master/master.py
index 242d47028..7aa3435b6 100644
--- a/dlrover/python/master/master.py
+++ b/dlrover/python/master/master.py
@@ -147,13 +147,16 @@ def run(self):
break
self._remove_not_participated_workers()
if self.job_manager and self.job_manager.all_workers_exited():
+ if self.job_manager.pend_without_workers():
+ time.sleep(30)
+ continue
if self.job_manager.all_workers_failed():
logger.error("All workers failed")
self._exit_code = 1
self._exit_reason = JobExitReason.UNKNOWN_ERROR
- break
-
- if self.task_manager and not self.task_manager.finished():
+ elif (
+ self.task_manager and not self.task_manager.finished()
+ ):
logger.warning(
"All workers exited but there also are "
"unfinished tasks",
diff --git a/dlrover/python/master/node/job_manager.py b/dlrover/python/master/node/job_manager.py
index 2156f3a6b..b9a2d37f3 100644
--- a/dlrover/python/master/node/job_manager.py
+++ b/dlrover/python/master/node/job_manager.py
@@ -665,6 +665,15 @@ def remove_not_participated_workers(self, workers):
plan = self._worker_manager.remove_not_participated_workers(workers)
self._scaler.scale(plan)
+ def pend_without_workers(self):
+ """Check whether to wait for evicted workers."""
+ if self._worker_manager.has_failed_worker():
+ return False
+ elif self._worker_manager.wait_worker_restart():
+ return True
+ else:
+ return False
+
def create_job_manager(args: JobArgs, speed_monitor) -> JobManager:
# relaunch on worker failure for PS or custom strategy
diff --git a/dlrover/python/master/node/ps.py b/dlrover/python/master/node/ps.py
index 0a5a83c71..783705b6a 100644
--- a/dlrover/python/master/node/ps.py
+++ b/dlrover/python/master/node/ps.py
@@ -85,6 +85,7 @@ def relaunch_node(self, node: Node):
node.is_released = True
new_id = next(self._node_id_iter)
self._nodes[new_id] = node.get_relaunch_node_info(new_id)
+ self._nodes.pop(node.id)
if node in self._training_ps_cluster:
i = self._training_ps_cluster.index(node)
self._training_ps_cluster[i] = self._nodes[new_id]
diff --git a/dlrover/python/master/node/training_node.py b/dlrover/python/master/node/training_node.py
index 8a8ea4052..41b3101a4 100644
--- a/dlrover/python/master/node/training_node.py
+++ b/dlrover/python/master/node/training_node.py
@@ -179,6 +179,7 @@ def relaunch_node(self, node: Node):
new_id = next(self._node_id_iter)
relaunch_node = node.get_relaunch_node_info(new_id)
self._nodes[new_id] = relaunch_node
+ self._nodes.pop(node.id)
logger.info("Relaunch node %s to %s", node.name, new_id)
plan.launch_nodes.append(
Node(
diff --git a/dlrover/python/master/node/worker.py b/dlrover/python/master/node/worker.py
index 07270a502..93fdd6a1e 100644
--- a/dlrover/python/master/node/worker.py
+++ b/dlrover/python/master/node/worker.py
@@ -14,7 +14,11 @@
import copy
from typing import Dict, List
-from dlrover.python.common.constants import NodeStatus, NodeType
+from dlrover.python.common.constants import (
+ NodeExitReason,
+ NodeStatus,
+ NodeType,
+)
from dlrover.python.common.log import default_logger as logger
from dlrover.python.common.node import Node, NodeGroupResource, NodeResource
from dlrover.python.master.node.training_node import (
@@ -250,3 +254,23 @@ def remove_not_participated_workers(self, workers):
if p:
plan.merge(p)
return plan
+
+ def has_failed_worker(self):
+ """Check whether there is failed worker except evicted workers."""
+ for worker in self._nodes.values():
+ if worker.exit_reason in [
+ NodeExitReason.FATAL_ERROR,
+ NodeExitReason.UNKNOWN_ERROR,
+ ]:
+ return True
+ return False
+
+ def wait_worker_restart(self):
+ """Check whether there are workers tha have remaining retries."""
+ for worker in self._nodes.values():
+ if (
+ worker.exit_reason == NodeExitReason.KILLED
+ and worker.relaunch_count < worker.max_relaunch_count
+ ):
+ return True
+ return False
| The job should pend and wait to relaunch workers if all workers are evicted.
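A minimal sketch of the pending decision the patch introduces; the `Worker` stub is invented, and the exit-reason strings stand in for the `NodeExitReason` constants:
```python
class Worker:
    def __init__(self, exit_reason, relaunch_count=0, max_relaunch_count=3):
        self.exit_reason = exit_reason
        self.relaunch_count = relaunch_count
        self.max_relaunch_count = max_relaunch_count


def pend_without_workers(workers):
    # Mirrors JobManager.pend_without_workers: keep the job alive only if
    # no worker hit a real error and at least one evicted (KILLED) worker
    # still has relaunch retries left.
    if any(w.exit_reason in ("FATAL_ERROR", "UNKNOWN_ERROR") for w in workers):
        return False
    return any(
        w.exit_reason == "KILLED" and w.relaunch_count < w.max_relaunch_count
        for w in workers
    )


print(pend_without_workers([Worker("KILLED")]))       # True: wait and retry
print(pend_without_workers([Worker("FATAL_ERROR")]))  # False: fail the job
```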
| 2023-05-05T03:08:56 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-388 | 6043ada7b88d0123dc2831e87f1db247422d80f0 | diff --git a/dlrover/proto/elastic_training.proto b/dlrover/proto/elastic_training.proto
index 3a8abab9a..5820f421a 100644
--- a/dlrover/proto/elastic_training.proto
+++ b/dlrover/proto/elastic_training.proto
@@ -158,6 +158,7 @@ message GlobalStepRecord {
message QueryPsNodesResponse {
repeated NodeMeta ps_nodes = 1;
bool new_ps_ready = 2;
+ bool ps_failure = 3;
}
message NodeMeta {
diff --git a/dlrover/python/common/global_context.py b/dlrover/python/common/global_context.py
index 77be1a8bb..37a486b90 100644
--- a/dlrover/python/common/global_context.py
+++ b/dlrover/python/common/global_context.py
@@ -30,6 +30,7 @@ class ConfigKeys(object):
SECONDS_HUGE_TRAINING_THRESHOLD = "seconds_huge_training_threshold"
GLOBAL_STEP_COUNT_TO_AUTO_WORKER = "global_step_count_to_auto_worker"
SECONDS_TO_CHANGE_PS = "seconds_to_change_ps"
+ SECONDS_TO_WAIT_FAILED_PS = "seconds_to_wait_failed_ps"
class DefaultConfigValues(object):
@@ -44,6 +45,7 @@ class DefaultConfigValues(object):
DEFAULT_SECONDS_HUGE_TRAINING_THRESHOLD = 1800 # 30min
DEFALUT_GLOBAL_STEP_COUNT_TO_AUTO_WORKER = 5
DEFAULT_SECONDS_TO_CHANGE_PS = 3600 # 1h
+ DEFAULT_SECONDS_TO_WAIT_FAILED_PS = 600 # 10min
class Context(object):
@@ -94,6 +96,10 @@ def __init__(self):
ConfigKeys.SECONDS_TO_CHANGE_PS,
DefaultConfigValues.DEFAULT_SECONDS_TO_CHANGE_PS,
)
+ self.seconds_to_wait_failed_ps = self.get_param_value_from_brain(
+ ConfigKeys.SECONDS_TO_WAIT_FAILED_PS,
+ DefaultConfigValues.DEFAULT_SECONDS_TO_WAIT_FAILED_PS,
+ )
self.auto_worker_enabled = False
self.auto_ps_enabled = False
self.is_tfv1_ps = False
diff --git a/dlrover/python/common/node.py b/dlrover/python/common/node.py
index 7738e0a88..aa5c64803 100644
--- a/dlrover/python/common/node.py
+++ b/dlrover/python/common/node.py
@@ -145,6 +145,7 @@ class Node(object):
is_released: bool, true if the master deletes the node.
exit_reason: str, the exited reason of a node.
used_resource: the resource usage of the node.
+ init_time: the timestamp when the node object was initialized.
"""
def __init__(
@@ -181,6 +182,7 @@ def __init__(
self.config_resource = config_resource
self.used_resource = NodeResource(0.0, 0.0)
self.start_hang_time = 0
+ self.init_time = time.time()
def inc_relaunch_count(self):
self.relaunch_count += 1
@@ -221,6 +223,7 @@ def get_relaunch_node_info(self, new_id):
new_node.start_time = None
new_node.is_released = False
new_node.relaunchable = True
+ new_node.init_time = time.time()
return new_node
def is_unrecoverable_failure(self):
@@ -262,6 +265,14 @@ def update_priority(self, group_node_num):
"high/low/a fraction value.".format(priority)
)
+ def timeout(self, timeout):
+ now = time.time()
+ if (
+ now - self.init_time > timeout
+ and self.status == NodeStatus.INITIAL
+ ):
+ return True
+
def __repr__(self):
return (
"name:" + str(self.name) + ";"
diff --git a/dlrover/python/elastic_agent/master_client.py b/dlrover/python/elastic_agent/master_client.py
index ac8026952..dab09954c 100644
--- a/dlrover/python/elastic_agent/master_client.py
+++ b/dlrover/python/elastic_agent/master_client.py
@@ -271,7 +271,7 @@ def update_cluster_version(
def query_ps_nodes(self):
request = empty_pb2.Empty()
response = self._stub.query_ps_nodes(request)
- return response.ps_nodes, response.new_ps_ready
+ return response.ps_nodes, response.ps_failure
@retry_grpc_request
def query_training_status(self):
diff --git a/dlrover/python/master/node/job_manager.py b/dlrover/python/master/node/job_manager.py
index c56a66a89..2156f3a6b 100644
--- a/dlrover/python/master/node/job_manager.py
+++ b/dlrover/python/master/node/job_manager.py
@@ -617,8 +617,13 @@ def get_next_cluster_ps(self):
return self._ps_manager.get_next_training_ps_cluster()
def ready_for_new_ps_cluster(self):
+ """Check whether ps cluster is used to training"""
return self._ps_manager.get_ready_for_new_ps_cluster()
+ def has_ps_failure(self):
+ """Check whether ther is PS failure"""
+ return self._ps_manager.has_ps_failure()
+
def remove_training_nodes(self):
"""Remove all PS and workers"""
self._job_autoscaler.stop_auto_scaling()
diff --git a/dlrover/python/master/node/ps.py b/dlrover/python/master/node/ps.py
index c1c21f97e..0a5a83c71 100644
--- a/dlrover/python/master/node/ps.py
+++ b/dlrover/python/master/node/ps.py
@@ -18,12 +18,15 @@
from typing import Dict, List
from dlrover.python.common.constants import NodeStatus, NodeType
+from dlrover.python.common.global_context import Context
from dlrover.python.common.log import default_logger as logger
from dlrover.python.common.node import Node, NodeGroupResource, NodeResource
from dlrover.python.master.node.training_node import TrainingNodeManager
from dlrover.python.master.resource.job import JobResource
from dlrover.python.master.scaler.base_scaler import ScalePlan
+_dlrover_ctx = Context.singleton_instance()
+
class ParameterServerManager(TrainingNodeManager):
def __init__(
@@ -53,7 +56,7 @@ def __init__(
self._new_service_fn = new_service_fn
self._pre_dropped_ps: List[Node] = []
self._lock = threading.Lock()
- self._ready_for_new_ps_cluster = False
+ self._ps_cluster_changed = True
self._migrated_ps_nodes: Dict[int, Node] = {}
self._next_training_ps_cluster: List[Node] = []
self._training_ps_cluster: List[Node] = []
@@ -97,6 +100,7 @@ def relaunch_node(self, node: Node):
relaunch_count=node.relaunch_count,
)
)
+ self._ps_cluster_changed = True
return plan
def adjust_ps(self, ps_resource: NodeGroupResource):
@@ -120,7 +124,7 @@ def _scale_up_ps(self, up_num):
logger.info("Scale up ps with the number %s", up_num)
new_ps = []
with self._lock:
- self._ready_for_new_ps_cluster = False
+ self._ps_cluster_changed = True
alive_num = len(self.get_training_ps_cluster())
task_id_iter = itertools.count(alive_num)
for _ in range(up_num):
@@ -148,7 +152,7 @@ def _scale_up_ps(self, up_num):
def _scale_down_ps(self, down_num):
with self._lock:
self._pre_dropped_ps = []
- self._ready_for_new_ps_cluster = False
+ self._ps_cluster_changed = True
new_ps_num = self._job_resource.ps_num - down_num
self._job_resource.update_node_group_resource(
NodeType.PS, new_ps_num, 0, 0
@@ -163,7 +167,7 @@ def _scale_down_ps(self, down_num):
logger.info("Scale down PS %s", dropped_ps)
def process_after_ps_cluster_ready(self):
- self._ready_for_new_ps_cluster = True
+ self._ps_cluster_changed = False
self._training_ps_cluster = []
logger.info("Process PS nodes after ps training is ready")
self._training_ps_cluster.extend(self._next_training_ps_cluster)
@@ -195,21 +199,36 @@ def get_next_training_ps_cluster(self):
After rescaling PS, it returns the new PS set only when all
new PS are running; otherwise, it returns the old PS set.
"""
- if self._ready_for_new_ps_cluster:
+ if not self._ps_cluster_changed:
return self._next_training_ps_cluster
all_new_ps_ready = True
for node in self._nodes.values():
- if not node.is_released and node.status in [
- NodeStatus.INITIAL,
- NodeStatus.PENDING,
- ]:
+ if self._wait_ps_node(node):
all_new_ps_ready = False
break
if all_new_ps_ready:
self._next_training_ps_cluster = self._get_all_non_migrated_ps()
return self._next_training_ps_cluster
+ def _wait_ps_node(self, node: Node):
+ """Whether to wait the PS node is running"""
+ return (
+ not node.is_released
+ and not node.timeout(_dlrover_ctx.seconds_to_wait_failed_ps)
+ and node.status in [NodeStatus.INITIAL, NodeStatus.PENDING]
+ )
+
+ def has_ps_failure(self):
+ """
+ Check whether there is a PS failure and the master has not relaunched
+ the failed PS node.
+ """
+ for node in self._nodes.values():
+ if node.timeout(_dlrover_ctx.seconds_to_wait_failed_ps):
+ return True
+ return False
+
def _get_all_non_migrated_ps(self):
"""Get all running PS pods without migrated PS nodes for training"""
training_ps = {}
@@ -255,7 +274,7 @@ def get_training_ps_cluster(self):
return training_ps
def get_ready_for_new_ps_cluster(self):
- return self._ready_for_new_ps_cluster
+ return not self._ps_cluster_changed
def get_ps_addrs(self):
"""Get the address list of ps services"""
@@ -315,7 +334,7 @@ def _migrate_parameter_server(self, name: str, cpu=0, memory=0):
resource = copy.deepcopy(original_pod.config_resource)
with self._lock:
- self._ready_for_new_ps_cluster = False
+ self._ps_cluster_changed = True
new_ps_id = next(self._node_id_iter)
resource.cpu = cpu
resource.memory = memory
diff --git a/dlrover/python/master/servicer.py b/dlrover/python/master/servicer.py
index d1e7def36..e73c6bef2 100644
--- a/dlrover/python/master/servicer.py
+++ b/dlrover/python/master/servicer.py
@@ -317,6 +317,7 @@ def update_node_event(self, request, _):
def query_ps_nodes(self, request, _):
training_ps: List[Node] = self._job_manager.get_next_cluster_ps()
ready = self._job_manager.ready_for_new_ps_cluster()
+ ps_failure = self._job_manager.has_ps_failure()
res = elastic_training_pb2.QueryPsNodesResponse()
for ps in training_ps:
ps_meta = res.ps_nodes.add()
@@ -326,7 +327,7 @@ def query_ps_nodes(self, request, _):
ps_meta.memory = int(ps.config_resource.memory)
logger.info("PS nodes : %s", res)
res.new_ps_ready = ready
-
+ res.ps_failure = ps_failure
return res
def query_running_nodes(self, request, _):
| Train with the existing PS nodes if some PS nodes fail.
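A minimal sketch of the timeout check this behaviour is built on: a PS node that stays `INITIAL` past `seconds_to_wait_failed_ps` (600s by default in the patch) is treated as failed, so `_wait_ps_node` stops waiting for it and `query_ps_nodes` reports `ps_failure` to the workers. The `PSNode` stub and the timestamps below are illustrative:
```python
import time

SECONDS_TO_WAIT_FAILED_PS = 600  # default added by the patch (10 min)


class PSNode:
    def __init__(self, status="INITIAL"):
        self.status = status
        self.init_time = time.time()

    def timeout(self, timeout):
        # Mirrors Node.timeout in the patch: a node that is still INITIAL
        # after `timeout` seconds has never come up and is treated as failed.
        now = time.time()
        return now - self.init_time > timeout and self.status == "INITIAL"


ps = PSNode()
ps.init_time -= 700  # pretend this PS was created 700s ago and never started
print(ps.timeout(SECONDS_TO_WAIT_FAILED_PS))  # True: stop waiting for it
```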
| 2023-04-10T11:50:10 | 0.0 | [] | [] |
|||
intelligent-machine-learning/dlrover | intelligent-machine-learning__dlrover-288 | 978d9c2bd93da4f729d6d54cb24f0dedaf1722bd | diff --git a/.isort.cfg b/.isort.cfg
index 77bdef286..b086f4c01 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -1,5 +1,5 @@
[settings]
-known_third_party = DeepFMAdaptor,MyEstimator,deepctr,deepfm,google,grpc,kubernetes,numpy,psutil,pyhocon,ray,setuptools,tensorflow,tensorflow_estimator,yaml
+known_third_party = DeepFMAdaptor,MyEstimator,deepctr,deepfm,google,grpc,kubernetes,numpy,psutil,pyhocon,ray,setuptools,tensorflow,tensorflow_estimator,torch,yaml
multi_line_output=3
line_length=79
include_trailing_comma=True
\ No newline at end of file
diff --git a/dlrover/python/elastic_agent/pytorch/__init__.py b/dlrover/python/elastic_agent/pytorch/__init__.py
new file mode 100644
index 000000000..561b39cf2
--- /dev/null
+++ b/dlrover/python/elastic_agent/pytorch/__init__.py
@@ -0,0 +1,12 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/dlrover/python/elastic_agent/pytorch/elastic_dataset.py b/dlrover/python/elastic_agent/pytorch/elastic_dataset.py
new file mode 100644
index 000000000..d30f5e41c
--- /dev/null
+++ b/dlrover/python/elastic_agent/pytorch/elastic_dataset.py
@@ -0,0 +1,71 @@
+# Copyright 2023 The DLRover Authors. All rights reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABCMeta, abstractmethod
+
+from torch.utils.data import Dataset
+
+from dlrover.python.elastic_agent.sharding.client import IndexShardingClient
+
+
+def read_txt(path):
+ with open(path, "r") as fp:
+ content = fp.readlines()
+ return content
+
+
+class ElasticDataset(Dataset, metaclass=ABCMeta):
+ def __init__(self, path, batch_size, epochs, shuffle):
+ """Using ElasticDataset, the node can read samples without
+ duplicates with other nodes in an epoch. DLRover master
+ will dispatch the index of sample in a dataset to one node.
+
+ Args:
+ path: str, the path of the dataset meta file. For example, if the
+ images are stored in a folder, the meta file should be a
+ text file where each line is the absolute path of an image.
+ batch_size: int, the size of batch samples to compute gradients
+ in a trainer process.
+ epochs: int, the number of epochs.
+ shuffle: bool, whether to shuffle samples in the dataset.
+ """
+ self.lines = read_txt(path)
+ dataset_size = len(self.lines)
+ self._shard_client = IndexShardingClient(
+ dataset_name=path,
+ batch_size=batch_size,
+ num_epochs=epochs,
+ dataset_size=dataset_size,
+ shuffle=shuffle,
+ storage_type="text",
+ )
+
+ def __len__(self):
+ return self._shard_client.get_total_sample_num()
+
+ def __getitem__(self, _):
+ index = self._shard_client.fetch_sample_index()
+ return self.read_sample(index)
+
+ def get_epoch(self):
+ return self._shard_client.get_current_epoch()
+
+ def report_batch_done(self, batch_size=None):
+ """After updating models using the samples, the dataset need to
+ report the batch completion."""
+ self._shard_client.report_batch_done(batch_size)
+
+ @abstractmethod
+ def read_sample(self, index):
+ """Implement to read sample data by the index."""
+ pass
diff --git a/dlrover/python/elastic_agent/sharding/client.py b/dlrover/python/elastic_agent/sharding/client.py
index 6a3f1db3f..e0c41df50 100644
--- a/dlrover/python/elastic_agent/sharding/client.py
+++ b/dlrover/python/elastic_agent/sharding/client.py
@@ -14,6 +14,7 @@
import threading
import time
from collections import OrderedDict
+from multiprocessing import SimpleQueue
from dlrover.proto import elastic_training_pb2
from dlrover.python.elastic_agent.master_client import GlobalMasterClient
@@ -25,23 +26,33 @@
class ShardingClient(object):
+ """ShardingClient queries data shards from the DLRover master.
+ Args:
+ dataset_name: the name of dataset.
+ batch_size: the size of batch data.
+ num_epochs: the number of epochs.
+ dataset_size: the size of dataset.
+ shuffle: whether to shuffle shards.
+ task_type: Task type is the computation type like
+ elastic_training_pb2.TRAINING, elastic_training_pb2.EVALUATION.
+ num_minibatches_per_shard: the number of batches in each shard.
+ storage_type: the storage type of dataset. It is "text" if the
+ dataset is stored in a text file. It is "table" if the
+ dataset is stored in a table like MaxCompute and Hive.
+ """
+
def __init__(
self,
dataset_name,
batch_size,
- num_epochs=None,
- dataset_size=None,
+ num_epochs,
+ dataset_size,
shuffle=False,
task_type=elastic_training_pb2.TRAINING,
num_minibatches_per_shard=0,
- master_client=None,
storage_type="",
):
- self._mc = (
- master_client
- if master_client
- else GlobalMasterClient.MASTER_CLIENT
- )
+ self._mc = GlobalMasterClient.MASTER_CLIENT
self._batch_size = batch_size
self._num_epochs = num_epochs
self._dataset_size = dataset_size
@@ -80,7 +91,7 @@ def reset_dataset(self):
def get_current_task(self):
return self._current_task
- def get_task(self):
+ def get_task(self) -> elastic_training_pb2.Task:
training_reporter.set_start_time()
for _ in range(5):
success, task = self._mc.get_task(self._dataset_name)
@@ -190,3 +201,102 @@ def restore_shard_from_checkpoint(self, shard_checkpoint):
def get_current_epoch(self):
res = self._mc.get_dataset_epoch(self._dataset_name)
return res.epoch
+
+ def get_total_sample_num(self):
+ return self._dataset_size * self._num_epochs
+
+
+class IndexShardingClient(ShardingClient):
+ """ShardingClient queries data shards from the DLRover master
+ and generates the index of sample from the shard.
+ Users can read data from the disk by the sample index.
+ Args:
+ dataset_name: the name of dataset.
+ batch_size: the size of batch data.
+ num_epochs: the number of epochs.
+ dataset_size: the size of dataset.
+ shuffle: whether to shuffle shards.
+ task_type: Task type is the computation type like
+ elastic_training_pb2.TRAINING, elastic_training_pb2.EVALUATION.
+ num_minibatches_per_shard: the number of batches in each shard.
+ storage_type: the storage type of dataset. It is "text" if the
+ dataset is stored in a text file. It is "table" if the
+ dataset is stored in a table like MaxCompute and Hive.
+ num_workers: the number of worker processes that share the client
+ to fetch sample indices.
+ """
+
+ def __init__(
+ self,
+ dataset_name,
+ batch_size,
+ num_epochs,
+ dataset_size,
+ shuffle=False,
+ task_type=elastic_training_pb2.TRAINING,
+ num_minibatches_per_shard=0,
+ storage_type="",
+ num_workers=1,
+ ):
+ super(IndexShardingClient, self).__init__(
+ dataset_name,
+ batch_size,
+ num_epochs,
+ dataset_size,
+ shuffle,
+ task_type,
+ num_minibatches_per_shard,
+ storage_type,
+ )
+ self._num_workers = num_workers
+ self._sample_queue = SimpleQueue()
+ self._report_sharding_params()
+
+ threading.Thread(
+ target=self._fetch_sample_indices,
+ name="fetch_sample_indices",
+ daemon=True,
+ ).start()
+
+ def _fetch_sample_indices(self):
+ while True:
+ if self._sample_queue.empty():
+ task = self.get_task()
+ if not task or not task.shard:
+ for _ in range(self._num_workers):
+ self._sample_queue.put(None)
+ break
+ ids = (
+ task.shard.indices
+ if task.shard.indices
+ else list(range(task.shard.start, task.shard.end))
+ )
+ for i in ids:
+ self._sample_queue.put(i)
+ else:
+ time.sleep(0.001)
+
+ def fetch_sample_index(self):
+ """Fetch an index of the sample. The function get an index
+ from a queue because there may be multiple sub-process to call
+ the function.
+ """
+ while True:
+ for _ in range(5):
+ index = self._sample_queue.get()
+ if index is not None:
+ break
+ else:
+ time.sleep(0.1)
+ if index is None:
+ raise StopIteration
+ return index
+
+ def clear_shard_queue(self):
+ self._sample_queue = SimpleQueue()
+
+ def restore_shard_from_checkpoint(self, shard_checkpoint):
+ # To avoid duplicate shards, drop all shards in the _shard_queue
+ # before restoring shard from checkpoint
+ self.clear_shard_queue()
+ super().restore_shard_from_checkpoint(shard_checkpoint)
| Implement a dataset to support Torch Distributed Elastic.
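A sketch of how the new `ElasticDataset` in the patch is meant to be subclassed; the meta-file name below is a placeholder, only `read_sample` has to be implemented, and this only runs inside a DLRover job where the master that dispatches sample indices is reachable:
```python
from torch.utils.data import DataLoader

from dlrover.python.elastic_agent.pytorch.elastic_dataset import ElasticDataset


class TextLineDataset(ElasticDataset):
    def read_sample(self, index):
        # `self.lines` holds the lines of the meta file; the index comes
        # from the DLRover master, so workers never read duplicate samples.
        return self.lines[index].strip()


dataset = TextLineDataset("train_meta.txt", batch_size=32, epochs=1, shuffle=True)
loader = DataLoader(dataset, batch_size=32)
for batch in loader:
    ...  # forward/backward pass on the batch
    dataset.report_batch_done()  # tell the master this batch is completed
```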
| 2023-03-07T11:14:40 | 0.0 | [] | [] |