code
stringlengths 1
5.19M
| package
stringlengths 1
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from typing import Any, Tuple, TypeVar
from zuper_typing.annotations_tricks import (
get_VarTuple_arg,
is_Any,
is_FixedTuple,
is_Tuple,
is_TupleLike,
is_VarTuple,
make_Tuple,
is_FixedTupleLike,
get_FixedTupleLike_args,
)
#
# T1 = Tuple
# T2 = Tuple[int]
# T3 = Tuple[Any, ...]
from zuper_typing.my_dict import make_CustomTuple
X = TypeVar("X")
# T4 = Tuple[X, ...]
def test_tuple_special_1():
    """A bare ``Tuple`` is variadic with an ``Any`` element type."""
    bare = Tuple
    assert is_Tuple(bare), bare
    assert is_VarTuple(bare), bare
    assert not is_FixedTuple(bare), bare
    elem = get_VarTuple_arg(bare)
    assert is_Any(elem), elem
def test_tuple_special_2():
    """``Tuple[Any, ...]`` is variadic, not fixed, and its element is Any."""
    vt = Tuple[Any, ...]
    assert is_Tuple(vt), vt
    assert is_VarTuple(vt), vt
    assert not is_FixedTuple(vt), vt
    elem = get_VarTuple_arg(vt)
    assert is_Any(elem), elem
def test_tuple_special_3():
    """``Tuple[X, ...]`` is variadic and reports the TypeVar as its element."""
    vt = Tuple[X, ...]
    assert is_Tuple(vt), vt
    assert is_VarTuple(vt), vt
    assert not is_FixedTuple(vt), vt
    assert get_VarTuple_arg(vt) == X
def test_tuple_special_4():
    """``Tuple[int, str]`` is a fixed tuple, not a variadic one."""
    ft = Tuple[int, str]
    assert is_Tuple(ft), ft
    assert not is_VarTuple(ft), ft
    assert is_FixedTuple(ft), ft
def test_tuple_special_5():
    """``make_Tuple()`` with no arguments builds an empty fixed tuple type."""
    ft = make_Tuple()
    assert is_Tuple(ft), ft
    assert not is_VarTuple(ft), ft
    assert is_FixedTuple(ft), ft
def test_tuple_special_6():
    """Builtin ``tuple`` is tuple-like and variadic, but not a typing.Tuple."""
    builtin_tuple = tuple
    assert not is_Tuple(builtin_tuple)
    assert is_TupleLike(builtin_tuple)
    assert is_VarTuple(builtin_tuple)
    elem = get_VarTuple_arg(builtin_tuple)
    assert is_Any(elem)
def test_customtuple_1():
    """A CustomTuple is tuple-like and fixed-like, but not a typing tuple."""
    component_types = (int, str)
    ct = make_CustomTuple(component_types)
    assert not is_Tuple(ct)
    assert is_TupleLike(ct)
    assert not is_VarTuple(ct)
    assert is_FixedTupleLike(ct)
    assert not is_FixedTuple(ct)
    assert get_FixedTupleLike_args(ct) == component_types
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_tuples_special.py
|
test_tuples_special.py
|
from typing import Optional, Union
from nose.tools import raises
from zuper_typing.annotations_tricks import (
get_Optional_arg,
get_Union_args,
is_Optional,
is_Union,
make_Union,
)
from zuper_typing.recursive_tricks import replace_typevars
from zuper_typing.uninhabited import make_Uninhabited
def test_making_union():
    """make_Union accepts any arity from one argument up, duplicates included."""
    members = (int, float, bool, str, bytes, int)
    for count in range(1, len(members) + 1):
        make_Union(*members[:count])
@raises(ValueError)
def test_corner_cases_empty_union():
    """make_Union with no arguments must raise ValueError (empty union)."""
    make_Union()
# @raises(ValueError)
def test_corner_cases_empty_union1():
    """A one-element union is accepted.

    NOTE(review): the commented-out @raises above suggests this case once
    raised; now it is expected to succeed — confirm that is intentional.
    """
    make_Union(int)
def test_multiple_optional():
    """Union[int, str, None] is Optional wrapping Union[int, str]."""
    a = Union[int, str, type(None)]
    assert is_Optional(a), a
    U = get_Optional_arg(a)
    assert is_Union(U), U
    # Bug fix: `assert int, str == get_Union_args(U)` parsed as
    # `assert int, (str == ...)` and always passed.  Compare the actual
    # members instead (as a set, so ordering does not matter).
    assert set(get_Union_args(U)) == {int, str}, get_Union_args(U)
def test_multiple_optional2():
    """make_Union(int, str, NoneType) normalizes to Optional[Union[int, str]]."""
    ts = (int, str, type(None))
    a = make_Union(*ts)
    print(f"a = {a}")
    assert is_Optional(a), a
    U = get_Optional_arg(a)
    assert is_Union(U), U
    # Bug fix: the original `assert int, str == get_Union_args(U)` was a
    # no-op (it asserted `int`, with the comparison as the message).
    assert set(get_Union_args(U)) == {int, str}, get_Union_args(U)
def test_multiple_optional3():
    """NoneType in the middle still normalizes to Optional[Union[int, str]]."""
    ts = (int, type(None), str)
    a = make_Union(*ts)
    assert is_Optional(a), a
    U = get_Optional_arg(a)
    assert is_Union(U), U
    # Bug fix: the original `assert int, str == get_Union_args(U)` always
    # passed regardless of the comparison result.  Use an order-insensitive
    # set comparison since union member ordering is an implementation detail.
    assert set(get_Union_args(U)) == {int, str}, get_Union_args(U)
def test_optional1():
    """get_Optional_arg unwraps Optional[int] to int."""
    inner = get_Optional_arg(Optional[int])
    assert inner is int
def test_multiple_union2():
    """A union of int and NoneType is recognized as Optional."""
    U = make_Union(int, type(None))
    assert is_Optional(U)
def test_unh():
    """replace_typevars must accept the Uninhabited sentinel without error."""
    bottom = make_Uninhabited()
    replace_typevars(bottom, bindings={}, symbols={})
def test_union_simple():
    """A typing.Union of two types is detected by is_Union."""
    assert is_Union(Union[int, str])
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_union.py
|
test_union.py
|
from zuper_typing.uninhabited import is_Uninhabited, make_Uninhabited
def test_uninhabited1():
    """make_Uninhabited produces a value recognized by is_Uninhabited."""
    assert is_Uninhabited(make_Uninhabited())
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_uninhabited.py
|
test_uninhabited.py
|
from typing import Dict
from zuper_typing.annotations_tricks import get_Dict_args, is_Any
from zuper_typing.my_dict import make_dict, make_list, make_set
def test_dict_1():
    """A bare Dict has Any for both its key and value types."""
    key_t, val_t = get_Dict_args(Dict)
    assert is_Any(key_t), key_t
    assert is_Any(val_t), val_t
def test_dict_2_copy():
    """copy() on a custom Dict[int, str] instance must not raise."""
    IntToStr = make_dict(int, str)
    IntToStr({1: "one"}).copy()
def test_list_2_copy():
    """copy() on a custom List[int] instance must not raise."""
    IntList = make_list(int)
    IntList([1]).copy()
def test_set_2_copy():
    """copy() on a custom Set[int] instance must not raise."""
    IntSet = make_set(int)
    IntSet([1]).copy()
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_dict.py
|
test_dict.py
|
from dataclasses import dataclass
from datetime import datetime
from typing import Dict, List, Optional, Set, Tuple, TypeVar, Union
from zuper_typing.exceptions import ZValueError
from zuper_typing.get_patches_ import assert_equivalent_types, NotEquivalentException
from zuper_typing.my_intersection import Intersection
from zuper_typing.subcheck import can_be_used_as2
from zuper_typing.type_algebra import type_inf, type_sup
from zuper_typing.uninhabited import is_Uninhabited, make_Uninhabited
def test_algebra_sup_1():
X = TypeVar("X")
W = TypeVar("W")
Z = TypeVar("Z")
# noinspection PyTypeHints
X2 = TypeVar("X")
Y = TypeVar("Y")
U = make_Uninhabited()
@dataclass
class D:
a: int
cases = [
(bool, object, object),
(bool, int, int),
(bool, U, bool),
(int, str, Union[int, str]),
(int, type(None), Optional[int]),
(List[bool], List[int], List[int]),
(Set[bool], Set[int], Set[int]),
(Dict[bool, str], Dict[int, str], Dict[int, str]),
(Dict[str, bool], Dict[str, int], Dict[str, int]),
(Tuple[bool, ...], Tuple[int, ...], Tuple[int, ...]),
(Tuple[bool, str], Tuple[int, str], Tuple[int, str]),
(X, Y, Union[X, Y]),
(X, X2, X),
(Set[bool], D, Union[Set[bool], D]),
(Union[int, float], Union[str, datetime], Union[datetime, float, int, str]),
(Optional[int], Optional[bool], Optional[int]),
(Union[X, Y], Union[W, Z], Union[W, X, Y, Z]),
]
for A, B, expect in cases:
yield check_sup, A, B, expect
yield check_sup, B, A, expect
def test_algebra_inf_1():
X = TypeVar("X")
W = TypeVar("W")
Z = TypeVar("Z")
# noinspection PyTypeHints
X2 = TypeVar("X")
Y = TypeVar("Y")
U = make_Uninhabited()
@dataclass
class D:
a: int
cases = [
(bool, object, bool),
(bool, int, bool),
(int, str, U),
(U, str, U),
(int, type(None), U),
(List[bool], List[int], List[bool]),
(List[bool], int, U),
(List[bool], Set[int], U),
(Set[bool], Set[int], Set[bool]),
(Set[bool], int, U),
(Set[bool], List[int], U),
(Dict[bool, str], Dict[int, str], Dict[bool, str]),
(Dict[str, bool], Dict[str, int], Dict[str, bool]),
(Tuple[bool, ...], Tuple[int, ...], Tuple[bool, ...]),
(Tuple[bool, str], Tuple[int, str], Tuple[bool, str]),
(X, Y, Intersection[X, Y]),
(X, X2, X),
(Optional[bool], type(None), type(None)), # ?
(Set[bool], D, U),
(Union[int, float], Union[str, datetime], U),
(Optional[int], Optional[bool], Optional[bool]),
(Intersection[X, Y], Intersection[W, Z], Intersection[W, X, Y, Z]),
]
for A, B, expect in cases:
yield check_inf, A, B, expect
yield check_inf, B, A, expect
def check_sup(A, B, expect):
r = can_be_used_as2(A, expect)
if not r:
msg = "I A <= expect"
raise ZValueError(msg, A=A, expect=expect)
r = can_be_used_as2(B, expect)
if not r:
msg = "I B <= expect"
raise ZValueError(msg, B=B, expect=expect)
res = type_sup(A, B)
try:
assert_equivalent_types(res, expect)
except NotEquivalentException as e:
msg = "Failed to compute sup (union)"
raise ZValueError(msg, A=A, B=B, expect=expect, res=res) from e
def check_inf(A, B, expect):
r = can_be_used_as2(expect, A)
if not r:
msg = "I expect <= A"
raise ZValueError(msg, expect=expect, A=A)
r = can_be_used_as2(expect, B)
if not r:
msg = "I expect <= B"
raise ZValueError(msg, expect=expect, B=B)
res = type_inf(A, B)
try:
assert_equivalent_types(res, expect)
except NotEquivalentException as e:
msg = "Failed to compute inf (intersection)"
raise ZValueError(msg, A=A, B=B, expect=expect, res=res) from e
def test_optional1():
r = can_be_used_as2(type(None), Optional[int])
assert r, r
def test_algebra_dc1():
@dataclass
class A1:
a: bool
@dataclass
class A2:
a: int
assert can_be_used_as2(A1, A2)
ti = type_inf(A1, A2)
ts = type_sup(A1, A2)
eq1 = equivalent(ti, A1)
assert eq1, eq1
eq2 = equivalent(ts, A2)
assert eq2, eq2
def test_algebra_dc2():
@dataclass
class A1:
a: bool
@dataclass
class A2:
a: str
@dataclass
class A3:
a: Union[str, bool]
ti = type_inf(A1, A2)
ts = type_sup(A1, A2)
assert is_Uninhabited(ti), ti
assert equivalent(ts, A3)
def equivalent(x, y):
    """Check that ``x`` and ``y`` are mutually substitutable types.

    Returns the (falsy) first result if x cannot be used as y, otherwise
    the result of the second check.

    NOTE(review): both calls pass (x, y) in the same order; a symmetric
    equivalence check would presumably pass (y, x) the second time while
    reusing the matches ``r1.M`` — confirm this is intended.
    """
    r1 = can_be_used_as2(x, y)
    if not r1:
        return r1
    r2 = can_be_used_as2(x, y, r1.M)
    return r2
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_type_algebra.py
|
test_type_algebra.py
|
from typing import ClassVar, Dict, IO, Iterable
from mypy_extensions import NamedArg
from zuper_typing.annotations_tricks import make_ForwardRef, name_for_type_like
from zuper_typing.monkey_patching_typing import original_dict_getitem
from zuper_typing.my_dict import make_set
def test_names():
from typing import Iterator, List, Tuple, Set, Type, Callable
xs = (
ClassVar[int],
Iterator[int],
List,
List[int],
Tuple,
Tuple[int],
Set,
Set[int],
Type[int],
Dict[int, int],
make_set(int),
original_dict_getitem((int, int)),
Callable[[int], int],
Callable[[], int],
Callable[[NamedArg(int, "a")], int],
Callable,
IO,
Iterable[int],
make_ForwardRef("varname"),
type(None),
)
for x in xs:
name_for_type_like(x)
if __name__ == "__main__":
test_names()
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_names.py
|
test_names.py
|
import typing
from typing import Any, ClassVar, NewType, Optional, Tuple, Type, TypeVar, Union
from nose.tools import assert_equal
from zuper_typing.annotations_tricks import (
get_ClassVar_arg,
get_Dict_args,
get_ForwardRef_arg,
get_NewType_arg,
get_Optional_arg,
get_Type_arg,
is_Any,
is_ClassVar,
is_Dict,
is_ForwardRef,
is_NewType,
is_Optional,
is_Tuple,
is_Type,
name_for_type_like,
)
from zuper_typing.constants import PYTHON_36, PYTHON_37
from zuper_typing.monkey_patching_typing import original_dict_getitem
from zuper_typing.my_dict import is_CustomDict, make_dict
def test_union():
a = Union[int, str]
# print(a)
# print(type(a))
if PYTHON_37:
assert isinstance(a, typing._GenericAlias)
# print(a.__dict__)
assert a.__origin__ == Union
def test_optional():
a = Optional[int]
assert is_Optional(a)
assert get_Optional_arg(a) is int
class Tree:
n: Optional["Tree"]
symbols = {"Tree": Tree}
def test_forward():
x = Tree.__annotations__["n"]
assert is_Optional(x)
t = get_Optional_arg(x)
# print(t)
# print(type(t))
# print(t.__dict__)
assert is_ForwardRef(t)
# print(f'__forward_arg__: {t.__forward_arg__!r}')
# print(f'__forward_code__: {t.__forward_code__!r}')
# print(f'__forward_evaluated__: {t.__forward_evaluated__!r}')
# print(f'__forward_value__: {t.__forward_value__!r}')
# print(f'__forward_is_argument__: {t.__forward_is_argument__!r}')
assert get_ForwardRef_arg(t) == "Tree"
if PYTHON_36: # pragma: no cover
t._eval_type(localns=locals(), globalns=globals())
else:
t._evaluate(localns=locals(), globalns=globals())
# print(f'__forward_arg__: {t.__forward_arg__!r}')
# print(f'__forward_code__: {t.__forward_code__!r}')
# print(f'__forward_evaluated__: {t.__forward_evaluated__!r}')
# print(f'__forward_value__: {t.__forward_value__!r}')
# print(f'__forward_is_argument__: {t.__forward_is_argument__!r}')
def test_any():
a = Any
assert is_Any(a)
def test_any2():
a = int
assert not is_Any(a)
def test_any3():
a = Tree
assert not is_Any(a)
def test_Tuple1():
a = Tuple[int, str]
assert is_Tuple(a)
def test_Tuple2():
a = Tuple[int, ...]
assert is_Tuple(a)
def test_Typevar():
    """TypeVar instances are plain objects of type typing.TypeVar."""
    tv = TypeVar("a")
    assert isinstance(tv, TypeVar)
def test_ClassVar():
a = ClassVar[int]
assert is_ClassVar(a)
assert get_ClassVar_arg(a) is int
def test_Type():
X = TypeVar("X")
a = Type[X]
assert is_Type(a)
assert get_Type_arg(a) == X
# assert get_ClassVar_arg(a) is int
def test_NewType():
C = NewType("C", str)
assert is_NewType(C)
assert get_NewType_arg(C) is str
# assert get_ClassVar_arg(a) is int
def test_DictName():
D = original_dict_getitem((int, str))
# print(D.__dict__)
assert is_Dict(D)
# assert get_Dict_name(D) == 'Dict[int,str]'
def test_Dict1():
K, V = get_Dict_args(typing.Dict)
assert_equal(K, Any)
assert_equal(V, Any)
def test_Dict2():
X = typing.Dict[str, int]
# print(type(X))
# print(f"{X!r}")
assert is_Dict(X)
K, V = get_Dict_args(X)
assert_equal(K, str)
assert_equal(V, int)
# print(K, V)
N = name_for_type_like(X)
assert_equal(N, "Dict[str,int]")
def test_Dict3():
D = make_dict(str, int)
assert is_CustomDict(D)
N = name_for_type_like(D)
assert_equal(N, "Dict[str,int]")
def test_corner_Type():
T = Type
get_Type_arg(T)
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_annotations_tricks.py
|
test_annotations_tricks.py
|
from dataclasses import dataclass
from typing import Callable
from nose.tools import assert_equal
# from zuper_ipce_tests.test_utils import assert_type_roundtrip
from zuper_typing.annotations_tricks import get_Callable_info, is_Callable
from zuper_typing.monkey_patching_typing import MyNamedArg as NamedArg
def test_detection_1():
T = Callable[[], int]
# print(T.__dict__)
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.parameters_by_name, {})
assert_equal(res.parameters_by_position, ())
assert_equal(res.returns, int)
def test_detection_2():
T = Callable[[NamedArg(str, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.returns, int)
assert_equal(res.parameters_by_position, (str,))
assert_equal(res.parameters_by_name, {"A": str})
def test_detection_3():
T = Callable[[NamedArg(str, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.returns, int)
assert_equal(res.parameters_by_position, (str,))
assert_equal(res.parameters_by_name, {"A": str})
def test_detection_4():
@dataclass
class MyClass:
pass
T = Callable[[NamedArg(MyClass, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.returns, int)
assert_equal(res.parameters_by_position, (MyClass,))
assert_equal(res.parameters_by_name, {"A": MyClass})
def test_NamedArg_eq():
a = NamedArg(int, "A")
b = NamedArg(int, "A")
assert_equal(a, b)
A = Callable[[NamedArg(int, "A")], int]
B = Callable[[NamedArg(int, "A")], int]
assert_equal(A, B)
# @raises(TypeError)
def test_back():
T = Callable[[NamedArg(str, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
T2 = res.as_callable()
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_callable.py
|
test_callable.py
|
from zuper_typing.annotations_tricks import make_Tuple
def test_making():
    """make_Tuple accepts any arity from zero up, duplicates included."""
    members = (int, float, bool, str, bytes, int)
    for count in range(len(members) + 1):
        make_Tuple(*members[:count])
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_tuple.py
|
test_tuple.py
|
from functools import wraps
from typing import Tuple
from unittest import SkipTest
from nose.plugins.attrib import attr
def known_failure(f, forbid: Tuple[type, ...] = ()):  # pragma: no cover
    """Decorator marking *f* as a known-failing test.

    The wrapped test is expected to raise: when it does, the failure is
    reported as a skip.  If the raised exception is an instance of one of
    the types in *forbid*, that counts as a real failure instead.  If the
    test unexpectedly passes, an AssertionError reminds the author to
    remove the marker.
    """
    @wraps(f)
    def run_test(*args, **kwargs):
        try:
            f(*args, **kwargs)
        except BaseException as e:
            if forbid:
                # A forbidden exception type means the failure mode changed.
                if isinstance(e, forbid):
                    msg = f"Known failure test is not supposed to raise {type(e).__name__}"
                    raise AssertionError(msg) from e
            raise SkipTest("Known failure test failed: " + str(e))
        raise AssertionError("test passed but marked as work in progress")
    return attr("known_failure")(run_test)
def relies_on_missing_features(f):
    """Decorator skipping tests that exercise not-yet-implemented features.

    Any exception from *f* becomes a SkipTest; an unexpected pass raises
    AssertionError so the marker gets removed once the feature lands.
    """
    msg = "Test relying on not implemented feature."
    @wraps(f)
    def run_test(*args, **kwargs):  # pragma: no cover
        try:
            f(*args, **kwargs)
        except BaseException as e:
            raise SkipTest(msg) from e
        raise AssertionError("test passed but marked as work in progress")
    return attr("relies_on_missing_features")(run_test)
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_utils.py
|
test_utils.py
|
from datetime import datetime
from typing import (
Any,
Callable,
ClassVar,
Dict,
Iterator,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from nose.tools import assert_equal
from zuper_typing import dataclass
from zuper_typing.annotations_tricks import (
get_Callable_info,
is_Callable,
is_Iterator,
is_Sequence,
name_for_type_like,
)
from zuper_typing.literal import make_Literal
from zuper_typing.my_dict import make_list, make_set
from zuper_typing.my_intersection import Intersection
from zuper_typing.recursive_tricks import replace_typevars
from zuper_typing.subcheck import can_be_used_as2
from zuper_typing.uninhabited import make_Uninhabited
from zuper_typing_tests.test_utils import known_failure
X = TypeVar("X")
Y = TypeVar("Y")
def test_corner_cases10():
assert can_be_used_as2(str, str)
assert not can_be_used_as2(str, int)
def test_corner_cases11():
assert can_be_used_as2(Dict, Dict)
assert can_be_used_as2(Dict[str, str], Dict[str, str])
assert can_be_used_as2(Dict[str, str], Dict[str, Any])
def test_corner_cases12():
assert not can_be_used_as2(Dict[str, str], str)
def test_corner_cases13():
assert not can_be_used_as2(str, Dict[str, str])
def test_corner_cases14():
assert not can_be_used_as2(Union[int, str], str)
def test_corner_cases15():
assert can_be_used_as2(str, Union[int, str])
def test_corner_cases16():
assert can_be_used_as2(Union[int], Union[int, str])
def test_corner_cases17():
assert not can_be_used_as2(Union[int, str], Union[str])
def test_corner_cases18():
@dataclass
class A:
a: int
@dataclass
class B(A):
pass
@dataclass
class C:
a: int
assert can_be_used_as2(B, A)
assert can_be_used_as2(C, A)
def test_corner_cases18b():
@dataclass
class A:
a: int
@dataclass
class C:
a: str
assert not can_be_used_as2(A, C)
def test_corner_cases18c():
@dataclass
class A:
a: int
@dataclass
class C:
pass
assert not can_be_used_as2(C, A)
assert can_be_used_as2(A, C)
def test_corner_cases19():
assert not can_be_used_as2(str, int)
def test_corner_cases06():
assert can_be_used_as2(int, Optional[int]).result
def test_corner_cases20():
res = can_be_used_as2(Tuple[int, int], Tuple[int, Any])
assert res, res
def test_corner_cases20b():
res = can_be_used_as2(Tuple[int, int], Tuple[int, object])
assert res, res
def test_corner_cases21():
assert not can_be_used_as2(Tuple[int, int], int)
def test_corner_cases22():
assert not can_be_used_as2(object, int)
assert can_be_used_as2(Any, int)
def test_corner_cases23():
@dataclass
class A:
a: int
@dataclass
class B(A):
pass
@dataclass
class C(A):
pass
res = can_be_used_as2(Union[B, C], A)
assert res, res
def test_corner_cases24():
assert not can_be_used_as2(Tuple[int, int], Tuple[int, str])
def test_corner_cases30():
assert not can_be_used_as2(Sequence, List[int])
def test_corner_cases31():
assert not can_be_used_as2(List[str], List[int])
def test_corner_cases32():
assert can_be_used_as2(List[str], List)
def test_corner_cases33():
class A:
pass
class B:
pass
assert not can_be_used_as2(A, B)
def test_corner_cases36():
A = Set[str]
B = Set[Any]
assert can_be_used_as2(A, B)
assert can_be_used_as2(B, A)
def test_corner_cases36b():
A = Set[str]
B = Set[object]
assert can_be_used_as2(A, B)
assert not can_be_used_as2(B, A)
def test_corner_cases34():
assert not can_be_used_as2(Dict[str, str], Dict[int, str])
def test_corner_cases35():
assert not can_be_used_as2(Dict[str, str], Dict[int, str])
def test_corner_cases25():
D1 = Dict[str, Any]
D2 = Dict[str, int]
res = can_be_used_as2(D2, D1)
assert res, res
def test_corner_cases26():
D1 = Dict[str, Any]
D2 = Dict[str, int]
res = can_be_used_as2(D1, D2)
assert res, res
def test_corner_cases26b():
D1 = Dict[str, object]
D2 = Dict[str, int]
res = can_be_used_as2(D1, D2)
assert not res, res
def test_match_List1a():
X = TypeVar("X")
L1 = List[str]
L2 = List[X]
res = can_be_used_as2(L1, L2)
# print(L1, L2, res)
assert res, res
assert res.M.get_lb("X") is str, res
assert res.M.get_ub("X") is None, res
def test_match_List1b():
X = TypeVar("X")
L1 = List[X]
L2 = List[str]
res = can_be_used_as2(L1, L2)
# print(L1, L2, res)
assert res, res
assert res.M.get_ub("X") is str, res
assert res.M.get_lb("X") is None, res
def test_match_List2():
X = TypeVar("X")
L1 = List[Any]
L2 = List[X]
res = can_be_used_as2(L1, L2)
# print(L1, L2, res)
assert res, res
assert res.M.get_ub("X") is None
assert res.M.get_lb("X") is None
def test_match_List3():
""" We want that match(X, Any) does not match X at all. """
X = TypeVar("X")
L1 = List[Any]
L2 = List[X]
res = can_be_used_as2(L2, L1)
assert res, res
assert not "X" in res.matches, res
assert res.M.get_ub("X") is None
assert res.M.get_lb("X") is None
# assert is_Any(res.matches['X']), res
def test_match_TypeVar0():
L1 = Tuple[str]
L2 = TypeVar("L2")
res = can_be_used_as2(L1, L2)
# print(res)
assert res, res
def test_match_TypeVar0b():
L1 = str
L2 = X
res = can_be_used_as2(L1, L2)
assert res.M.get_ub("X") is None, res
assert res.M.get_lb("X") is str, res
assert res, res
def test_match_MySet1():
C1 = Set[str]
C2 = make_set(str)
res = can_be_used_as2(C1, C2)
assert res, res
def test_match_Tuple0():
L1 = Tuple[str]
L2 = Tuple[X]
res = can_be_used_as2(L1, L2)
# print(res)
assert res.M.get_ub("X") is None, res
assert res.M.get_lb("X") is str, res
assert res, res
def test_match_Tuple1():
L1 = Tuple[str, int]
L2 = Tuple[X, Y]
res = can_be_used_as2(L1, L2)
# print(res)
assert res.M.get_ub("X") is None, res
assert res.M.get_lb("X") is str, res
assert res.M.get_ub("Y") is None, res
assert res.M.get_lb("Y") is int, res
assert res, res
def test_replace_typevars():
X = TypeVar("X")
Y = TypeVar("Y")
# noinspection PyTypeHints
X2 = TypeVar("X") # note: needs this to make the test work
S = {X2: str, Y: int}
tries = (
(X, {X2: str}, str),
(Any, {}, Any),
(List[X], {X2: str}, List[str]),
(Tuple[X], {X2: str}, Tuple[str]),
(Tuple[X, ...], {X2: str}, Tuple[str, ...]),
(Tuple[bool, ...], {X2: str}, Tuple[bool, ...]),
(Callable[[X], Y], {X2: str, Y: int}, Callable[[str], int]),
(Optional[X], {X2: str}, Optional[str]),
(Union[X, Y], {X2: str, Y: int}, Union[int, str]),
(ClassVar[X], {X2: str}, ClassVar[str]),
(Dict[X, Y], {X2: str, Y: int}, Dict[str, int]),
(Sequence[X], {X2: str}, Sequence[str]),
(Iterator[X], {X2: str}, Iterator[str]),
(Set[X], S, Set[str]),
(Type[X], {X2: str}, Type[str]),
(Type[int], {X2: str}, Type[int]),
(ClassVar[List[X]], {X2: str}, ClassVar[List[str]]),
(ClassVar[int], {X2: str}, ClassVar[int]),
(Iterator, S, Iterator[Any]),
(List, S, List[Any]),
(make_list(bool), S, make_list(bool)),
(make_list(X), S, make_list(str)),
(Sequence, S, Sequence[Any]),
(Intersection[X, int], S, Intersection[str, int]),
(Intersection[X, str], S, str),
(make_Literal(2, 3), S, make_Literal(2, 3)),
)
for orig, subst, result in tries:
yield try_, orig, subst, result
def try_(orig, subst, result):
obtained = replace_typevars(orig, bindings=subst, symbols={})
# print(f"obtained {type(obtained)} {obtained!r}")
assert_equal(name_for_type_like(obtained), name_for_type_like(result))
def test_dataclass2():
@dataclass
class A:
data: int
parent: "A"
assert A.__annotations__["parent"] is A
X = TypeVar("X")
bindings = {X: int}
A2 = replace_typevars(A, bindings=bindings, symbols={})
def test_callable1():
T = Callable[[int], str]
assert is_Callable(T)
cinfo = get_Callable_info(T)
# print(cinfo)
assert cinfo.parameters_by_name == {"0": int}
assert cinfo.parameters_by_position == (int,)
assert cinfo.returns is str
def test_callable2():
X = TypeVar("X")
Y = TypeVar("Y")
T = Callable[[X], Y]
assert is_Callable(T)
cinfo = get_Callable_info(T)
# print(cinfo)
assert cinfo.parameters_by_name == {"0": X}
assert cinfo.parameters_by_position == (X,)
assert cinfo.returns == Y
subs = {X: str, Y: int}
def f(x):
return subs.get(x, x)
cinfo2 = cinfo.replace(f)
assert cinfo2.parameters_by_name == {"0": str}, cinfo2
assert cinfo2.parameters_by_position == (str,), cinfo2
assert cinfo2.returns == int, cinfo2
def test_Sequence1():
assert is_Sequence(Sequence[int])
def test_Sequence2():
assert is_Sequence(Sequence)
def test_Iterator1():
assert is_Iterator(Iterator[int])
def test_Iterator2():
assert is_Iterator(Iterator)
def test_typevar1a():
X = TypeVar("X")
expect_type = X
found_type = str
res = can_be_used_as2(found_type, expect_type)
assert res.result, res
def test_typevar1b():
X = TypeVar("X")
expect_type = str
found_type = X
res = can_be_used_as2(found_type, expect_type)
assert res.result, res
def test_typevar2():
X = TypeVar("X")
expect_type = List[str]
found_type = List[X]
res = can_be_used_as2(found_type, expect_type)
assert res.result, res
def assert_can(a, b):
    """Assert that `a` can be used where `b` is expected."""
    outcome = can_be_used_as2(a, b)
    assert outcome.result, outcome
def assert_not(a, b):
    """Assert that `a` can NOT be used where `b` is expected."""
    outcome = can_be_used_as2(a, b)
    assert not outcome.result, outcome
@known_failure
def test_subcheck_literal_01():
T = make_Literal(1)
U = make_Literal(1, 2)
V = make_Literal(1, 2, 3)
W = make_Literal(1, 2, 4)
assert_can(T, T)
assert_can(T, U)
assert_not(U, T)
assert_can(T, V)
assert_not(V, T)
assert_can(T, W)
assert_not(W, T)
assert_not(V, W)
assert_not(W, V)
assert_not(U, List[int])
assert_can(make_Uninhabited(), U)
assert_can(U, int)
assert_can(U, Union[int, float])
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_subcheck.py
|
test_subcheck.py
|
from typing import Optional
from zuper_typing import dataclass
def test_ordering():
    """Field order is preserved: positional and keyword construction agree."""
    @dataclass
    class C:
        root: Optional[int]
        links: int

    instance = C(1, 2)
    assert instance.root == 1
    assert instance.links == 2
    assert C(1, 2) == C(root=1, links=2)
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_ordering.py
|
test_ordering.py
|
from typing import TypeVar
from zuper_typing import dataclass, Generic
def test_hash1():
    """Both plain and generic parametric dataclasses remain hashable."""
    @dataclass
    class Parametric1:
        a: int
        b: int

    assert Parametric1(0, 1).__hash__ is not None

    X = TypeVar("X")

    @dataclass(unsafe_hash=True)
    class Parametric2(Generic[X]):
        a: X
        b: X

    assert Parametric2[int](1, 2).__hash__ is not None
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_hashing.py
|
test_hashing.py
|
from nose.tools import assert_equal
from zuper_typing.my_intersection import (
Intersection,
get_Intersection_args,
is_Intersection,
make_Intersection,
)
def test_intersection1():
    """make_Intersection round-trips its argument tuple."""
    members = (bool, int)
    T = make_Intersection(members)
    assert is_Intersection(T)
    assert_equal(get_Intersection_args(T), members)
def test_intersection2():
    """Subscript syntax Intersection[bool, int] builds the same structure."""
    subscripted = Intersection[bool, int]
    assert is_Intersection(subscripted)
    assert_equal(get_Intersection_args(subscripted), (bool, int))
|
zuper-typing-z5
|
/zuper-typing-z5-5.3.0.tar.gz/zuper-typing-z5-5.3.0/src/zuper_typing_tests/test_intersection.py
|
test_intersection.py
|
from setuptools import find_packages, setup
def get_version(filename):
    """Extract the ``__version__`` string from a Python source file.

    Scans *filename* line by line for an assignment starting with
    ``__version__`` and returns its literal string value.

    Raises ValueError if no such line exists, or if the assigned value is
    not a plain string literal (the original crashed with AttributeError
    on non-string values via the deprecated ``ast.Str.s`` accessor).
    """
    import ast

    with open(filename) as f:
        for line in f:
            if line.startswith("__version__"):
                # Parse just this assignment; literal_eval rejects
                # anything that is not a plain literal expression.
                node = ast.parse(line).body[0]
                version = ast.literal_eval(node.value)
                if not isinstance(version, str):
                    raise ValueError(
                        "Bad __version__ value in %r: %r" % (filename, version)
                    )
                return version
    raise ValueError("No version found in %r." % filename)
# Runtime dependencies.
install_requires = [
    "zuper-commons-z6>=6.1.4",
    "oyaml",
    "pybase64",
    "PyYAML",
    "validate_email",
    "mypy_extensions",
    "typing_extensions",
    "nose",
    "coverage>=1.4.33",
    "jsonschema",
    "numpy",  # fix: was listed twice in the original
    "base58<2.0,>=1.0.2",
    "frozendict",
    "pytz",
    "termcolor",
]
import sys

# dataclasses is in the standard library from Python 3.7; only the
# backport package is needed on 3.6.
system_version = tuple(sys.version_info)[:3]
if system_version < (3, 7):
    install_requires.append("dataclasses")

module = "zuper_typing"
line = "z6"
package = f"zuper-typing-{line}"
src = "src"
version = get_version(filename=f"src/{module}/__init__.py")
setup(
    name=package,
    package_dir={"": src},
    packages=["zuper_typing"],
    version=version,
    zip_safe=False,
    entry_points={"console_scripts": []},
    install_requires=install_requires,
)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/setup.py
|
setup.py
|
from typing import Any
from .annotations_tricks import make_CustomTuple, make_dict, make_list, make_set

__all__ = [
    "DictStrStr",
    "SetStr",
    "SetObject",
    "DictStrType",
    "DictStrObject",
    "ListStr",
    "DictStrAny",
    "empty_tuple",
    "EmptyTupleType",
    "ListObject",
]

# Frequently-used concrete container types, built once here and shared
# across the package so identical custom types are not recreated at every
# use site.
ListObject = make_list(object)
DictStrStr = make_dict(str, str)
DictStrObject = make_dict(str, object)
DictStrAny = make_dict(str, Any)
DictStrType = make_dict(str, type)
SetObject = make_set(object)
SetStr = make_set(str)
ListStr = make_list(str)
# The empty fixed tuple, as a type and as its single shared instance.
EmptyTupleType = make_CustomTuple(())
empty_tuple = EmptyTupleType()
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/common.py
|
common.py
|
import typing
from typing import Union

from .constants import PYTHON_36

# TypeLike: anything that can appear in an annotation position.
# On Python 3.6 typing._SpecialForm does not exist, so fall back to `type`.
if PYTHON_36:  # pragma: no cover
    TypeLike = type
else:
    TypeLike = Union[type, typing._SpecialForm]
__all__ = ["TypeLike"]
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/aliases.py
|
aliases.py
|
from dataclasses import Field, fields, is_dataclass
from typing import Dict, List, Optional, Set, Tuple, Type, TypeVar
from zuper_commons.types import ZValueError
from . import dataclass
from .annotations_tricks import (
get_VarTuple_arg,
is_VarTuple,
)
from .dataclass_info import is_dataclass_instance
from .annotations_tricks import (
get_DictLike_args,
get_ListLike_arg,
get_SetLike_arg,
is_DictLike,
is_ListLike,
is_SetLike,
is_FixedTupleLike,
get_FixedTupleLike_args,
)
__all__ = ["eq", "EqualityResult"]
X = TypeVar("X")
@dataclass
class EqualityResult:
    """Outcome of a structural equality check between two values.

    Truthiness mirrors ``result`` so callers can write ``if eq(...)``.
    """

    result: bool  # True when the two values compared equal
    why: "Dict[str, EqualityResult]"  # per-field / per-element failure details
    a: object  # left operand
    b: object  # right operand
    msg: Optional[str] = None  # human-readable reason when result is False

    def __bool__(self) -> bool:
        return self.result
def eq(T: Type[X], a: X, b: X) -> EqualityResult:
    """Structurally compare ``a`` and ``b`` interpreted as type ``T``.

    Dispatches on the kind of ``T`` (dataclass, list-like, fixed/variadic
    tuple-like, dict-like, set-like); anything else falls back to ``==``.
    NOTE(review): FixedTupleLike is tested before VarTuple — the order
    matters if a type satisfies both predicates; presumably intentional.
    """
    # logger.info("eq", T=T, a=a, b=b)
    # todo: tuples
    if is_dataclass(T):
        return eq_dataclass(T, a, b)
    elif is_ListLike(T):
        return eq_listlike(T, a, b)
    elif is_FixedTupleLike(T):
        return eq_tuplelike_fixed(T, a, b)
    elif is_VarTuple(T):
        return eq_tuplelike_var(T, a, b)
    elif is_DictLike(T):
        return eq_dictlike(T, a, b)
    elif is_SetLike(T):
        return eq_setlike(T, a, b)
    else:
        # Unknown kind: defer entirely to the values' own __eq__.
        if not (a == b):
            return EqualityResult(result=False, a=a, b=b, why={}, msg="by equality")
        else:
            return EqualityResult(result=True, a=a, b=b, why={})
K = TypeVar("K")
V = TypeVar("V")
def eq_dictlike(T: Type[Dict[K, V]], a: Dict[K, V], b: Dict[K, V]) -> EqualityResult:
    """Compare two dict-like values: same key set, then eq() per value.

    Keys are compared with plain set equality (their own __eq__/__hash__);
    only values recurse through eq().
    """
    k1 = set(a)
    k2 = set(b)
    if k1 != k2:
        return EqualityResult(result=False, a=a, b=b, why={}, msg="different keys")
    # Only the value type is needed; the key type is deliberately ignored.
    _, V = get_DictLike_args(T)
    why = {}
    for k in k1:
        va = a[k]
        vb = b[k]
        r = eq(V, va, vb)
        if not r.result:
            why[k] = r
    result = len(why) == 0
    return EqualityResult(result=result, why=(why), a=a, b=b)
def eq_listlike(T: Type[List[V]], a: List[V], b: List[V]) -> EqualityResult:
    """Compare two list-like values element-wise using eq()."""
    if len(a) != len(b):
        return EqualityResult(result=False, a=a, b=b, why={}, msg="different length")
    element_type = get_ListLike_arg(T)
    failures = {}
    for index, (left, right) in enumerate(zip(a, b)):
        outcome = eq(element_type, left, right)
        if not outcome.result:
            failures[str(index)] = outcome
    return EqualityResult(result=not failures, why=failures, a=a, b=b)
def eq_setlike(T: Type[Set[V]], a: Set[V], b: Set[V]) -> EqualityResult:
    """Compare two set-like values: every element needs an eq()-equal partner.

    Quadratic by design: elements are matched via eq(), not hashing.
    """
    k1 = len(a)
    k2 = len(b)
    if k1 != k2:
        return EqualityResult(result=False, a=a, b=b, why={}, msg="different length")
    V = get_SetLike_arg(T)
    why = {}
    # Every element of a must have an eq()-equal partner in b...
    for i, va in enumerate(a):
        for vb in b:
            r = eq(V, va, vb)
            if r:
                break
        else:
            why["a" + str(i)] = EqualityResult(result=False, a=va, b=None, why={}, msg="Missing")
    # ...and vice versa (the matching is not guaranteed to be a bijection).
    for i, vb in enumerate(b):
        for va in a:
            r = eq(V, va, vb)
            if r:
                break
        else:
            why["b" + str(i)] = EqualityResult(result=False, a=None, b=vb, why={}, msg="Missing")
    result = len(why) == 0
    return EqualityResult(result=result, why=why, a=a, b=b)
def eq_tuplelike_fixed(T: Type[Tuple], a: Tuple, b: Tuple) -> EqualityResult:
    """Compare two fixed-arity tuples component-wise against T's component types."""
    assert is_FixedTupleLike(T), T
    component_types = get_FixedTupleLike_args(T)
    expected_len = len(component_types)
    if not (len(a) == len(b) == expected_len):
        return EqualityResult(result=False, a=a, b=b, why={}, msg="different length")
    failures = {}
    for index, (component_type, left, right) in enumerate(zip(component_types, a, b)):
        outcome = eq(component_type, left, right)
        if not outcome.result:
            failures[str(index)] = outcome
    return EqualityResult(result=not failures, why=failures, a=a, b=b)
def eq_tuplelike_var(T: Type[Tuple], a: Tuple, b: Tuple) -> EqualityResult:
    """Structural equality for variable-length tuples (Tuple[X, ...])."""
    assert is_VarTuple(T), T
    VT = get_VarTuple_arg(T)
    if len(a) != len(b):
        return EqualityResult(result=False, a=a, b=b, why={}, msg="different length")
    failures = {}
    for i, (va, vb) in enumerate(zip(a, b)):
        res = eq(VT, va, vb)
        if not res.result:
            failures[str(i)] = res
    return EqualityResult(result=not failures, why=failures, a=a, b=b)
def eq_dataclass(T, a, b):
    """Structural equality for dataclass instances: compare field by field
    using each field's declared type."""
    if not is_dataclass(T):  # pragma: no cover
        raise ZValueError(T=T, a=a, b=b)
    if not is_dataclass_instance(a) or not is_dataclass_instance(b):
        return EqualityResult(result=False, why={}, a=a, b=b, msg="not even dataclasses")
    failures = {}
    for f in fields(T):
        res = eq(f.type, getattr(a, f.name), getattr(b, f.name))
        if not res.result:
            failures[f.name] = res
    return EqualityResult(result=not failures, why=dict(failures), a=a, b=b)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/structural_equalities.py
|
structural_equalities.py
|
import datetime
from dataclasses import is_dataclass
from decimal import Decimal
from numbers import Number
from typing import Callable, cast, ClassVar, Dict, List, NewType, Optional, Set, Tuple, Type
import numpy as np
from zuper_commons.types import ZAssertionError
from .aliases import TypeLike
from .annotations_tricks import (
get_Callable_info,
get_ClassVar_arg,
get_Dict_args,
get_List_arg,
get_NewType_arg,
get_NewType_name,
get_Optional_arg,
get_Set_arg,
get_Type_arg,
get_Union_args,
get_VarTuple_arg,
is_Any,
is_Callable,
is_ClassVar,
is_Dict,
is_ForwardRef,
is_List,
is_NewType,
is_Optional,
is_Set,
is_Type,
is_TypeVar,
is_Union,
is_VarTuple,
make_Tuple,
make_Union,
)
from .dataclass_info import DataclassInfo, get_dataclass_info, set_dataclass_info
from .monkey_patching_typing import my_dataclass
from .annotations_tricks import (
CustomDict,
CustomList,
CustomSet,
get_CustomDict_args,
get_CustomList_arg,
get_CustomSet_arg,
is_CustomDict,
is_CustomList,
is_CustomSet,
is_TupleLike,
make_dict,
make_list,
make_set,
is_FixedTupleLike,
get_FixedTupleLike_args,
)
__all__ = ["recursive_type_subst", "check_no_placeholders_left"]
def recursive_type_subst(
    T: TypeLike, f: Callable[[TypeLike], TypeLike], ignore: tuple = ()
) -> TypeLike:
    """Rebuild the type ``T`` bottom-up, applying ``f`` to its leaves.

    Composite types (Optional, Union, tuples, dicts, lists, sets, NewType,
    ClassVar, dataclasses, Type[...], Callable) are reconstructed from their
    recursively-substituted arguments; if nothing changed, the original
    object ``T`` is returned unchanged. Types listed in ``ignore`` (used to
    break recursion cycles) are returned as-is.
    """
    if T in ignore:
        # logger.info(f'ignoring {T} in {ignore}')
        return T
    r = lambda _: recursive_type_subst(_, f, ignore + (T,))
    if is_Optional(T):
        a = get_Optional_arg(T)
        a2 = r(a)
        if a == a2:
            return T
        # logger.info(f'Optional unchanged under {f.__name__}: {a} == {a2}')
        return Optional[a2]
    elif is_ForwardRef(T):
        return f(T)
    elif is_Union(T):
        ts0 = get_Union_args(T)
        ts = tuple(r(_) for _ in ts0)
        if ts0 == ts:
            # logger.info(f'Union unchanged under {f.__name__}: {ts0} == {ts}')
            return T
        return make_Union(*ts)
    elif is_TupleLike(T):
        T = cast(Type[Tuple], T)
        if is_VarTuple(T):
            X = get_VarTuple_arg(T)
            X2 = r(X)
            if X == X2:
                return T
            return Tuple[X2, ...]
        elif is_FixedTupleLike(T):
            argst = get_FixedTupleLike_args(T)
            ts = tuple(r(_) for _ in argst)
            if argst == ts:
                return T
            return make_Tuple(*ts)
        else:
            assert False
    elif is_Dict(T):
        T = cast(Type[Dict], T)
        K, V = get_Dict_args(T)
        K2, V2 = r(K), r(V)
        if (K, V) == (K2, V2):
            return T
        # BUG FIX: previously returned Dict[K, V] (the original arguments),
        # silently discarding the substitution; use the substituted ones.
        return Dict[K2, V2]
        # return original_dict_getitem((K, V))
    elif is_CustomDict(T):
        T = cast(Type[CustomDict], T)
        K, V = get_CustomDict_args(T)
        K2, V2 = r(K), r(V)
        if (K, V) == (K2, V2):
            return T
        return make_dict(K2, V2)
    elif is_List(T):
        T = cast(Type[List], T)
        V = get_List_arg(T)
        V2 = r(V)
        if V == V2:
            return T
        return List[V2]
    elif is_CustomList(T):
        T = cast(Type[CustomList], T)
        V = get_CustomList_arg(T)
        V2 = r(V)
        if V == V2:
            return T
        return make_list(V2)
    elif is_Set(T):
        T = cast(Type[Set], T)
        V = get_Set_arg(T)
        V2 = r(V)
        if V == V2:
            return T
        return make_set(V2)
    elif is_CustomSet(T):
        T = cast(Type[CustomSet], T)
        V = get_CustomSet_arg(T)
        V2 = r(V)
        if V == V2:
            return T
        return make_set(V2)
    elif is_NewType(T):
        name = get_NewType_name(T)
        a = get_NewType_arg(T)
        a2 = r(a)
        if a == a2:
            return T
        return NewType(name, a2)
    elif is_ClassVar(T):
        V = get_ClassVar_arg(T)
        V2 = r(V)
        if V == V2:
            return T
        return ClassVar[V2]
    elif is_dataclass(T):
        return recursive_type_subst_dataclass(T, f, ignore)
    elif T in (
        int,
        bool,
        float,
        Decimal,
        datetime.datetime,
        bytes,
        str,
        type(None),
        type,
        np.ndarray,
        Number,
        object,
    ):
        # leaf types: let f decide what to do with them
        return f(T)
    elif is_TypeVar(T):
        return f(T)
    elif is_Type(T):
        V = get_Type_arg(T)
        V2 = r(V)
        if V == V2:
            return T
        return Type[V2]
    elif is_Any(T):
        return f(T)
    elif is_Callable(T):
        info = get_Callable_info(T)
        args = []
        for k, v in info.parameters_by_name.items():
            # if is_MyNamedArg(v):
            #     # try:
            #     v = v.original
            # TODO: add MyNamedArg
            args.append(f(v))
        fret = f(info.returns)
        args = list(args)
        # noinspection PyTypeHints
        return Callable[args, fret]
        # noinspection PyTypeHints
    elif isinstance(T, type) and "Placeholder" in T.__name__:
        return f(T)
    else:  # pragma: no cover
        # raise ZNotImplementedError(T=T)
        # FIXME
        return T
def recursive_type_subst_dataclass(T, f: Callable[[TypeLike], TypeLike], ignore: tuple = ()):
    """Apply recursive_type_subst to every annotation of dataclass ``T``.

    If no annotation changed, ``T`` itself is returned; otherwise a fresh
    dataclass is built with the substituted annotations, copied attributes,
    and a new derived name registered in its DataclassInfo.
    """

    def r(_):
        # recurse, remembering T to break cycles
        return recursive_type_subst(_, f, ignore + (T,))

    annotations = dict(getattr(T, "__annotations__", {}))
    annotations2 = {}
    nothing_changed = True
    for k, v0 in list(annotations.items()):
        v2 = r(v0)
        nothing_changed &= v0 == v2
        annotations2[k] = v2
    if nothing_changed:
        # logger.info(f'Union unchanged under {f.__name__}: {ts0} == {ts}')
        return T
    from .zeneric2 import GenericProxy

    # build a fresh proxy class carrying the substituted annotations
    class Base(GenericProxy):
        pass

    Base.__annotations__ = annotations2
    Base.__module__ = T.__module__
    T2 = my_dataclass(Base)
    # copy over per-field class attributes (e.g. defaults) present on T
    for k in annotations:
        if hasattr(T, k):
            setattr(T2, k, getattr(T, k))
    # always
    setattr(T2, "__doc__", getattr(T, "__doc__", None))
    clsi = get_dataclass_info(T)
    # bindings2 = {r(k): r(v) for k, v in clsi.bindings.items()}
    # extra2 = tuple(r(_) for _ in clsi.extra)
    orig2 = tuple(r(_) for _ in clsi.orig)
    clsi2 = DataclassInfo(name="", orig=orig2)
    from .zeneric2 import get_name_for

    # derive the new class name from the substituted type arguments
    name2 = get_name_for(T.__name__, clsi2)
    clsi2.name = name2
    setattr(T2, "__name__", name2)
    qualname = getattr(T, "__qualname__")
    qualname2 = qualname.replace(T.__name__, name2)
    setattr(T2, "__qualname__", qualname2)
    set_dataclass_info(T2, clsi2)
    return T2
def check_no_placeholders_left(T: type):
    """ Check that there is no Placeholder* left in the type. """

    def fail_on_placeholder(x):
        # Placeholder* classes are internal stand-ins; none may survive.
        if isinstance(x, type) and x.__name__.startswith("Placeholder"):
            raise ZAssertionError("Found Placeholder", x=x)
        return x

    return recursive_type_subst(T, fail_on_placeholder)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/assorted_recursive_type_subst.py
|
assorted_recursive_type_subst.py
|
import os
import sys
from typing import ClassVar
from . import logger
__all__ = [
"PYTHON_36",
"PYTHON_37",
"ZuperTypingGlobals",
"ATT_PRINT_ORDER",
"PYTHON_38",
"ANNOTATIONS_ATT",
"ATT_TUPLE_TYPES",
"NAME_ARG",
"INTERSECTION_ATT",
"ATT_LIST_TYPE",
"TUPLE_EMPTY_ATTR",
"MULTI_ATT",
]
# Exact interpreter-version flags. Compare (major, minor): checking only
# sys.version_info[1] would wrongly match e.g. a hypothetical 4.6.
PYTHON_36 = sys.version_info[:2] == (3, 6)
PYTHON_37 = sys.version_info[:2] == (3, 7)
PYTHON_38 = sys.version_info[:2] == (3, 8)
# Attribute names used internally by zuper_typing to tag generated classes.
NAME_ARG = "__name_arg__"  # XXX: repeated
ANNOTATIONS_ATT = "__annotations__"  # standard Python annotations attribute
DEPENDS_ATT = "__depends__"  # NOTE(review): defined but not listed in __all__ above — confirm
INTERSECTION_ATT = "__intersection__"
ATT_TUPLE_TYPES = "__tuple_types__"  # component types of a custom tuple class
MULTI_ATT = "__dataclass_info__"  # per-class DataclassInfo registry
TUPLE_EMPTY_ATTR = "__empty__"
ATT_LIST_TYPE = "__list_type__"  # element type of a custom list class
class ZuperTypingGlobals:
    """Global feature switches for the zuper_typing machinery."""

    cache_enabled: ClassVar[bool] = False  # cache constructed types
    enable_type_checking: ClassVar[bool] = True
    enable_type_checking_difficult: ClassVar[bool] = True
    check_tuple_values = False  # in CustomTuple
    paranoid = False
    verbose = True  # when True, failure messages include detailed explanations
class MakeTypeCache:
    # Module-level cache of constructed (parametrized) types, shared process-wide.
    cache = {}
# CI-specific override: the "no-cache" CircleCI job must run with caching off.
# NOTE(review): cache_enabled already defaults to False above, so this looks
# redundant — confirm whether the default was meant to be True.
circle_job = os.environ.get("CIRCLE_JOB", None)
if circle_job == "test-3.7-no-cache":  # pragma: no cover
    ZuperTypingGlobals.cache_enabled = False
    logger.warning("Disabling cache (zuper_typing:cache_enabled) due to circle_job.")
class DataclassHooks:
    # Optional replacement implementations for dataclass __repr__/__str__,
    # installed elsewhere; None means "use the default".
    dc_repr = None
    dc_str = None


# Attribute naming the preferred field order for pretty-printing.
ATT_PRINT_ORDER = "__print_order__"
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/constants.py
|
constants.py
|
from dataclasses import dataclass, field, is_dataclass
from datetime import datetime
from decimal import Decimal
from functools import reduce
from numbers import Number
from typing import Any, cast, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Type, TypeVar
import numpy as np
from zuper_commons.text import indent
from zuper_commons.types import (
ZAssertionError,
ZException,
ZNotImplementedError,
ZTypeError,
ZValueError,
)
from . import logger
from .aliases import TypeLike
from .annotations_tricks import (
get_DictLike_args,
get_FixedTupleLike_args,
get_ForwardRef_arg,
get_Iterable_arg,
get_ListLike_arg,
get_NewType_arg,
get_NewType_name,
get_Optional_arg,
get_Sequence_arg,
get_SetLike_arg,
get_tuple_types,
get_Type_arg,
get_TypeVar_name,
get_Union_args,
get_VarTuple_arg,
is_Any,
is_Callable,
is_ClassVar,
is_DictLike,
is_FixedTupleLike,
is_ForwardRef,
is_Iterable,
is_List,
is_ListLike,
is_Optional,
is_Sequence,
is_SetLike,
is_TupleLike,
is_Type,
is_TypeVar,
is_Union,
is_VarTuple,
make_dict,
make_Tuple,
MyStr,
)
from .constants import ANNOTATIONS_ATT, ZuperTypingGlobals
from .literal import get_Literal_args, is_Literal, make_Literal
from .my_intersection import get_Intersection_args, is_Intersection
from .type_algebra import Matches, type_sup
from .uninhabited import is_Uninhabited
__all__ = ["can_be_used_as2", "value_liskov", "CanBeUsed", "check_value_liskov", "type_liskov"]
@dataclass
class CanBeUsed:
    """Result of a Liskov-style "can T1 be used as T2" check.

    Truthy iff ``result`` is True, so instances can be used directly in
    conditionals.
    """

    result: bool  # whether the substitution is allowed
    why: str  # human-readable explanation
    M: Matches  # accumulated type-variable constraints
    matches: Optional[Dict[str, type]] = None  # always recomputed from M (see __post_init__)
    reasons: "Dict[str, CanBeUsed]" = field(default_factory=dict)  # sub-results by key

    def __post_init__(self):
        assert isinstance(self.M, Matches), self
        # any value passed for `matches` is overwritten here
        self.matches = self.M.get_matches()
        # DictStrCan is defined right after this class; resolved at call time
        self.reasons = DictStrCan(self.reasons)

    def __bool__(self):
        return self.result
# Dict[str, CanBeUsed] specialization, used for CanBeUsed.reasons.
DictStrCan = make_dict(str, CanBeUsed)


class CanBeUsedCache:
    # Memoizes dataclass-vs-dataclass comparisons, keyed by
    # (module, qualname) of both sides.
    can_be_used_cache = {}
from .annotations_tricks import is_NewType
from .get_patches_ import is_placeholder
def type_liskov(
    T1: TypeLike,
    T2: TypeLike,
    matches: Optional[Matches] = None,
    assumptions0: Tuple[Tuple[Any, Any], ...] = (),
    allow_is_shortcut: bool = True,
) -> CanBeUsed:
    """Decide whether a value of type T1 can be used where T2 is expected
    (Liskov substitutability).

    Type-variable constraints are accumulated in ``matches``; ``assumptions0``
    carries (T1, T2) pairs already assumed compatible (to break recursion).
    Returns a truthy/falsy CanBeUsed carrying an explanation.

    Fixes vs. previous revision:
    - the ``is_Intersection(T2)`` branch returned False even when T1 matched
      every member of the intersection;
    - a leftover debug ``logger.info`` fired on every plain-class comparison.
    """
    if matches is None:
        matches = Matches()
    else:
        assert isinstance(matches, Matches), matches
    if is_placeholder(T1) or is_placeholder(T2):
        msg = "cannot compare classes with 'Placeholder' in the name (reserved internally)."
        raise ZValueError(msg, T1=T1, T2=T2)
    if is_Any(T2):
        return CanBeUsed(True, "Any", matches)
    if is_Any(T1):
        return CanBeUsed(True, "Any", matches)
    if is_Uninhabited(T1):
        return CanBeUsed(True, "Empty", matches)
    if (T1, T2) in assumptions0:
        return CanBeUsed(True, "By assumption", matches)
    if allow_is_shortcut:
        if (T1 is T2) or (T1 == T2):
            return CanBeUsed(True, "equal", matches)
    # redundant with above
    # if is_Any(T1) or is_Any(T2):
    #     return CanBeUsed(True, "Any ignores everything", matches)
    if T2 is object:
        return CanBeUsed(True, "object is the top", matches)
    # cop out for the easy cases
    assumptions = assumptions0 + ((T1, T2),)
    if is_NewType(T1) and is_NewType(T2):
        # special case of same alias
        t1 = get_NewType_arg(T1)
        t2 = get_NewType_arg(T2)
        n1 = get_NewType_name(T1)
        n2 = get_NewType_name(T2)
        res = can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
        if res:
            if n1 == n2:
                return res
    if is_NewType(T2):
        # unwrap the target alias and compare against its underlying type
        T2 = get_NewType_arg(T2)
    if is_Literal(T1):
        v1 = get_Literal_args(T1)
        if is_Literal(T2):
            v2 = get_Literal_args(T2)
            included = all(any(x1 == x2 for x2 in v2) for x1 in v1)
            if included:
                return CanBeUsed(True, "included", matches)
            else:
                return CanBeUsed(False, "not included", matches)
        else:
            t1 = type(v1[0])
            return can_be_used_as2(t1, T2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
    # logger.info(f'can_be_used_as\n {T1} {T2}\n {assumptions0}')
    if is_Literal(T2):
        return CanBeUsed(False, "T1 not literal", matches)
    if T1 is type(None):
        if is_Optional(T2):
            return CanBeUsed(True, "", matches)
        # This never happens because it is caught by T1 is T2
        elif T2 is type(None):
            return CanBeUsed(True, "", matches)
        else:
            msg = f"Needs type(None), got {T2}"
            return CanBeUsed(False, msg, matches)
    if is_Union(T1):
        if is_Union(T2):
            if get_Union_args(T1) == get_Union_args(T2):
                return CanBeUsed(True, "same", matches)
        # can_be_used(Union[A,B], C)
        # == can_be_used(A,C) and can_be_used(B,C)
        for t in get_Union_args(T1):
            can = can_be_used_as2(t, T2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            # logger.info(f'can_be_used_as t = {t} {T2}')
            if not can.result:
                msg = f"Cannot match {t}"
                return CanBeUsed(False, msg, matches)
        return CanBeUsed(True, "", matches)
    if is_Union(T2):
        reasons = []
        for t in get_Union_args(T2):
            can = can_be_used_as2(T1, t, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            if can.result:
                return CanBeUsed(True, f"union match with {t} ", can.M)
            reasons.append(f"- {t}: {can.why}")
        msg = f"Cannot use {T1} as any of {T2}:\n" + "\n".join(reasons)
        return CanBeUsed(False, msg, matches)
    if is_TypeVar(T2):
        n2 = get_TypeVar_name(T2)
        if is_TypeVar(T1):
            n1 = get_TypeVar_name(T1)
            if n1 == n2:
                # TODO: intersection of bounds
                return CanBeUsed(True, "", matches)
            else:
                matches = matches.must_be_subtype_of(n1, T2)
                # raise ZNotImplementedError(T1=T1,T2=T2)
        matches = matches.must_be_supertype_of(n2, T1)
        return CanBeUsed(True, "", matches)
    if is_Intersection(T1):
        if is_Intersection(T2):
            if get_Intersection_args(T1) == get_Intersection_args(T2):
                return CanBeUsed(True, "same", matches)
        # Int[a, b] <= Int[C, D]
        # = Int[a, b] <= C  Int[a, b] <= D
        for t2 in get_Intersection_args(T2):
            can = can_be_used_as2(T1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            # logger.info(f'can_be_used_as t = {t} {T2}')
            if not can.result:
                msg = f"Cannot match {t2}"
                return CanBeUsed(False, msg, matches)
        return CanBeUsed(True, "", matches)
    if is_Intersection(T2):
        # a <= Int[C, D]
        # = a <= C and a <= D
        for t2 in get_Intersection_args(T2):
            can = can_be_used_as2(T1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            if not can.result:
                return CanBeUsed(False, f"no match {T1} {t2} ", can.M)
        # BUG FIX: previously this branch fell through to a False result even
        # when T1 matched every member of the intersection.
        return CanBeUsed(True, "", matches)
    if is_TypeVar(T1):
        n1 = get_TypeVar_name(T1)
        matches = matches.must_be_subtype_of(n1, T2)
        return CanBeUsed(True, "Any", matches)
        # TODO: not implemented
    if is_Optional(T1):
        t1 = get_Optional_arg(T1)
        if is_Optional(T2):
            t2 = get_Optional_arg(T2)
            return can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
        if T2 is type(None):
            return CanBeUsed(True, "", matches)
        return can_be_used_as2(t1, T2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
    if is_Optional(T2):
        t2 = get_Optional_arg(T2)
        if is_Optional(T1):
            t1 = get_Optional_arg(T1)
            return can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
        return can_be_used_as2(T1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
    # ---- concrete
    if T1 is MyStr:
        if T2 in (str, MyStr):
            return CanBeUsed(True, "str ~ MyStr", matches)
        else:
            return CanBeUsed(False, "MyStr wants str", matches)
    if T2 is MyStr:
        if T1 in (str, MyStr):
            return CanBeUsed(True, "str ~ MyStr", matches)
        else:
            return CanBeUsed(False, "MyStr wants str", matches)
    trivial = (int, str, bool, Decimal, datetime, float, Number)
    if T1 in trivial:
        if T2 not in trivial:
            return CanBeUsed(False, "A trivial cannot be a subclass of non-trivial", matches)
    if T2 in trivial:
        if T1 in trivial + (np.float32, np.float64):
            return CanBeUsed(issubclass(T1, T2), "trivial subclass", matches)
        # raise ZNotImplementedError(T1=T1, T2=T2)
        return CanBeUsed(False, f"Not a trivial type (T1={T1}, T2={T2})", matches)
    if is_DictLike(T2):
        if not is_DictLike(T1):
            msg = f"Expecting a dictionary, got {T1}"
            return CanBeUsed(False, msg, matches)
        else:
            T1 = cast(Type[Dict], T1)
            T2 = cast(Type[Dict], T2)
            K1, V1 = get_DictLike_args(T1)
            K2, V2 = get_DictLike_args(T2)
            rk = can_be_used_as2(K1, K2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            if not rk:
                return CanBeUsed(False, f"keys {K1} {K2}: {rk}", matches)
            rv = can_be_used_as2(V1, V2, rk.M, assumptions, allow_is_shortcut=allow_is_shortcut)
            if not rv:
                return CanBeUsed(False, f"values {V1} {V2}: {rv}", matches)
            return CanBeUsed(True, f"ok: {rk} {rv}", rv.M)
    else:
        if is_DictLike(T1):
            msg = "A Dict needs a dictionary"
            return CanBeUsed(False, msg, matches)
    assert not is_Union(T2)
    if is_dataclass(T1):
        if not is_dataclass(T2):
            msg = "Second is not dataclass "
            return CanBeUsed(False, msg, matches)
        from .zeneric2 import StructuralTyping

        if isinstance(T2, StructuralTyping):
            if not isinstance(T1, StructuralTyping):
                msg = "Not structural typing"
                return CanBeUsed(False, msg, matches)
    if is_dataclass(T2):
        if not is_dataclass(T1):
            if ZuperTypingGlobals.verbose:
                msg = f"Expecting dataclass to match to {T2}, got something that is not a " f"dataclass: {T1}"
                msg += f" union: {is_Union(T1)}"
            else:  # pragma: no cover
                msg = "not dataclass"
            return CanBeUsed(False, msg, matches)
        # h1 = get_type_hints(T1)
        # h2 = get_type_hints(T2)
        key = (T1.__module__, T1.__qualname__, T2.__module__, T2.__qualname__)
        if key in CanBeUsedCache.can_be_used_cache:
            return CanBeUsedCache.can_be_used_cache[key]
        h1 = getattr(T1, ANNOTATIONS_ATT, {})
        h2 = getattr(T2, ANNOTATIONS_ATT, {})
        for k, v2 in h2.items():
            if not k in h1:
                if ZuperTypingGlobals.verbose:
                    msg = (
                        f'Type {T2}\n requires field "{k}" \n of type {v2} \n but {T1} does '
                        f"not have it. "
                    )
                else:  # pragma: no cover
                    msg = k
                res = CanBeUsed(False, msg, matches)
                CanBeUsedCache.can_be_used_cache[key] = res
                return res
            v1 = h1[k]
            # XXX
            if is_ClassVar(v1):
                continue
            can = can_be_used_as2(v1, v2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            if not can.result:
                if ZuperTypingGlobals.verbose:
                    msg = (
                        f'Type {T2}\n requires field "{k}"\n of type\n {v2} \n but'
                        + f" {T1}\n has annotated it as\n {v1}\n which cannot be used. "
                    )
                    msg += "\n\n" + f"assumption: {assumptions}"
                    msg += "\n\n" + indent(can.why, "> ")
                else:  # pragma: no cover
                    msg = ""
                res = CanBeUsed(False, msg, matches)
                CanBeUsedCache.can_be_used_cache[key] = res
                return res
        res = CanBeUsed(True, "dataclass", matches)
        CanBeUsedCache.can_be_used_cache[key] = res
        return res
    if is_FixedTupleLike(T1):
        T1 = cast(Type[Tuple], T1)
        if not is_TupleLike(T2):
            msg = "A tuple can only be used as a tuple"
            return CanBeUsed(False, msg, matches)
        T2 = cast(Type[Tuple], T2)
        if is_FixedTupleLike(T2):
            t1s = get_tuple_types(T1)
            t2s = get_tuple_types(T2)
            if len(t1s) != len(t2s):
                msg = "Different length"
                return CanBeUsed(False, msg, matches)
            for i, (t1, t2) in enumerate(zip(t1s, t2s)):
                can = can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
                if not can:
                    return CanBeUsed(False, f"{t1} {t2}", matches, reasons={str(i): can})
                matches = can.M
            return CanBeUsed(True, "", matches)
        elif is_VarTuple(T2):
            t1s = get_tuple_types(T1)
            if len(t1s) == 0:
                return CanBeUsed(True, "Empty tuple counts as var tuple", matches)
            T = get_VarTuple_arg(T2)
            tmax = reduce(type_sup, t1s)
            can = can_be_used_as2(tmax, T, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            if not can:
                msg = "The sup of the types in T1 is not a sub of T"
                return CanBeUsed(False, msg, matches, reasons={"": can})
            return CanBeUsed(True, "", matches)
        else:
            raise ZAssertionError(T1=T1, T2=T2)
    if is_VarTuple(T1):
        T1 = cast(Type[Tuple], T1)
        if not is_VarTuple(T2):
            msg = "A var tuple can only be used as a var tuple"
            return CanBeUsed(False, msg, matches)
        T2 = cast(Type[Tuple], T2)
        t1 = get_VarTuple_arg(T1)
        t2 = get_VarTuple_arg(T2)
        can = can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
        if not can:
            return CanBeUsed(False, f"{t1} {t2}", matches, reasons={"": can})
        else:
            return CanBeUsed(True, "", matches)
    if is_TupleLike(T2):
        assert not is_TupleLike(T1)
        return CanBeUsed(False, f"Not a tuple type T1={T1} T2={T2}", matches)
    if is_Any(T1):
        assert not is_Union(T2)
        if not is_Any(T2):
            msg = "Any is the top"
            return CanBeUsed(False, msg, matches)
    if is_ListLike(T2):
        if not is_ListLike(T1):
            msg = "A List can only be used as a List"
            return CanBeUsed(False, msg, matches)
        T1 = cast(Type[List], T1)
        T2 = cast(Type[List], T2)
        t1 = get_ListLike_arg(T1)
        t2 = get_ListLike_arg(T2)
        # print(f'matching List with {t1} {t2}')
        can = can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
        if not can.result:
            return CanBeUsed(False, f"{t1} {T2}", matches)
        return CanBeUsed(True, "", can.M)
    if is_Callable(T2):
        if not is_Callable(T1):
            return CanBeUsed(False, "not callable", matches)
        raise ZNotImplementedError(T1=T1, T2=T2)
    if is_ForwardRef(T1):
        n1 = get_ForwardRef_arg(T1)
        if is_ForwardRef(T2):
            n2 = get_ForwardRef_arg(T2)
            if n1 == n2:
                return CanBeUsed(True, "", matches)
            else:
                return CanBeUsed(False, "different name", matches)
        else:
            return CanBeUsed(False, "not fw ref", matches)
    if is_ForwardRef(T2):
        n2 = get_ForwardRef_arg(T2)
        if hasattr(T1, "__name__"):
            if T1.__name__ == n2:
                return CanBeUsed(True, "", matches)
            else:
                return CanBeUsed(False, "different name", matches)
    if is_Iterable(T2):
        T2 = cast(Type[Iterable], T2)
        t2 = get_Iterable_arg(T2)
        if is_Iterable(T1):
            T1 = cast(Type[Iterable], T1)
            t1 = get_Iterable_arg(T1)
            return can_be_used_as2(t1, t2, matches, allow_is_shortcut=allow_is_shortcut)
        if is_SetLike(T1):
            T1 = cast(Type[Set], T1)
            t1 = get_SetLike_arg(T1)
            return can_be_used_as2(t1, t2, matches, allow_is_shortcut=allow_is_shortcut)
        if is_ListLike(T1):
            T1 = cast(Type[List], T1)
            t1 = get_ListLike_arg(T1)
            return can_be_used_as2(t1, t2, matches, allow_is_shortcut=allow_is_shortcut)
        if is_DictLike(T1):
            T1 = cast(Type[Dict], T1)
            K, V = get_DictLike_args(T1)
            t1 = Tuple[K, V]
            return can_be_used_as2(t1, t2, matches, allow_is_shortcut=allow_is_shortcut)
        return CanBeUsed(False, "expect iterable", matches)
    if is_SetLike(T2):
        if not is_SetLike(T1):
            msg = "A Set can only be used as a Set"
            return CanBeUsed(False, msg, matches)
        T1 = cast(Type[Set], T1)
        T2 = cast(Type[Set], T2)
        t1 = get_SetLike_arg(T1)
        t2 = get_SetLike_arg(T2)
        # print(f'matching List with {t1} {t2}')
        can = can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
        if not can.result:
            return CanBeUsed(False, f"Set argument fails", matches, reasons={"set_arg": can})
        return CanBeUsed(True, "", can.M)
    if is_Sequence(T1):
        T1 = cast(Type[Sequence], T1)
        t1 = get_Sequence_arg(T1)
        if is_ListLike(T2):
            T2 = cast(Type[List], T2)
            t2 = get_ListLike_arg(T2)
            can = can_be_used_as2(t1, t2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            if not can.result:
                return CanBeUsed(False, f"{t1} {T2}", matches)
            return CanBeUsed(True, "", can.M)
        msg = f"Needs a Sequence[{t1}], got {T2}"
        return CanBeUsed(False, msg, matches)
    if isinstance(T1, type) and isinstance(T2, type):
        # NOTE: this didn't work with Number for whatever reason
        # NOTE: issubclass(A, B) == type(T2).__subclasscheck__(T2, T1)
        # a0 = type.__subclasscheck__(T2, T1)
        # b0 = type.__subclasscheck__(T1, T2)
        # (removed leftover debug logger.info(T1=..., T2=...) here)
        a = issubclass(T1, T2)
        # assert a0 == a and b0 == b, (T1, T2, a0, b0, a, b)
        if a:
            return CanBeUsed(True, f"type.__subclasscheck__ {T1} {T2}", matches)
        else:
            msg = f"Type {T1} is not a subclass of {T2} "
            # msg += f"; viceversa : {b}"
            return CanBeUsed(False, msg, matches)
    if is_List(T1):
        msg = f"Needs a List, got {T2}"
        return CanBeUsed(False, msg, matches)
    if T2 is type(None):
        msg = f"Needs type(None), got {T1}"
        return CanBeUsed(False, msg, matches)
    if is_NewType(T1):
        n1 = get_NewType_arg(T1)
        if is_NewType(T2):
            n2 = get_NewType_arg(T2)
            return can_be_used_as2(n1, n2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
            # if n1 == n2:
            #     return CanBeUsed(True, "", matches)
            # else:
            #     raise ZNotImplementedError(T1=T1, T2=T2)
        else:
            return can_be_used_as2(n1, T2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
    if is_Type(T1):
        if not is_Type(T2):
            return CanBeUsed(False, f"Not a Type[X], T1={T1}, T2={T2}", matches)
        sc1 = get_Type_arg(T1)
        sc2 = get_Type_arg(T2)
        return can_be_used_as2(sc1, sc2, matches, assumptions, allow_is_shortcut=allow_is_shortcut)
    # msg = f"{T1} ? {T2}"  # pragma: no cover
    raise ZNotImplementedError(T1=T1, T2=T2)
can_be_used_as2 = type_liskov
def value_liskov(a: object, T: TypeLike) -> CanBeUsed:
    """Check whether the concrete value `a` can inhabit the type `T`."""
    if is_Literal(T):
        return CanBeUsed(a in get_Literal_args(T), "literal", Matches())  # XXX
    # Structured types are dispatched to their dedicated checkers; the order
    # matters (fixed tuples before var tuples, union last).
    dispatch = (
        (is_DictLike, value_liskov_DictLike),
        (is_SetLike, value_liskov_SetLike),
        (is_ListLike, value_liskov_ListLike),
        (is_FixedTupleLike, value_liskov_FixedTupleLike),
        (is_VarTuple, value_liskov_VarTuple),
        (is_Union, value_liskov_Union),
    )
    for predicate, handler in dispatch:
        if predicate(T):
            return handler(a, T)
    # fall back to a type-level check against the estimated type of `a`
    return can_be_used_as2(ztype(a), T)
def value_liskov_Union(a: object, T: type) -> CanBeUsed:
    """Accept `a` if it inhabits at least one member of the union `T`."""
    failed = {}
    for i, member in enumerate(get_Union_args(T)):
        attempt = value_liskov(a, member)
        if attempt:
            return CanBeUsed(True, f"Match #{i}", attempt.M, attempt.matches, reasons={str(i): attempt})
        failed[str(i)] = attempt
    return CanBeUsed(False, "No match", Matches(), reasons=failed)
def value_liskov_DictLike(a: object, T: Type[Dict]) -> CanBeUsed:
    """Check that `a` is a dict whose keys and values inhabit T's parameters."""
    KT, VT = get_DictLike_args(cast(Type[Dict], T))
    if not isinstance(a, dict):
        return CanBeUsed(False, "not a dict", Matches())
    for k, v in a.items():
        key_ok = value_liskov(k, KT)
        if not key_ok:
            return CanBeUsed(False, f"Invalid key: {key_ok}", Matches())
        val_ok = value_liskov(v, VT)
        if not val_ok:
            return CanBeUsed(False, f"Invalid value: {val_ok}", Matches())
    return CanBeUsed(True, "ok", Matches())
def value_liskov_SetLike(a: object, T: Type[Set]) -> CanBeUsed:
    """Check that `a` is a set whose elements all inhabit T's element type."""
    element_type = get_SetLike_arg(T)
    if not isinstance(a, set):
        return CanBeUsed(False, "not a set", Matches())
    for i, element in enumerate(a):
        verdict = value_liskov(element, element_type)
        if not verdict:
            return CanBeUsed(False, f"Invalid value #{i}: {verdict}", Matches())
    return CanBeUsed(True, "ok", Matches())
def value_liskov_ListLike(a: object, T: Type[List]) -> CanBeUsed:
    """Check that `a` is a list whose elements all inhabit T's element type."""
    element_type = get_ListLike_arg(T)
    if not isinstance(a, list):
        return CanBeUsed(False, f"not a list: {type(a)}", Matches())
    for i, element in enumerate(a):
        verdict = value_liskov(element, element_type)
        if not verdict:
            return CanBeUsed(False, f"Invalid value #{i}: {verdict}", Matches())
    return CanBeUsed(True, "ok", Matches())
def value_liskov_VarTuple(a: object, T: Type[Tuple]) -> CanBeUsed:
    """Check that `a` is a tuple whose elements all inhabit T's element type."""
    element_type = get_VarTuple_arg(T)
    if not isinstance(a, tuple):
        return CanBeUsed(False, "not a tuple", Matches())
    for i, element in enumerate(a):
        verdict = value_liskov(element, element_type)
        if not verdict:
            return CanBeUsed(False, f"Invalid value #{i}", reasons={str(i): verdict}, M=Matches())
    return CanBeUsed(True, "ok", Matches())
def value_liskov_FixedTupleLike(a: object, T: Type[Tuple]) -> CanBeUsed:
    """Check that `a` is a tuple of the right arity, componentwise typed by T."""
    expected_types = get_FixedTupleLike_args(T)
    if not isinstance(a, tuple):
        return CanBeUsed(False, "not a tuple", Matches())
    if len(a) != len(expected_types):
        return CanBeUsed(False, "wrong length", Matches())
    for i, (element, expected) in enumerate(zip(a, expected_types)):
        verdict = value_liskov(element, expected)
        if not verdict:
            return CanBeUsed(False, f"Invalid value #{i}", reasons={str(i): verdict}, M=Matches())
    return CanBeUsed(True, "ok", Matches())
X = TypeVar("X")
def check_value_liskov(ob: object, T: Type[X]) -> X:
    """Return `ob` typed as T if it inhabits T; raise ZTypeError otherwise."""
    try:
        ok = value_liskov(ob, T)
    except ZException as e:
        raise ZAssertionError(
            "Could not run check_value_liskov() successfully.", ob=ob, Tob=type(ob), T=T
        ) from e
    if ok:
        return cast(T, ob)
    raise ZTypeError(ob=ob, T=T, ok=ok)
def ztype(a: object) -> type:
    """Estimate the most specific type of a value; tuples become fixed tuples
    of Literal types, everything else is just type(a)."""
    if type(a) is tuple:
        literal_components = tuple(make_Literal(item) for item in cast(tuple, a))
        return make_Tuple(*literal_components)
    # todo: substitute tuple, list, dict with estimated
    return type(a)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/subcheck.py
|
subcheck.py
|
from dataclasses import dataclass, Field, fields, is_dataclass, MISSING
from typing import Dict, Optional, Tuple, Type
from zuper_commons.text import pretty_dict
from zuper_commons.types import ZAssertionError, ZValueError
from . import logger
from .aliases import TypeLike
from .annotations_tricks import is_TypeLike
from .constants import ANNOTATIONS_ATT
from .constants import MULTI_ATT
__all__ = [
"get_dataclass_info",
"set_dataclass_info",
"DataclassInfo",
"same_as_default",
"is_dataclass_instance",
"asdict_not_recursive",
"has_default",
"get_default",
"get_fields_values",
]
class DataclassInfo:
    """Bookkeeping for a (possibly generic) dataclass: its display name, the
    original type arguments, the specializations created from it, and the
    lazily-computed set of still-open type variables."""

    name: str  # only for keeping track
    # These are the open ones
    # generic_att_XXX: Tuple[TypeLike, ...]
    # These are the bound ones
    # bindings: Dict[TypeLike, TypeLike]
    # These are the original ones
    orig: Tuple[TypeLike, ...]
    # Plus the ones we collected
    # extra: Tuple[TypeLike, ...]
    specializations: Dict[Tuple, type]  # type arguments -> specialized class
    _open: Optional[Tuple[TypeLike, ...]]  # cache for get_open(); None = not computed
    fti = None  # class-level cache of find_typevars_inside (lazy import below)

    def __init__(self, *, name: str, orig: Tuple[TypeLike, ...]):
        self.name = name
        # self.bindings = bindings
        self.orig = orig
        if not isinstance(orig, tuple):  # pragma: no cover
            raise ZAssertionError(sself=self)
        for i, _ in enumerate(orig):  # pragma: no cover
            if not is_TypeLike(_):
                msg = f"Element #{i} is not a typelike."
                raise ZAssertionError(msg, name=name, orig=orig)
        # self.extra = extra
        # if bindings: raise ZException(self=self)
        self.specializations = {}
        self._open = None

    def get_open(self) -> Tuple[TypeLike, ...]:
        """Return (and cache) the type variables still free inside `orig`,
        in first-seen order without duplicates."""
        if DataclassInfo.fti is None:
            # imported lazily to avoid a circular import at module load time
            from .recursive_tricks import find_typevars_inside

            DataclassInfo.fti = find_typevars_inside
        _open = getattr(self, "_open", None)
        if _open is None:
            res = []
            for x in self.orig:
                for y in DataclassInfo.fti(x):
                    if y not in res:
                        res.append(y)
            self._open = tuple(res)
        return self._open

    #
    # def get_open_old(self) -> Tuple[TypeLike, ...]:
    #     res = []
    #     for x in self.orig:
    #         if x not in self.bindings:
    #             res.append(x)
    #     for x in self.extra:
    #         if x not in self.bindings:
    #             res.append(x)
    #
    #     return tuple(res)

    # def __post_init__(self):
    #     for k in self.bindings:
    #         if k not in (self.orig + self.extra):
    #             msg = f"There is a bound variable {k} which is not an original one or child. "
    #             raise ZValueError(msg, di=self)

    def __repr__(self):
        # from .debug_print_ import debug_print
        debug_print = str
        return pretty_dict(
            "DataclassInfo",
            dict(
                name=self.name,
                orig=debug_print(self.orig),
                # extra=debug_print(self.extra),
                # bindings=debug_print(self.bindings)
                open=self.get_open(),
            ),
        )
def set_dataclass_info(T, di: DataclassInfo):
    """Register `di` for class `T` in its MULTI_ATT registry, keyed by id(T)."""
    assert is_TypeLike(T), T
    # NOTE(review): hasattr() also sees an *inherited* registry, in which case
    # the base class's dict is mutated — confirm this sharing is intentional.
    if not hasattr(T, MULTI_ATT):
        setattr(T, MULTI_ATT, {})
    getattr(T, MULTI_ATT)[id(T)] = di
def get_dataclass_info(T: Type[dataclass]) -> DataclassInfo:
    """Look up the DataclassInfo registered for `T`; if none was registered,
    return a fresh empty one (and log when the registry exists but misses T)."""
    assert is_TypeLike(T), T
    fallback = DataclassInfo(name=T.__name__, orig=())
    if not hasattr(T, MULTI_ATT):
        return fallback
    registry = getattr(T, MULTI_ATT)
    if id(T) not in registry:
        logger.info(f"Cannot find type info for {T} ({id(T)}", ma=registry)
        return fallback
    return registry[id(T)]
def get_fields_values(x: dataclass) -> Dict[str, object]:
    """Return {field name: current value} for the dataclass instance `x`."""
    assert is_dataclass_instance(x), x
    T = type(x)
    try:
        fields_ = fields(T)
    except Exception as e:
        raise ZValueError(T=T) from e
    return {f.name: getattr(x, f.name) for f in fields_}
def get_all_annotations(cls: type) -> Dict[str, type]:
    """ Gets all the annotations including the parents. """
    # NOTE(review): despite the docstring, this collects only the *direct*
    # bases' annotations (in __bases__ order, later bases win) and does not
    # include cls's own __annotations__ nor walk the full MRO — confirm intent.
    res = {}
    for base in cls.__bases__:
        annotations = getattr(base, ANNOTATIONS_ATT, {})
        res.update(annotations)
    return res
def asdict_not_recursive(x: dataclass) -> Dict[str, object]:
    """ Note: this does not return the classvars"""
    # Shallow conversion: field values are returned as-is, not recursed into
    # (unlike dataclasses.asdict).
    return get_fields_values(x)
def is_dataclass_instance(x: object) -> bool:
    """True iff `x` is an *instance* of a dataclass (not the class itself)."""
    return is_dataclass(x) and not isinstance(x, type)
def has_default(f: Field) -> bool:
    """ Returns true if it has a default value or factory"""
    # MISSING is a sentinel: identity (`is`) is the correct check and avoids
    # invoking an arbitrary __eq__ of the actual default value.
    return f.default is not MISSING or f.default_factory is not MISSING
def has_default_value(f: Field) -> bool:
    """Return True iff the field declares a plain default value."""
    # identity check against the MISSING sentinel (not `!=`, which would call
    # the default value's __eq__)
    return f.default is not MISSING
def has_default_factory(f: Field) -> bool:
    """Return True iff the field declares a default factory."""
    # identity check against the MISSING sentinel (not `!=`)
    return f.default_factory is not MISSING
def get_default(f: Field) -> object:
    """Return the field's default, calling the factory if one is declared.

    Raises AssertionError if the field has neither a default value nor a
    default factory (same behavior as the previous assert-based version).
    """
    if f.default is not MISSING:
        return f.default
    if f.default_factory is not MISSING:
        return f.default_factory()
    raise AssertionError(f"get_default() called on a field without a default: {f.name}")
def same_as_default(f: Field, value: object) -> bool:
    """True iff *value* equals the field's default (value or factory product).

    Fields without any default always compare False. Sentinel checks use
    identity (`is not MISSING`) rather than `!=` (see has_default).
    """
    if f.default is not MISSING:
        return f.default == value
    if f.default_factory is not MISSING:
        return f.default_factory() == value
    return False
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/dataclass_info.py
|
dataclass_info.py
|
import traceback
from dataclasses import dataclass, is_dataclass
from datetime import datetime
from decimal import Decimal
from typing import cast, Dict, Iterator, List, Optional, Set, Tuple, Type, Union
from zuper_commons.types import ZNotImplementedError, ZValueError
from . import logger
from .aliases import TypeLike
from .annotations_tricks import (
get_ClassVar_arg,
get_DictLike_args,
get_fields_including_static,
get_FixedTupleLike_args,
get_ListLike_arg,
get_NewType_arg,
get_NewType_name,
get_Optional_arg,
get_SetLike_arg,
get_Type_arg,
get_TypeVar_name,
get_Union_args,
get_VarTuple_arg,
is_Any,
is_ClassVar,
is_DictLike,
is_FixedTupleLike,
is_ListLike,
is_NewType,
is_Optional,
is_SetLike,
is_Type,
is_TypeLike,
is_TypeVar,
is_Union,
is_VarTuple,
MyBytes,
MyStr,
)
from .dataclass_info import get_fields_values, is_dataclass_instance
from .literal import get_Literal_args, is_Literal
from .my_intersection import get_Intersection_args, is_Intersection
from .uninhabited import is_Uninhabited
assert logger
__all__ = [
"assert_equivalent_objects",
"assert_equivalent_types",
"get_patches",
"is_placeholder",
"patch",
"NotEquivalentException",
]
@dataclass
class Patch:
    """One point of difference found between two objects."""

    # Field ordering hint for the zuper debug pretty-printer.
    __print_order__ = ["prefix_str", "value1", "value2", "msg"]
    # Path (dict keys / list indices / field names) from the root to the difference.
    prefix: Tuple[Union[str, int], ...]
    value1: object
    value2: Optional[object]
    # Human-readable rendering of `prefix`; filled in by __post_init__.
    prefix_str: Optional[str] = None
    msg: Optional[str] = None

    def __post_init__(self) -> None:
        # Pre-render the path once so consumers can display it directly.
        self.prefix_str = "/".join(map(str, self.prefix))
def assert_equivalent_objects(ob1: object, ob2: object, compare_types: bool = True):
    """Raise ZValueError unless the two objects (or types) are equivalent."""
    if not is_TypeLike(ob1):
        patches = get_patches(ob1, ob2, compare_types)
        if patches:
            msg = "The objects are not equivalent"
            raise ZValueError(msg, ob1=ob1, ob2=ob2, patches=patches)
    else:
        ob1 = cast(TypeLike, ob1)
        ob2 = cast(TypeLike, ob2)
        assert_equivalent_types(ob1, ob2)
def get_patches(a: object, b: object, compare_types: bool = True) -> List[Patch]:
    """Collect every difference between *a* and *b* as a list of Patch records."""
    return list(patch(a, b, (), compare_types))
def patch(
    o1, o2, prefix: Tuple[Union[str, int], ...], compare_types: bool = True
) -> Iterator["Patch"]:
    """Yield a Patch for every point where *o1* and *o2* differ.

    Recurses into dataclass instances, dicts and lists; TypeLike values are
    compared via assert_equivalent_types. `prefix` is the path accumulated
    so far.
    """
    import numpy as np

    if isinstance(o1, np.ndarray):
        if not np.all(o1 == o2):
            yield Patch(prefix, o1, o2)
        # BUG FIX: arrays are fully handled here.  The original fell through
        # on the unequal branch to the `if o1 == o2:` test below, which
        # evaluates an elementwise comparison in boolean context and raises
        # "truth value of an array is ambiguous".
        return
    if o1 == o2:
        return
    if is_TypeLike(o1) and is_TypeLike(o2):
        try:
            assert_equivalent_types(o1, o2)
        except NotEquivalentException:
            yield Patch(prefix, o1, o2, msg=traceback.format_exc())
    elif is_dataclass_instance(o1) and is_dataclass_instance(o2):
        if compare_types:
            try:
                assert_equivalent_types(type(o1), type(o2))
            except BaseException:
                yield Patch(
                    prefix=prefix + ("$schema",),
                    value1=type(o1),
                    value2=type(o2),
                    msg=traceback.format_exc(),
                )
        fields1 = get_fields_values(o1)
        fields2 = get_fields_values(o2)
        if list(fields1) != list(fields2):
            # BUG FIX: when the field sets differ we cannot recurse per-key
            # (the original continued and hit a KeyError on fields2[k]).
            yield Patch(prefix, o1, o2)
            return
        for k in fields1:
            yield from patch(fields1[k], fields2[k], prefix + (k,), compare_types)
    elif isinstance(o1, dict) and isinstance(o2, dict):
        for k, v in o1.items():
            if k not in o2:
                yield Patch(prefix + (k,), v, None)
            else:
                yield from patch(v, o2[k], prefix + (k,), compare_types)
    elif isinstance(o1, list) and isinstance(o2, list):
        n = max(len(o1), len(o2))
        for i in range(n):
            if i >= len(o1) or i >= len(o2):
                yield Patch(prefix + (i,), value1=o1, value2=o2, msg="Different length")
            else:
                yield from patch(o1[i], o2[i], prefix + (i,), compare_types)
        # todo: we also need to check keys present only in o2
    else:
        if o1 != o2:
            yield Patch(prefix, o1, o2)
class NotEquivalentException(ZValueError):
    """Raised when two types/objects cannot be shown to be equivalent."""

    pass
from .zeneric2 import DataclassInfo
def check_dataclass_info_same(d1, d2, assume_yes: Set[Tuple[int, int]]):
    """Verify two DataclassInfo records have pairwise-equivalent `orig` tuples."""
    d1 = cast(DataclassInfo, d1)
    d2 = cast(DataclassInfo, d2)
    if len(d1.orig) != len(d2.orig):
        msg = "Different orig"
        raise NotEquivalentException(msg, d1=d1, d2=d2)
    for a, b in zip(d1.orig, d2.orig):
        assert_equivalent_types(a, b, assume_yes=assume_yes)
def sort_dict(x: dict) -> dict:
    """Return a copy of *x* whose keys are ordered by their repr() (lexicographic)."""
    return {k: x[k] for k in sorted(x, key=repr)}
def is_placeholder(x):
    """True iff *x* looks like an unresolved placeholder type (judged by its name)."""
    if not hasattr(x, "__name__"):
        return False
    return "Placeholder" in x.__name__
def strip_my_types(T: TypeLike):
    """Map zuper's internal MyBytes/MyStr aliases back to the plain builtins.

    Identity comparison is deliberate: the aliases are specific singleton
    type objects, not general subclasses.
    """
    if T is MyBytes:
        return bytes
    if T is MyStr:
        return str
    return T
from .zeneric2 import get_dataclass_info
def assert_equivalent_types(
    T1: TypeLike, T2: TypeLike, assume_yes: Optional[set] = None, bypass_identity: bool = False
):
    """Raise NotEquivalentException unless T1 and T2 denote equivalent types.

    Structural comparison: dispatches on the kind of T1 (dataclass, Literal,
    ClassVar, Optional, Union, Intersection, tuples, Set/List/Dict-like,
    TypeVar, NewType, Type, builtins, Uninhabited) and recurses into the
    type arguments.

    :param assume_yes: set of (id(T1), id(T2)) pairs already assumed
        equivalent; used to break cycles in recursive types.
    :param bypass_identity: when True, ``T1 is T2`` short-circuits even for
        dataclasses (which otherwise are always compared structurally).
    :raises NotEquivalentException: when a difference is found (chained with
        the inner cause).
    :raises ZNotImplementedError: for kinds of types not handled here.
    """
    T1 = strip_my_types(T1)
    T2 = strip_my_types(T2)
    if assume_yes is None:
        # logger.warn('assuming yes from scratch')
        assume_yes = set()
    # debug(f'equivalent', T1=T1, T2=T2)
    key = (id(T1), id(T2))
    if key in assume_yes:
        return
    if is_placeholder(T1) or is_placeholder(T2):
        msg = "One class is incomplete"
        raise NotEquivalentException(msg, T1=T1, T2=T2)
    # Work on a copy so the caller's set is not mutated by the recursion.
    assume_yes = set(assume_yes)
    assume_yes.add(key)
    recursive = lambda t1, t2: assert_equivalent_types(
        T1=t1, T2=t2, assume_yes=assume_yes, bypass_identity=bypass_identity
    )
    try:
        # print(f'assert_equivalent_types({T1},{T2})')
        if (T1 is T2) and (not is_dataclass(T1) or bypass_identity):
            # logger.debug('same by equality')
            return
        if is_dataclass(T1):
            if not is_dataclass(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            # warnings.warn("devl")
            if False:
                if type(T1) != type(T2):
                    msg = f"Different types for types: {type(T1)} {type(T2)}"
                    raise NotEquivalentException(msg, T1=T1, T2=T2)
            # Compare identifying attributes first, then the structure.
            atts = ["__name__", "__module__"]
            # atts.append('__doc__')
            if "__doc__" not in atts:
                pass
                # warnings.warn("de-selected __doc__ comparison")
            for k in atts:
                v1 = getattr(T1, k, ())
                v2 = getattr(T2, k, ())
                if v1 != v2:
                    msg = f"Difference for {k} of {T1} ({type(T1)}) and {T2} ({type(T2)}"
                    raise NotEquivalentException(msg, v1=v1, v2=v2)
            T1i = get_dataclass_info(T1)
            T2i = get_dataclass_info(T2)
            check_dataclass_info_same(T1i, T2i, assume_yes)
            fields1 = get_fields_including_static(T1)
            fields2 = get_fields_including_static(T2)
            if list(fields1) != list(fields2):
                msg = f"Different fields"
                raise NotEquivalentException(msg, fields1=fields1, fields2=fields2)
            ann1 = getattr(T1, "__annotations__", {})
            ann2 = getattr(T2, "__annotations__", {})
            # Per-field: the declared type must be equivalent and the default
            # values must be equivalent objects.
            for k in fields1:
                t1 = fields1[k].type
                t2 = fields2[k].type
                try:
                    recursive(t1, t2)
                except NotEquivalentException as e:
                    msg = f"Could not establish the annotation {k!r} to be equivalent"
                    raise NotEquivalentException(
                        msg,
                        t1=t1,
                        t2=t2,
                        t1_ann=T1.__annotations__[k],
                        t2_ann=T2.__annotations__[k],
                        t1_att=getattr(T1, k, "no attribute"),
                        t2_att=getattr(T2, k, "no attribute"),
                    ) from e
                d1 = fields1[k].default
                d2 = fields2[k].default
                try:
                    assert_equivalent_objects(d1, d2)
                except ZValueError as e:
                    raise NotEquivalentException(d1=d1, d2=d2) from e
                # if d1 != d2:
                #     msg = f"Defaults for {k!r} are different."
                #     raise NotEquivalentException(msg, d1=d1, d2=d2)
                #
                # d1 = fields1[k].default_factory
                # d2 = fields2[k].default
                # if d1 != d2:
                #     msg = f"Defaults for {k!r} are different."
                #     raise NotEquivalentException(msg, d1=d1, d2=d2)
            for k in ann1:
                t1 = ann1[k]
                t2 = ann2[k]
                try:
                    recursive(t1, t2)
                except NotEquivalentException as e:
                    msg = f"Could not establish the annotation {k!r} to be equivalent"
                    raise NotEquivalentException(
                        msg,
                        t1=t1,
                        t2=t2,
                        t1_ann=T1.__annotations__[k],
                        t2_ann=T2.__annotations__[k],
                        t1_att=getattr(T1, k, "no attribute"),
                        t2_att=getattr(T2, k, "no attribute"),
                    ) from e
        elif is_Literal(T1):
            if not is_Literal(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            values1 = get_Literal_args(T1)
            values2 = get_Literal_args(T2)
            if values1 != values2:
                raise NotEquivalentException(T1=T1, T2=T2)
        elif is_ClassVar(T1):
            if not is_ClassVar(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            t1 = get_ClassVar_arg(T1)
            t2 = get_ClassVar_arg(T2)
            recursive(t1, t2)
        elif is_Optional(T1):
            if not is_Optional(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            t1 = get_Optional_arg(T1)
            t2 = get_Optional_arg(T2)
            recursive(t1, t2)
        elif T1 is type(None):
            if not T2 is type(None):
                raise NotEquivalentException(T1=T1, T2=T2)
        elif is_Union(T1):
            if not is_Union(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            # NOTE(review): members are compared positionally (zip), so a
            # reordered Union is considered different; zip also ignores
            # extra members of the longer Union — confirm intended.
            ts1 = get_Union_args(T1)
            ts2 = get_Union_args(T2)
            for t1, t2 in zip(ts1, ts2):
                recursive(t1, t2)
        elif is_Intersection(T1):
            if not is_Intersection(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            ts1 = get_Intersection_args(T1)
            ts2 = get_Intersection_args(T2)
            for t1, t2 in zip(ts1, ts2):
                recursive(t1, t2)
        elif is_FixedTupleLike(T1):
            if not is_FixedTupleLike(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            T1 = cast(Type[Tuple], T1)
            T2 = cast(Type[Tuple], T2)
            ts1 = get_FixedTupleLike_args(T1)
            ts2 = get_FixedTupleLike_args(T2)
            for t1, t2 in zip(ts1, ts2):
                recursive(t1, t2)
        elif is_VarTuple(T1):
            if not is_VarTuple(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            T1 = cast(Type[Tuple], T1)
            T2 = cast(Type[Tuple], T2)
            t1 = get_VarTuple_arg(T1)
            t2 = get_VarTuple_arg(T2)
            recursive(t1, t2)
        elif is_SetLike(T1):
            T1 = cast(Type[Set], T1)
            if not is_SetLike(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            T2 = cast(Type[Set], T2)
            t1 = get_SetLike_arg(T1)
            t2 = get_SetLike_arg(T2)
            recursive(t1, t2)
        elif is_ListLike(T1):
            T1 = cast(Type[List], T1)
            if not is_ListLike(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            T2 = cast(Type[List], T2)
            t1 = get_ListLike_arg(T1)
            t2 = get_ListLike_arg(T2)
            recursive(t1, t2)
        elif is_DictLike(T1):
            T1 = cast(Type[Dict], T1)
            if not is_DictLike(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            T2 = cast(Type[Dict], T2)
            t1, u1 = get_DictLike_args(T1)
            t2, u2 = get_DictLike_args(T2)
            recursive(t1, t2)
            recursive(u1, u2)
        elif is_Any(T1):
            if not is_Any(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
        elif is_TypeVar(T1):
            if not is_TypeVar(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            # TypeVars are compared by name only.
            n1 = get_TypeVar_name(T1)
            n2 = get_TypeVar_name(T2)
            if n1 != n2:
                raise NotEquivalentException(n1=n1, n2=n2)
        elif T1 in (int, str, bool, Decimal, datetime, float, type):
            if T1 != T2:
                raise NotEquivalentException(T1=T1, T2=T2)
        elif is_NewType(T1):
            if not is_NewType(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            n1 = get_NewType_name(T1)
            n2 = get_NewType_name(T2)
            if n1 != n2:
                raise NotEquivalentException(T1=T1, T2=T2)
            o1 = get_NewType_arg(T1)
            o2 = get_NewType_arg(T2)
            recursive(o1, o2)
        elif is_Type(T1):
            if not is_Type(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
            t1 = get_Type_arg(T1)
            t2 = get_Type_arg(T2)
            recursive(t1, t2)
        elif is_Uninhabited(T1):
            if not is_Uninhabited(T2):
                raise NotEquivalentException(T1=T1, T2=T2)
        else:
            raise ZNotImplementedError(T1=T1, T2=T2)
    except NotEquivalentException as e:
        # logger.error(e)
        # Wrap the inner cause so the full comparison path is visible.
        msg = f"Could not establish the two types to be equivalent."
        raise NotEquivalentException(msg, T1=T1, T2=T2) from e
    # assert T1 == T2
    # assert_equal(T1.mro(), T2.mro())
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/get_patches_.py
|
get_patches_.py
|
import inspect
import numbers
from dataclasses import dataclass, field, Field, fields, is_dataclass, replace
from datetime import datetime
from decimal import Decimal
from fractions import Fraction
from typing import Any, Callable, cast, Dict, List, Optional, Sequence, Set, Tuple, Type, TypeVar
import termcolor
from frozendict import frozendict
from zuper_commons.logs import ZLogger
from zuper_commons.text import (
format_table,
get_length_on_screen,
indent,
remove_escapes,
Style,
)
from zuper_commons.types import ZException
from zuper_commons.types.exceptions import disable_colored
from zuper_commons.ui import (
color_constant,
color_float,
color_int,
color_ops,
color_par,
color_synthetic_types,
color_typename,
color_typename2,
colorize_rgb,
)
from zuper_commons.ui.colors import color_magenta, color_ops_light
from .aliases import TypeLike
from .annotations_tricks import (
CustomDict,
CustomList,
CustomSet,
CustomTuple,
get_Callable_info,
get_ClassVar_arg,
get_CustomDict_args,
get_CustomList_arg,
get_CustomSet_arg,
get_CustomTuple_args,
get_Dict_args,
get_fields_including_static,
get_FixedTupleLike_args,
get_List_arg,
get_NewType_arg,
get_NewType_name,
get_Optional_arg,
get_Set_arg,
get_Type_arg,
get_TypeVar_bound,
get_Union_args,
get_VarTuple_arg,
is_Any,
is_Callable,
is_ClassVar,
is_CustomDict,
is_CustomList,
is_CustomSet,
is_CustomTuple,
is_Dict,
is_FixedTupleLike,
is_ForwardRef,
is_Iterable,
is_Iterator,
is_List,
is_NewType,
is_Optional,
is_Sequence,
is_Set,
is_Type,
is_TypeLike,
is_TypeVar,
is_Union,
is_VarTuple,
MyBytes,
MyStr,
name_for_type_like,
)
from .constants import ATT_PRINT_ORDER, DataclassHooks
from .dataclass_info import has_default_factory, has_default_value
from .literal import get_Literal_args, is_Literal
from .my_intersection import get_Intersection_args, is_Intersection
from .uninhabited import is_Uninhabited
from .zeneric2 import get_dataclass_info
__all__ = [
"debug_print",
"DPOptions",
"debug_print_str",
"debug_print0",
"get_default_dpoptions",
]
def nothing(x: object) -> str:
    """Default decoration hook: contributes no extra text for any object."""
    # return " *"
    return ""
@dataclass
class DPOptions:
    """Options controlling how debug_print renders objects."""

    obey_print_order: bool = True  # honor ATT_PRINT_ORDER on dataclasses
    do_special_EV: bool = True
    do_not_display_defaults: bool = True  # hide fields equal to their default
    compact: bool = False  # single-line, type-name-only rendering
    abbreviate: bool = False  # emit &N/$N back-references for shared objects
    # Border glyph for nested blocks.  Candidates: |│┃┆
    # NOTE: intentionally unannotated, so this is a shared class attribute,
    # not a dataclass field.
    default_border_left = "│ "  # <-- note box-drawing
    id_gen: Callable[[object], object] = id  # cid_if_known
    max_initial_levels: int = 20
    omit_type_if_empty: bool = True
    omit_type_if_short: bool = True
    ignores: List[Tuple[str, str]] = field(default_factory=list)
    # Hook returning extra decoration text for an object (default: none).
    other_decoration_dataclass: Callable[[object], str] = nothing
    abbreviate_zuper_lang: bool = False
    ignore_dunder_dunder: bool = False
    spaces_after_separator: bool = True
    preferred_container_width: int = 88
    compact_types: bool = True
    # different_color_mystr : bool = False
def get_default_dpoptions() -> DPOptions:
    """Build the DPOptions used when the caller supplies none."""
    return DPOptions(ignores=[], id_gen=id)
def remove_color_if_no_support(f):
    """Decorator: strip ANSI escapes from *f*'s string result when colored
    output is disabled (per zuper_commons.disable_colored())."""

    def f2(*args, **kwargs):
        s = f(*args, **kwargs)
        if disable_colored():  # pragma: no cover
            s = remove_escapes(s)
        return s

    # Preserve the wrapped function's name.  The original assigned the
    # decorator's own name here, clobbering the metadata of every function
    # it decorates.
    f2.__name__ = f.__name__
    return f2
@remove_color_if_no_support
def debug_print(x: object, opt: Optional[DPOptions] = None) -> str:
    """Render *x* as a colored, human-oriented debug string."""
    options = opt if opt is not None else get_default_dpoptions()
    return debug_print0(
        x, max_levels=options.max_initial_levels, already={}, stack=(), opt=options
    )
# Hook debug_print into zuper_commons so exceptions and log records render
# their payloads with it (a shallower level budget for log output).
eop = replace(get_default_dpoptions(), max_initial_levels=20)
ZException.entries_formatter = lambda x: debug_print(x, opt=eop)
eop_log = replace(get_default_dpoptions(), max_initial_levels=5)  # <lower
ZLogger.debug_print = lambda x: debug_print(x, opt=eop_log)
def debug_print0(
    x: object, *, max_levels: int, already: Dict[int, str], stack: Tuple[int, ...], opt: DPOptions
) -> str:
    """Recursive worker behind debug_print.

    :param max_levels: remaining depth budget; exhausted levels are clipped.
    :param already: id-like handle -> short reference label, for the
        abbreviation feature (&N definition / $N back-reference).
    :param stack: ids of objects currently being printed (cycle detection).
    :param opt: rendering options.
    """
    # Cycle detection: an object already on the stack is rendered as a
    # back-arrow rather than recursed into.
    if id(x) in stack:
        if hasattr(x, "__name__"):
            n = x.__name__
            return color_typename2(n + "↑")  # ↶'
        return "(recursive)"
    if len(stack) > 50:
        return "!!! recursion not detected"
    if opt.compact:
        if isinstance(x, type) and is_dataclass(x):
            other = opt.other_decoration_dataclass(x)
            return color_typename(x.__name__) + other
    # logger.info(f'stack: {stack} {id(x)} {type(x)}')
    stack2 = stack + (id(x),)
    args = dict(max_levels=max_levels, already=already, stack=stack2, opt=opt)
    dpa = lambda _: debug_print0(_, **args)
    opt_compact = replace(opt, compact=True)
    dp_compact = lambda _: debug_print0(
        _, max_levels=max_levels, already=already, stack=stack2, opt=opt_compact
    )
    # abbreviate = True
    # Abbreviation: the first occurrence of a shared object gets an "&N"
    # prefix; later occurrences render as "$N" back-references.
    if not opt.abbreviate:
        prefix = ""
    else:
        show_id = is_dataclass(x) and type(x).__name__ != "Constant"
        show_id = True
        if show_id:
            # noinspection PyBroadException
            try:
                h = opt.id_gen(x)
            except:
                prefix = termcolor.colored("!!!", "red")
            # except ValueError:
            #     prefix = '!'
            else:
                if h is not None:
                    if h in already:
                        if isinstance(x, type):
                            short = type(x).__name__ + "(...) "
                        else:
                            short = color_typename(type(x).__name__) + "(...) "
                        res = short + termcolor.colored("$" + already[h], "green")
                        # logger.info(f'ok already[h] = {res} already = {already}')
                        return res
                    else:
                        already[h] = f"{len(already)}"
                        prefix = termcolor.colored("&" + already[h], "green", attrs=["dark"])
                else:
                    prefix = ""
        else:
            prefix = ""
    postfix = " " + prefix if prefix else ""
    if False:  # TODO: make configurable
        postfix += " " + opt.other_decoration_dataclass(x)
        postfix = postfix.rstrip()
    # prefix = prefix + f' L{max_levels}'
    # Fast paths for primitive values and well-known type objects.
    if isinstance(x, int):
        return color_int(str(x)) + postfix
    if isinstance(x, float):
        return color_float(str(x)) + postfix
    if x is type:
        return color_ops("type") + postfix
    if x is BaseException:
        return color_ops("BaseException") + postfix
    if x is tuple:
        return color_ops("tuple") + postfix
    if x is object:
        return color_ops("object") + postfix
    if x is list:
        return color_ops("list") + postfix
    if x is dict:
        return color_ops("dict") + postfix
    if x is type(...):
        return color_ops("ellipsis") + postfix
    if x is int:
        return color_ops("int") + postfix
    if x is float:
        return color_ops("float") + postfix
    if x is bool:
        return color_ops("bool") + postfix
    if x is numbers.Number:
        return color_ops("Number") + postfix
    if x is str:
        return color_ops("str") + postfix
    if x is bytes:
        return color_ops("bytes") + postfix
    if x is set:
        return color_ops("set") + postfix
    if x is slice:
        return color_ops("slice") + postfix
    if x is datetime:
        return color_ops("datetime") + postfix
    if x is Decimal:
        return color_ops("Decimal") + postfix
    if not isinstance(x, str):
        if is_TypeLike(x):
            x = cast(TypeLike, x)
            return debug_print_typelike(x, dp_compact, dpa=dpa, opt=opt, prefix=prefix, args=args)
    # Container/value dispatch.
    if isinstance(x, bytes):
        return debug_print_bytes(x) + postfix
    if isinstance(x, str):
        return debug_print_str(x, prefix=prefix)  # + postfix
    if isinstance(x, Decimal):
        # return color_ops("Dec") + " " + color_float(str(x))
        return color_typename2(str(x))  # + postfix
    if isinstance(x, Fraction):
        if x in known_fraction:
            return color_float(known_fraction[x])  # + postfix
        # return color_ops("Dec") + " " + color_float(str(x))
        return color_float(str(x))  # + postfix
    if isinstance(x, datetime):
        return debug_print_date(x, prefix=prefix)
    if isinstance(x, (set, frozenset)):
        return debug_print_set(x, prefix=prefix, **args)
    if isinstance(x, (dict, frozendict)):
        return debug_print_dict(x, prefix=prefix, **args)
    if isinstance(x, tuple):
        return debug_print_tuple(x, prefix=prefix, **args)
    if isinstance(x, list):
        return debug_print_list(x, prefix=prefix, **args)
    if isinstance(x, (bool, type(None))):
        return color_ops(str(x)) + postfix
    if not isinstance(x, type) and is_dataclass(x):
        return debug_print_dataclass_instance(x, prefix=prefix, **args)
    if "Expr" in type(x).__name__:
        return f"{x!r}\n{x}"
    # Fallback: a repr, qualified with the class name when repr omits it.
    repr_own = repr(x)
    cname = type(x).__name__
    if cname in repr_own:
        r = repr_own
    else:
        r = f"instance of {cname}: {repr_own}"
    # assert not 'typing.Union' in r, (r, x, is_Union(x))
    return r
# Unicode vulgar-fraction glyphs used to render common Fraction values compactly.
known_fraction = {
    Fraction(1, 2): "½",
    Fraction(1, 3): "⅓",
    Fraction(2, 3): "⅔",
    Fraction(1, 4): "¼",
    Fraction(3, 4): "¾",
    Fraction(1, 5): "⅕",
    Fraction(2, 5): "⅖",
    Fraction(3, 5): "⅗",
    Fraction(4, 5): "⅘",
    Fraction(1, 6): "⅙",
    Fraction(5, 6): "⅚",
    Fraction(1, 7): "⅐",
    Fraction(1, 8): "⅛",
    Fraction(3, 8): "⅜",
    Fraction(5, 8): "⅝",
    Fraction(7, 8): "⅞",
    Fraction(1, 9): "⅑",
    Fraction(1, 10): "⅒",
}
cst = color_synthetic_types
def debug_print_typelike(x: TypeLike, dp_compact, dpa, opt: DPOptions, prefix: str, args) -> str:
    """Render a TypeLike value (Any, TypeVar, containers, Union, Callable, ...).

    `dp_compact` / `dpa` are the compact and normal recursive printers
    prepared by debug_print0; `h` is the caller-provided decoration suffix.
    """
    h = opt.other_decoration_dataclass(x)
    assert is_TypeLike(x), x
    if is_Any(x):
        s = name_for_type_like(x)
        s = termcolor.colored(s, on_color="on_magenta")
        return s + " " + h
    if is_Uninhabited(x):
        s = "Nothing"
        s = termcolor.colored(s, on_color="on_magenta")
        return s + " " + h
    if is_NewType(x):
        n = get_NewType_name(x)
        w = get_NewType_arg(x)
        return color_synthetic_types(n) + " " + h  # + '<' + debug_print_typelike(w, dpa, opt, '', args)
    # Simple named types rendered by name only.
    if (
        (x is type(None))
        # or is_List(x)
        # or is_Dict(x)
        # or is_Set(x)
        # or is_ClassVar(x)
        # or is_Type(x)
        or is_Iterator(x)
        or is_Sequence(x)
        or is_Iterable(x)
        or is_NewType(x)
        or is_ForwardRef(x)
        or is_Uninhabited(x)
    ):
        s = color_ops(name_for_type_like(x))
        return s + h
    if is_TypeVar(x):
        assert isinstance(x, TypeVar), x
        name = x.__name__
        bound = get_TypeVar_bound(x)
        covariant = getattr(x, "__covariant__")
        contravariant = getattr(x, "__contravariant__")
        # Variance marker: T+ covariant, T- contravariant, T= invariant.
        if covariant:
            n = name + "+"
        elif contravariant:
            n = name + "-"
        else:
            n = name + "="
        n = cst(n)
        if bound is not object:
            n += color_ops("<") + dp_compact(bound)
        return n + h
    if x is MyBytes:
        s = cst("MyBytes")
        return s + h
    # Containers: the Custom* variants (zuper-synthesized) use the synthetic
    # color via cst; the plain typing variants use color_ops.
    if is_CustomDict(x):
        x = cast(Type[CustomDict], x)
        K, V = get_CustomDict_args(x)
        s = cst("Dict") + cst("[") + dp_compact(K) + cst(",") + dp_compact(V) + cst("]")
        return s + h
    if is_Dict(x):
        x = cast(Type[Dict], x)
        K, V = get_Dict_args(x)
        s = color_ops("Dict") + cst("[") + dp_compact(K) + cst(",") + dp_compact(V) + cst("]")
        return s + h
    if is_Type(x):
        V = get_Type_arg(x)
        s = cst("Type") + cst("[") + dp_compact(V) + cst("]")
        return s + h
    if is_ClassVar(x):
        V = get_ClassVar_arg(x)
        s = color_ops("ClassVar") + cst("[") + dp_compact(V) + cst("]")
        return s + h
    if is_CustomSet(x):
        x = cast(Type[CustomSet], x)
        V = get_CustomSet_arg(x)
        s = cst("Set") + cst("[") + dp_compact(V) + cst("]")
        return s + h
    if is_Set(x):
        x = cast(Type[Set], x)
        V = get_Set_arg(x)
        s = color_ops("Set") + cst("[") + dp_compact(V) + cst("]")
        return s + h
    if is_CustomList(x):
        x = cast(Type[CustomList], x)
        V = get_CustomList_arg(x)
        s = cst("List") + cst("[") + dp_compact(V) + cst("]")
        return s + h
    if is_List(x):
        x = cast(Type[List], x)
        V = get_List_arg(x)
        s = color_ops("List") + cst("[") + dp_compact(V) + cst("]")
        return s + h
    if is_Optional(x):
        V = get_Optional_arg(x)
        s0 = dp_compact(V)
        s = color_ops("Optional") + cst("[") + s0 + cst("]")
        return s + h
    if is_Literal(x):
        vs = get_Literal_args(x)
        s = ", ".join(dp_compact(_) for _ in vs)
        s = color_ops("Literal") + cst("[") + s + cst("]")
        return s + h
    if is_CustomTuple(x):
        x = cast(Type[CustomTuple], x)
        ts = get_CustomTuple_args(x)
        ss = []
        for t in ts:
            ss.append(dp_compact(t))
        args = color_ops(",").join(ss)
        s = cst("Tuple") + cst("[") + args + cst("]")
        return s + h
    if is_FixedTupleLike(x):
        x = cast(Type[Tuple], x)
        ts = get_FixedTupleLike_args(x)
        ss = []
        for t in ts:
            ss.append(dp_compact(t))
        args = color_ops(",").join(ss)
        s = color_ops("Tuple") + cst("[") + args + cst("]")
        return s + h
    if is_VarTuple(x):
        x = cast(Type[Tuple], x)
        t = get_VarTuple_arg(x)
        s = color_ops("Tuple") + cst("[") + dp_compact(t) + ", ..." + cst("]")
        return s + h
    # Union / Intersection: inline when compact or few members, otherwise
    # one bulleted line per member.
    if is_Union(x):
        Ts = get_Union_args(x)
        if opt.compact or len(Ts) <= 3:
            ss = list(dp_compact(v) for v in Ts)
            inside = color_ops(",").join(ss)
            s = color_ops("Union") + cst("[") + inside + cst("]")
        else:
            ss = list(dpa(v) for v in Ts)
            s = color_ops("Union")
            for v in ss:
                s += "\n" + indent(v, "", color_ops(f"* "))
        return s + h
    if is_Intersection(x):
        Ts = get_Intersection_args(x)
        if opt.compact or len(Ts) <= 3:
            ss = list(dp_compact(v) for v in Ts)
            inside = color_ops(",").join(ss)
            s = color_ops("Intersection") + cst("[") + inside + cst("]")
        else:
            ss = list(dpa(v) for v in Ts)
            s = color_ops("Intersection")
            for v in ss:
                s += "\n" + indent(v, "", color_ops(f"* "))
        return s + h
    if is_Callable(x):
        info = get_Callable_info(x)

        def ps(k, v):
            # "__"-prefixed parameters are positional; others are named.
            if k.startswith("__"):
                return dp_compact(v)
            else:
                return f"NamedArg({dp_compact(v)},{k!r})"

        params = color_ops(",").join(ps(k, v) for k, v in info.parameters_by_name.items())
        ret = dp_compact(info.returns)
        s = color_ops("Callable") + cst("[[") + params + color_ops("],") + ret + cst("]")
        return s + h
    if isinstance(x, type) and is_dataclass(x):
        if opt.compact_types:
            return name_for_type_like(x)
        else:
            return debug_print_dataclass_type(x, prefix=prefix, **args)
    if hasattr(x, "__name__"):
        n = x.__name__
        if n == "frozendict":
            # return 'frozen' + color_ops('dict')
            return color_ops_light("fdict")
        if n == "frozenset":
            # return 'frozen' + color_ops('set')
            return color_ops_light("fset")
    # Fallback: red repr marks a TypeLike this printer does not know about.
    r = repr(x)
    if "frozendict" in r:
        raise Exception(r)
    r = termcolor.colored(r, "red")
    return r + h
def clipped() -> str:
    """Marker appended when output is truncated by the level budget."""
    marker = termcolor.colored("...", "blue", on_color="on_yellow")
    return " " + marker
# Glyph pair delimiting dict renderings, and the marker for an empty dict.
braces = "{", "}"
empty_dict = "".join(braces)
def debug_print_dict(x: dict, *, prefix, max_levels: int, already: Dict, stack: Tuple[int], opt: DPOptions):
    """Render a dict/frozendict: inline when it fits on one line, otherwise
    as a pretty block with one key per line."""
    h = opt.other_decoration_dataclass(x)
    lbrace, rbrace = braces
    # frozendicts get an "f" brace prefix; synthesized Dict[...] subclasses
    # get the synthetic-type color.
    if type(x) is frozendict:
        bprefix = "f"
        bracket_colors = color_ops_light
    elif "Dict[" in type(x).__name__:
        bprefix = ""
        bracket_colors = color_synthetic_types
    else:
        bprefix = ""
        bracket_colors = color_ops
    dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
    opt_compact = replace(opt, compact=True)
    dps = lambda _: debug_print0(_, max_levels=max_levels, already={}, stack=stack, opt=opt_compact)
    ps = " " + prefix if prefix else ""
    if len(x) == 0:
        if opt.omit_type_if_empty:
            return bracket_colors(bprefix + empty_dict) + ps + " " + h
        else:
            return dps(type(x)) + " " + bracket_colors(bprefix + empty_dict) + ps + " " + h
    s = dps(type(x)) + f"[{len(x)}]" + ps + " " + h
    if max_levels == 0:
        return s + clipped()
    r = {}
    for k, v in x.items():
        if isinstance(k, str):
            # Long hash-like "zd..." keys are elided to their tail.
            if k.startswith("zd"):
                k = "zd..." + k[-4:]
            k = termcolor.colored(k, "yellow")
        else:
            k = dpa(k)
        # ks = debug_print(k)
        # if ks.startswith("'"):
        #     ks = k
        r[k] = dpa(v)
    # colon_sep = ":" + short_space
    colon_sep = ": "
    ss = [k + colon_sep + v for k, v in r.items()]
    nlines = sum(_.count("\n") for _ in ss)
    tlen = sum(get_length_on_screen(_) for _ in ss)
    # Single-line form only if every entry is single-line and they fit.
    if nlines == 0 and tlen < get_max_col(stack):
        # x = "," if len(x) == 1 else ""
        res = (
            bracket_colors(bprefix + lbrace)
            + bracket_colors(", ").join(ss)
            + bracket_colors(rbrace)
            + ps
            + " "
            + h
        )
        if opt.omit_type_if_short:
            return res
        else:
            return dps(type(x)) + " " + res
    leftmargin = bracket_colors(opt.default_border_left)
    return pretty_dict_compact(s, r, leftmargin=leftmargin, indent_value=0)
def get_max_col(stack: Tuple) -> int:
    """Columns available at the current nesting depth (4 fewer per level)."""
    return 110 - 4 * len(stack)
def debug_print_dataclass_type(
    x: Type[dataclass], prefix: str, max_levels: int, already: Dict, stack: Tuple, opt: DPOptions
) -> str:
    """Render a dataclass *type* as a header line plus a table of its fields
    (name, type, default/factory, ClassVar value) and metadata."""
    if max_levels <= 0:
        return name_for_type_like(x) + clipped()
    dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
    ps = " " + prefix if prefix else ""
    # ps += f" {id(x)} {type(x)}" # note breaks string equality
    if opt.abbreviate_zuper_lang:
        if x.__module__.startswith("zuper_lang."):
            return color_constant(x.__name__)
    more = ""
    if x.__name__ != x.__qualname__:
        more += f" ({x.__qualname__})"
    mod = x.__module__ + "."
    s = color_ops("dataclass") + " " + mod + color_typename(x.__name__) + more + ps  # + f' {id(x)}'
    # noinspection PyArgumentList
    other = opt.other_decoration_dataclass(x)
    s += other
    # cells maps (row, column) -> rendered text for the field table.
    cells = {}
    # FIXME: what was the unique one ?
    seen_fields = set()
    row = 0
    all_fields: Dict[str, Field] = get_fields_including_static(x)
    for name, f in all_fields.items():
        T = f.type
        if opt.ignore_dunder_dunder:
            if f.name.startswith("__"):
                continue
        cells[(row, 0)] = color_ops("field")
        cells[(row, 1)] = f.name
        cells[(row, 2)] = color_ops(":")
        cells[(row, 3)] = dpa(T)
        if has_default_value(f):
            cells[(row, 4)] = color_ops("=")
            cells[(row, 5)] = dpa(f.default)
        elif has_default_factory(f):
            cells[(row, 4)] = color_ops("=")
            cells[(row, 5)] = f"factory {dpa(f.default_factory)}"
        if is_ClassVar(T):
            if not hasattr(x, name):
                cells[(row, 6)] = "no attribute set"
            else:
                v = getattr(x, name)
                # cells[(row, 4)] = color_ops("=")
                cells[(row, 6)] = dpa(v)
        seen_fields.add(f.name)
        row += 1
    # Append the DataclassInfo metadata rows (original type args, open vars).
    try:
        xi = get_dataclass_info(x)
    except:  # pragma: no cover
        cells[(row, 1)] = "cannot get the dataclass info"
        row += 1
    else:
        if xi.orig:
            cells[(row, 1)] = "original"
            cells[(row, 3)] = dpa(xi.orig)
            row += 1
        # NOTE: shadows the builtin `open` within this scope.
        open = xi.get_open()
        if open:
            cells[(row, 1)] = "open"
            cells[(row, 3)] = dpa(open)
            row += 1
    if getattr(x, "__doc__", None):
        cells[(row, 1)] = "__doc__"
        cells[(row, 3)] = str(getattr(x, "__doc__", "(missing)"))[:50]
        row += 1
    if not cells:
        return s + ": (no fields)"
    align_right = Style(halign="right")
    col_style = {0: align_right, 1: align_right}
    res = format_table(cells, style="spaces", draw_grid_v=False, col_style=col_style)
    return s + "\n" + res  # indent(res, ' ')
# Glyph pair delimiting list renderings, and the marker for an empty list.
list_parens = "[", "]"
empty_list = "".join(list_parens)
def debug_print_list(
    x: list, prefix: str, max_levels: int, already: Dict, stack: Tuple, opt: DPOptions
) -> str:
    """Render a list: wrapped rows when all items are single-line, otherwise
    one numbered line per item."""
    # if type(x) is frozendict:
    #     bprefix = 'f'
    #     bracket_colors = color_ops_light
    # Synthesized List[...] subclasses get the synthetic-type color.
    if "List[" in type(x).__name__:
        bracket_colors = color_synthetic_types
    else:
        bracket_colors = color_ops
    lbra, rbra = list_parens
    lbra = bracket_colors(lbra)
    rbra = bracket_colors(rbra)
    empty = bracket_colors(empty_list)
    dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
    dps = lambda _: debug_print0(_, opt=opt, max_levels=max_levels, already={}, stack=stack)
    ps = " " + prefix if prefix else ""
    s = dps(type(x)) + f"[{len(x)}]" + ps
    if max_levels <= 0:
        return s + clipped()
    if len(x) == 0:
        if opt.omit_type_if_empty:
            return empty + ps
        else:
            return dps(type(x)) + " " + empty + ps
    ss = [dpa(v) for v in x]
    nlines = sum(_.count("\n") for _ in ss)
    if nlines == 0:
        # All single-line: flow the items into width-limited rows.
        max_width = min(opt.preferred_container_width, get_max_col(stack))
        sep = color_ops(",")
        if opt.spaces_after_separator:
            sep += " "
        return arrange_in_rows(ss, start=lbra, sep=sep, stop=rbra + ps, max_width=max_width)
    else:
        for i, si in enumerate(ss):
            # s += '\n' + indent(debug_print(v), '', color_ops(f'#{i} '))
            s += "\n" + indent(si, "", color_ops(f"#{i} "))
        return s
def arrange_in_rows(ss: Sequence[str], start: str, sep: str, stop: str, max_width: int) -> str:
    """Lay items out left-to-right, wrapping so lines stay within max_width.

    Continuation lines are indented to the on-screen width of *start*;
    widths are measured with get_length_on_screen (escape-aware).
    """
    indent_len = get_length_on_screen(start)
    out = [start]
    line_len = indent_len
    last = len(ss) - 1
    for i, item in enumerate(ss):
        piece = item if i == last else item + sep
        if line_len + get_length_on_screen(piece) > max_width:
            out.append("\n" + " " * indent_len)
            line_len = indent_len
        out.append(piece)
        line_len += get_length_on_screen(piece)
    out.append(stop)
    return "".join(out)
# Glyph pair delimiting set renderings, and the marker for an empty set.
set_parens = "❨", "❩"
empty_set = "".join(set_parens)
def debug_print_set(
    x: set, *, prefix: str, max_levels: int, already: Dict, stack: Tuple, opt: DPOptions
) -> str:
    """Render a set/frozenset: inline when it fits on one line, otherwise as
    a bulleted block. Frozensets get an "f" prefix on the opening glyph."""
    h = opt.other_decoration_dataclass(x)
    popen, pclose = set_parens
    if type(x) is frozenset:
        bprefix = "f"
        bracket_colors = color_ops_light
    elif "Set[" in type(x).__name__:
        bprefix = ""
        bracket_colors = color_synthetic_types
    else:
        bprefix = ""
        bracket_colors = color_ops
    popen = bracket_colors(bprefix + popen)
    pclose = bracket_colors(pclose)
    dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
    dps = lambda _: debug_print0(_, max_levels=max_levels, already={}, stack=stack, opt=opt)
    ps = " " + prefix if prefix else ""
    if len(x) == 0:
        if opt.omit_type_if_empty:
            return bracket_colors(bprefix + empty_set) + ps + " " + h
        else:
            return dps(type(x)) + " " + bracket_colors(bprefix + empty_set) + ps + " " + h
    s = dps(type(x)) + f"[{len(x)}]" + ps + " " + h
    if max_levels <= 0:
        return s + clipped()
    ss = [dpa(v) for v in x]
    nlines = sum(_.count("\n") for _ in ss)
    tlen = sum(get_length_on_screen(_) for _ in ss)
    if nlines == 0 and tlen < get_max_col(stack):
        sep = bracket_colors(",")
        if opt.spaces_after_separator:
            sep += " "
        # BUG FIX: `popen`/`pclose` already carry the color escapes and the
        # "f" prefix (applied above); the original wrapped them in
        # bracket_colors(bprefix + ...) a second time, duplicating the
        # prefix ("ff❨") and nesting escape codes.
        res = popen + sep.join(ss) + pclose + ps + " " + h
        if opt.omit_type_if_short:
            return res
        else:
            return dps(type(x)) + " " + res
    for i, si in enumerate(ss):
        # s += '\n' + indent(debug_print(v), '', color_ops(f'#{i} '))
        s += "\n" + indent(si, "", bracket_colors("• "))
    return s
# short_space = "\u2009"
# Glyph pair delimiting tuple renderings, and the marker for an empty tuple.
tuple_braces = "(", ")"
empty_tuple_str = "".join(tuple_braces)
def debug_print_tuple(
x: tuple, prefix: str, max_levels: int, already: Dict, stack: Tuple, opt: DPOptions
) -> str:
dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
dps = lambda _: debug_print0(_, max_levels=max_levels, already={}, stack=stack, opt=opt)
ps = " " + prefix if prefix else ""
if "Tuple[" in type(x).__name__:
bracket_colors = color_synthetic_types
else:
bracket_colors = color_ops
open_brace = bracket_colors(tuple_braces[0])
close_brace = bracket_colors(tuple_braces[1])
if len(x) == 0:
if opt.omit_type_if_empty:
return bracket_colors(empty_tuple_str) + ps
else:
return dps(type(x)) + " " + bracket_colors(empty_tuple_str) + ps
s = dps(type(x)) + f"[{len(x)}]" + ps
if max_levels <= 0:
return s + clipped()
ss = [dpa(v) for v in x]
nlines = sum(_.count("\n") for _ in ss)
tlen = sum(get_length_on_screen(_) for _ in ss)
if nlines == 0 and tlen < get_max_col(stack):
x = "," if len(x) == 1 else ""
sep = bracket_colors(",")
if opt.spaces_after_separator:
sep += " "
res = open_brace + sep.join(ss) + x + close_brace + ps
if opt.omit_type_if_short:
return res
else:
return dps(type(x)) + " " + res
for i, si in enumerate(ss):
s += "\n" + indent(si, "", bracket_colors(f"#{i} "))
return s
def debug_print_dataclass_instance(
    x: dataclass, prefix: str, max_levels: int, already: Dict, stack: Tuple, opt: DPOptions
) -> str:
    """Render a dataclass instance.

    Several zuper node types get bespoke renderings (Constant, QualifiedName,
    ATypeVar, EV, MakeUnion, MakeIntersection, MakeProcedure); everything else
    is shown as Name(field: value, ...) on one line if it fits, otherwise as a
    boxed multi-line dict.
    """
    assert is_dataclass(x)
    fields_x = fields(x)
    dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
    opt_compact = replace(opt, compact=True)
    # Compact variant of the printer (not used below as written).
    dp_compact = lambda _: debug_print0(
        _, max_levels=max_levels - 1, already=already, stack=stack, opt=opt_compact
    )

    # noinspection PyArgumentList
    other = opt.other_decoration_dataclass(x)

    CN = type(x).__name__  # + str(id(type(x)))
    # Well-known node type names get a fixed RGB color.
    special_colors = {
        "EV": "#77aa77",
        "ZFunction": "#ffffff",
        "ArgRef": "#00ffff",
        "ZArg": "#00ffff",
        "ATypeVar": "#00ffff",
        "MakeProcedure": "#ffffff",
        "IF": "#fafaaf",
    }
    if CN in special_colors:
        cn = colorize_rgb(CN, special_colors[CN])
    else:
        cn = color_typename(CN)

    ps = " " + prefix if prefix else ""
    ps += other
    s = cn + ps
    if max_levels <= 0:
        return s + clipped()

    # Which fields to show, honoring ATT_PRINT_ORDER if requested.
    if opt.obey_print_order and hasattr(x, ATT_PRINT_ORDER):
        options = getattr(x, ATT_PRINT_ORDER)
    else:
        options = []
        for f in fields_x:
            options.append(f.name)

    # Optionally hide fields whose value equals their declared default.
    if opt.do_not_display_defaults:
        same = []
        for f in fields_x:
            att = getattr(x, f.name)
            if has_default_value(f):
                if f.default == att:
                    same.append(f.name)
            elif has_default_factory(f):
                default = f.default_factory()
                if default == att:
                    same.append(f.name)
        to_display = [_ for _ in options if _ not in same]
    else:
        to_display = options

    r = {}
    dpa_result = {}
    for k in to_display:
        if k == "expect":
            att = getattr(x, k)
            # logger.info(f'CN {CN} k {k!r} {getattr(att, "val", None)}')
            if CN == "EV" and k == "expect" and getattr(att, "val", None) is type:
                expects_type = True
                continue
        if not k in to_display:
            continue
        if k.startswith("__"):  # TODO: make configurable
            continue
        if (CN, k) in opt.ignores:
            continue
            # r[color_par(k)] = "(non visualized)"
        else:
            att = getattr(x, k)
            if inspect.ismethod(att):
                att = att()
            r[color_par(k)] = dpa_result[k] = dpa(att)
        # r[(k)] = debug_print(att)
    # NOTE(review): this reset runs after the loop, so the `expects_type = True`
    # assignment above appears to always be overwritten, and the "ET" branch
    # below can never trigger as written — confirm the intended placement.
    expects_type = False

    if len(r) == 0:
        return cn + f"()" + prefix + other

    if type(x).__name__ == "Constant":
        s0 = dpa_result["val"]
        if not "\n" in s0:
            # 「 」‹ ›
            return color_constant("⟬") + s0 + color_constant("⟭")
        else:
            l = color_constant("│ ")  # ║")
            f = color_constant("C ")
            return indent(s0, l, f)

    if type(x).__name__ == "QualifiedName":
        module_name = x.module_name
        qual_name = x.qual_name
        return color_typename("QN") + " " + module_name + "." + color_typename(qual_name)

    if type(x).__name__ == "ATypeVar":
        if len(r) == 1:  # only if no other stuff
            return color_synthetic_types(x.typevar_name)

    if CN == "EV" and opt.do_special_EV:
        if len(r) == 1:
            res = list(r.values())[0]
        else:
            res = pretty_dict_compact("", r, leftmargin="")

        if x.pr is not None:
            color_to_use = x.pr.get_color()
        else:
            color_to_use = "#f0f0f0"

        def colorit(_: str) -> str:
            return colorize_rgb(_, color_to_use)

        if expects_type:
            F = "ET "
        else:
            F = "E "
        l = colorit("┋ ")
        f = colorit(F)
        return indent(res, l, f)

    if len(r) == 1:
        # Single short field: inline as Name(field=value).
        k0 = list(r)[0]
        v0 = r[k0]
        if not "\n" in v0 and not "(" in v0:
            return cn + f"({k0}={v0.rstrip()})" + prefix + other

    # ss = list(r.values())
    ss = [k + ": " + v for k, v in r.items()]
    tlen = sum(get_length_on_screen(_) for _ in ss)
    nlines = sum(_.count("\n") for _ in ss)
    # npars = sum(_.count("(") for _ in ss)
    if nlines == 0 and (tlen < get_max_col(stack)) and (tlen < 50):
        # ok, we can do on one line
        if type(x).__name__ == "MakeUnion":
            # assert len(r) == 1 + 1
            ts = x.utypes
            v = [dpa(_) for _ in ts]
            return "(" + color_ops(" ∪ ").join(v) + ")"
        if type(x).__name__ == "MakeIntersection":
            # assert len(r) == 1 + 1
            ts = x.inttypes
            v = [dpa(_) for _ in ts]
            return "(" + color_ops(" ∩ ").join(v) + ")"

        contents = ", ".join(k + "=" + v for k, v in r.items())
        res = cn + "(" + contents + ")" + ps
        return res

    # Multi-line: boxed rendering with a colored left margin.
    if CN == "MakeProcedure":
        M2 = "┇ "
    else:
        M2 = opt.default_border_left
    if CN in special_colors:
        leftmargin = colorize_rgb(M2, special_colors[CN])
    else:
        leftmargin = color_typename(M2)
    return pretty_dict_compact(s, r, leftmargin=leftmargin, indent_value=0)
#
# def debug_print_dataclass_compact(
# x, max_levels: int, already: Dict, stack: Tuple,
# opt: DPOptions
# ):
# dpa = lambda _: debug_print0(_, max_levels=max_levels - 1, already=already, stack=stack, opt=opt)
# # dps = lambda _: debug_print(_, max_levels, already={}, stack=stack)
# s = color_typename(type(x).__name__) + color_par("(")
# ss = []
# for k, v in x.__annotations__.items():
# att = getattr(x, k)
# ss.append(f'{color_par(k)}{color_par("=")}{dpa(att)}')
#
# s += color_par(", ").join(ss)
# s += color_par(")")
# return s
FANCY_BAR = "│"


def pretty_dict_compact(
    head: Optional[str], d: Dict[str, Any], leftmargin="|", indent_value: int = 0
):  # | <-- note box-making
    """Render a dict of pre-formatted string values as an indented block.

    Multi-line values go on their own indented lines under their key;
    single-line values stay on the key's line. The whole body is prefixed
    with *leftmargin* and optionally preceded by *head*.
    """
    if not d:
        return head + ": (empty dict)" if head else "(empty dict)"
    rendered = []
    for key in d:
        value = d[key]
        heading = str(key) + ":"
        if "\n" in value:
            rendered.append(heading)
            rendered.append(indent(value, " " * indent_value))
        else:
            rendered.append(heading + " " + value)
    body = indent("\n".join(rendered), leftmargin)
    return (head + "\n" if head else "") + body
def nice_str(self):
    # Delegate to the installed dataclass repr hook.
    return DataclassHooks.dc_repr(self)


def blue(x):
    # Shorthand for blue-colored terminal text.
    return termcolor.colored(x, "blue")
def nice_repr(self):
    """repr-style rendering: ClassName(field=value, ...) with terminal colors."""
    out = termcolor.colored(type(self).__name__, "red")
    out += blue("(")
    pieces = []
    annotations = getattr(type(self), "__annotations__", {})
    for field_name in annotations:
        if not hasattr(self, field_name):
            continue
        value = getattr(self, field_name)
        rendered = debug_print_compact(value)
        eq = blue("=")
        field_name = termcolor.colored(field_name, attrs=["dark"])
        pieces.append(f"{field_name}{eq}{rendered}")
    out += blue(", ").join(pieces)
    out += blue(")")
    return out
def debug_print_compact(x):
    """Single-line rendering of a value, used for dataclass field values."""
    if isinstance(x, str):
        rendered = debug_print_str(x, prefix="")
    elif isinstance(x, bytes):
        rendered = debug_print_bytes(x)
    elif isinstance(x, datetime):
        rendered = debug_print_date(x, prefix="")
    else:
        rendered = repr(x)
    return rendered
def debug_print_str(x: str, *, prefix: str):
    """Render a string: colored, quoted only when it has surrounding whitespace;
    PEM keys and zdpu hashes are special-cased; multiline strings are boxed."""
    # Note: this breaks zuper-comp
    different_color_mystr = False
    if different_color_mystr and isinstance(x, MyStr):
        color = color_ops_light
    else:
        color = color_magenta

    if type(x) not in (str, MyStr):
        # str subclass: show its type name, then render the plain string.
        return type(x).__name__ + " - " + debug_print_str(str(x), prefix=prefix)
    if x == "\n":
        return "'\\n'"
    if x.startswith("-----BEGIN"):
        s = "PEM key" + " " + prefix
        return termcolor.colored(s, "yellow")
    ps = " " + prefix if prefix else ""
    lines = x.split("\n")
    if len(lines) > 1:
        first = color("|")
        lines[0] = lines[0] + ps
        try:
            # BUG FIX: previously this indented the original `x`, discarding
            # the prefix appended to lines[0]; join the modified lines instead.
            return indent("\n".join(lines), first)
        except:  # pragma: no cover
            # print(traceback.format_exc())
            return "?"
    if x.startswith("zdpu"):
        return termcolor.colored(x, "yellow")
    if x == "":
        return "''"
    else:
        if x.strip() == x:
            return color(x) + ps
        else:
            return color("'" + x + "'") + ps
    # return x.__repr__() + ps
def debug_print_date(x: datetime, *, prefix: str):
    """Render a datetime as 'YYYY-MM-DD HH:MM:SS[...]' in yellow, plus prefix."""
    text = x.isoformat().replace("T", " ")
    suffix = " " + prefix if prefix else ""
    return termcolor.colored(text, "yellow") + suffix
def debug_print_bytes(x: bytes):
    # Show total length plus a repr of the first 10 bytes only.
    s = f"{len(x)} bytes " + x[:10].__repr__()
    # s = f"{len(x)} bytes " + str(list(x))
    return termcolor.colored(s, "yellow")


# Install the debug printers as str()/repr() for all zuper dataclasses.
DataclassHooks.dc_str = lambda self: debug_print(self)
DataclassHooks.dc_repr = nice_repr
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/debug_print_.py
|
debug_print_.py
|
import dataclasses
import typing
from dataclasses import dataclass as original_dataclass
from typing import Dict, Generic, Tuple, TypeVar
from zuper_commons.types import ZTypeError
from .annotations_tricks import make_dict
from .constants import ANNOTATIONS_ATT, DataclassHooks, DEPENDS_ATT, PYTHON_36
from .dataclass_info import get_all_annotations
from .zeneric2 import resolve_types, ZenericFix
__all__ = ["get_remembered_class", "MyNamedArg", "remember_created_class", "my_dataclass", "ZenericFix"]
def _cmp_fn_loose(name, op, self_tuple, other_tuple, *args, **kwargs):
    """Build a dataclass comparison method that matches classes by __name__.

    Replaces the stdlib's identity check (`self.__class__ is other.__class__`)
    with a name comparison, so structurally-recreated classes with the same
    name still compare. Relies on the private dataclasses._create_fn API.
    """
    body = [
        "if other.__class__.__name__ == self.__class__.__name__:",
        # "if other is not None:",
        f" return {self_tuple}{op}{other_tuple}",
        "return NotImplemented",
    ]
    fn = dataclasses._create_fn(name, ("self", "other"), body)
    fn.__doc__ = """
    This is a loose comparison function.
    Instead of comparing:
        self.__class__ is other.__class__
    we compare:
        self.__class__.__name__ == other.__class__.__name__
    """
    return fn


# Monkey-patch dataclasses so eq/order methods use the loose comparison.
dataclasses._cmp_fn = _cmp_fn_loose
def typevar__repr__(self):
    """Compact TypeVar repr: variance prefix (+/-/~) + name, '<bound' if bound."""
    if self.__covariant__:
        variance = "+"
    elif self.__contravariant__:
        variance = "-"
    else:
        variance = "~"
    text = variance + self.__name__
    if self.__bound__:
        if isinstance(self.__bound__, type):
            bound = self.__bound__.__name__
        else:
            bound = str(self.__bound__)
        text += f"<{bound}"
    return text


setattr(TypeVar, "__repr__", typevar__repr__)
NAME_ARG = "__name_arg__"


# need to have this otherwise it's not possible to say that two types are the same
class Reg:
    already = {}


def MyNamedArg(T, name: str):
    """Replacement for mypy_extensions.NamedArg, cached per (type, name).

    Rejects purely-numeric names; returns the same class object for the same
    (T, name) so equality of synthesized types works.
    """
    try:
        int(name)
    except:
        pass
    else:
        msg = f"Tried to create NamedArg with name = {name!r}."
        raise ValueError(msg)

    key = f"{T} {name}"
    cached = Reg.already.get(key)
    if cached is not None:
        return cached

    class CNamedArg:
        pass

    setattr(CNamedArg, NAME_ARG, name)
    setattr(CNamedArg, "original", T)

    Reg.already[key] = cached = CNamedArg
    return cached
# If mypy_extensions is installed, make its NamedArg use our cached version.
try:
    import mypy_extensions
except ImportError:
    pass
else:
    setattr(mypy_extensions, "NamedArg", MyNamedArg)
class RegisteredClasses:
    # (module name, qualified name) -> class object
    klasses: Dict[Tuple[str, str], type] = {}


def get_remembered_class(module_name: str, qual_name: str) -> type:  # TODO: not tested
    """Look up a previously remembered class; raises KeyError if absent."""
    return RegisteredClasses.klasses[(module_name, qual_name)]


def remember_created_class(res: type, msg: str = ""):
    """Record a generated class under its (module, qualname); overwrites silently."""
    key = (res.__module__, res.__qualname__)
    # logger.info(f"Asked to remember {key}: {msg}")
    if key in RegisteredClasses.klasses:
        pass
        # logger.info(f"Asked to remember again {key}: {msg}")
    RegisteredClasses.klasses[key] = res
# noinspection PyShadowingBuiltins
def my_dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False):
    """Drop-in replacement for @dataclass that also registers the result.

    Supports both @my_dataclass and @my_dataclass(...) call forms, mirroring
    the stdlib decorator protocol; the heavy lifting is in my_dataclass_.
    """

    def wrap(cls):
        # logger.info(f'called my_dataclass for {cls} with bases {_cls.__bases__}')
        # if cls.__name__ == 'B' and len(cls.__bases__) == 1 and cls.__bases__[0].__name__
        #   == 'object' and len(cls.__annotations__) != 2:
        #     assert False, (cls, cls.__bases__, cls.__annotations__)
        res = my_dataclass_(
            cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen
        )
        # logger.info(f'called my_dataclass for {cls} with bases {_cls.__bases__}, '
        #             f'returning {res} with bases {res.__bases__} and annotations {
        #             _cls.__annotations__}')
        remember_created_class(res, "my_dataclass")
        return res

    # See if we're being called as @dataclass or @dataclass().
    if _cls is None:
        # We're called with parens.
        return wrap

    # We're called as @dataclass without parens.
    return wrap(_cls)
# noinspection PyShadowingBuiltins
def my_dataclass_(_cls, *, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False):
    """Core of my_dataclass: wrap the stdlib @dataclass with zuper extensions.

    Adds a structural-typing metaclass to plain classes, merges annotations
    across bases, supports an optional 'nominal' marker field, forces
    unsafe_hash unless __hash__ is user-defined, installs the debug-print
    repr/str hooks, and resolves forward references.
    """
    original_doc = getattr(_cls, "__doc__", None)
    # logger.info(_cls.__dict__)
    unsafe_hash = True  # forced on; reverted below if the class defines __hash__

    if hasattr(_cls, "nominal"):
        # logger.info('nominal for {_cls}')
        nominal = True
    else:
        nominal = False

    #
    # if the class does not have a metaclass, add one
    # We copy both annotations and constants. This is needed for cases like:
    #
    #   @dataclass
    #   class C:
    #       a: List[] = field(default_factory=list)
    #
    # #
    if Generic in _cls.__bases__:
        msg = (
            f"There are problems with initialization: class {_cls.__name__} inherits from Generic: "
            f"{_cls.__bases__}"
        )
        raise Exception(msg)

    if type(_cls) is type:
        # Plain metaclass: rebuild the class so it uses StructuralTyping.
        old_annotations = get_all_annotations(_cls)
        from .zeneric2 import StructuralTyping

        old_annotations.update(getattr(_cls, ANNOTATIONS_ATT, {}))
        attrs = {ANNOTATIONS_ATT: old_annotations}
        for k in old_annotations:
            if hasattr(_cls, k):
                attrs[k] = getattr(_cls, k)

        class Base(metaclass=StructuralTyping):
            pass

        _cls2 = type(_cls.__name__, (_cls, Base) + _cls.__bases__, attrs)
        _cls2.__module__ = _cls.__module__
        _cls2.__qualname__ = _cls.__qualname__
        _cls = _cls2
    else:
        old_annotations = get_all_annotations(_cls)
        old_annotations.update(getattr(_cls, ANNOTATIONS_ATT, {}))
        setattr(_cls, ANNOTATIONS_ATT, old_annotations)

    # Per-class marker attribute name, derived from the (possibly subscripted) name.
    k = "__" + _cls.__name__.replace("[", "_").replace("]", "_")
    if nominal:
        # # annotations = getattr(K, '__annotations__', {})
        # old_annotations[k] = bool  # typing.Optional[bool]
        old_annotations[k] = typing.ClassVar[bool]  # typing.Optional[bool]
        setattr(_cls, k, True)

    # if True:
    #     anns = getattr(_cls, ANNOTATIONS_ATT)
    #     anns_reordered = reorder_annotations(_cls, anns)
    #     setattr(_cls, ANNOTATIONS_ATT, anns_reordered)

    if "__hash__" in _cls.__dict__:
        unsafe_hash = False
    # print(_cls.__dict__)
    # _cls.__dict__['__hash__']= None
    fields_before = dict(getattr(_cls, dataclasses._FIELDS, {}))
    # if hasattr(_cls, dataclasses._FIELDS):
    #     delattr(_cls, dataclasses._FIELDS)
    try:
        res = original_dataclass(
            _cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen
        )
    except KeyboardInterrupt:
        raise
    except Exception as e:
        msg = "Cannot create dataclass "
        raise ZTypeError(
            msg,
            _cls=_cls,
            fields=getattr(_cls, dataclasses._FIELDS, "n/a"),
            fields_before=fields_before,
            anns=getattr(_cls, "__annotations__", "n/a"),
        ) from e
    # do it right away
    setattr(res, "__doc__", original_doc)
    # assert dataclasses.is_dataclass(res)
    refs = getattr(_cls, DEPENDS_ATT, ())
    resolve_types(res, refs=refs)

    def __repr__(self) -> str:
        return DataclassHooks.dc_repr(self)

    def __str__(self):
        return DataclassHooks.dc_str(self)

    setattr(res, "__repr__", __repr__)
    setattr(res, "__str__", __str__)

    if nominal:
        setattr(_cls, k, True)  # <!--- FIXME
    return res
# Dead code (disabled): an earlier mechanism making Dict[K, V] return
# make_dict() for non-str keys by patching typing's __getitem__.
# NOTE(review): names defined here (Alias1, _GenericAlias, GenericMeta) are
# referenced by monkey_patch_Generic below — confirm before enabling either.
if False:
    if PYTHON_36:  # pragma: no cover
        from typing import GenericMeta

        # noinspection PyUnresolvedReferences
        previous_getitem = GenericMeta.__getitem__
    else:
        from typing import _GenericAlias

        previous_getitem = _GenericAlias.__getitem__

    class Alias1:
        def __getitem__(self, params):
            if self is typing.Dict:
                K, V = params
                if K is not str:
                    return make_dict(K, V)

            # noinspection PyArgumentList
            return previous_getitem(self, params)

    def original_dict_getitem(a):
        # noinspection PyArgumentList
        return previous_getitem(Dict, a)

    Dict.__getitem__ = Alias1.__getitem__
def monkey_patch_dataclass():
    """Globally replace dataclasses.dataclass with my_dataclass."""
    setattr(dataclasses, "dataclass", my_dataclass)


def monkey_patch_Generic():
    """Globally route Generic subscription through ZenericFix.

    NOTE(review): GenericMeta, _GenericAlias and Alias1 are only bound inside
    the disabled `if False:` block above, so the non-3.6 branch would raise
    NameError on Alias1 as written — confirm whether this is ever called.
    """
    if PYTHON_36:  # pragma: no cover
        GenericMeta.__getitem__ = ZenericFix.__getitem__
    else:
        Generic.__class_getitem__ = ZenericFix.__class_getitem__
        _GenericAlias.__getitem__ = Alias1.__getitem__
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/monkey_patching_typing.py
|
monkey_patching_typing.py
|
import typing
from typing import Tuple
from .constants import PYTHON_36, PYTHON_37
if PYTHON_36 or PYTHON_37:
from typing_extensions import Literal
else:
from typing import Literal
from zuper_commons.types import ZValueError
from .aliases import TypeLike
from .constants import PYTHON_36
__all__ = ["make_Literal", "is_Literal", "get_Literal_args"]
def make_Literal(*values: object) -> TypeLike:
    """Build a typing Literal from *values* (sorted; all of the same type).

    Raises ZValueError for an empty value list or mixed value types.
    """
    if len(values) == 0:
        raise ZValueError("A literal needs at least one value", values=values)
    distinct_types = {type(v) for v in values}
    if len(distinct_types) > 1:
        raise ZValueError(
            "We only allow values of the same type.", values=values, types=distinct_types
        )
    ordered = tuple(sorted(values))
    # noinspection PyTypeHints
    return Literal[ordered]
def is_Literal(x: TypeLike) -> bool:
    """True iff *x* is a typing(-extensions) Literal type."""
    if PYTHON_36:
        # On 3.6, typing_extensions represents Literal with a private _Literal class.
        return type(x).__name__ == "_Literal"
    else:
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._GenericAlias) and (getattr(x, "__origin__") is Literal)
def get_Literal_args(x: TypeLike) -> Tuple[TypeLike, ...]:
    """Return the tuple of admissible values of a Literal type."""
    assert is_Literal(x)
    if PYTHON_36:
        # noinspection PyUnresolvedReferences
        return x.__values__
    else:
        return getattr(x, "__args__")
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/literal.py
|
literal.py
|
__version__ = "6.2.3"

from zuper_commons.logs import ZLogger

logger = ZLogger("typing")
logger.debug(f"version: {__version__}")

from typing import TYPE_CHECKING

# For static analysis, present the stdlib dataclass/Generic; at runtime,
# substitute the zuper replacements.
if TYPE_CHECKING:
    # noinspection PyUnresolvedReferences
    from dataclasses import dataclass

    # noinspection PyUnresolvedReferences
    from typing import Generic
else:
    # noinspection PyUnresolvedReferences
    from .monkey_patching_typing import my_dataclass as dataclass

    # noinspection PyUnresolvedReferences
    from .monkey_patching_typing import ZenericFix as Generic

from .constants import *
from .debug_print_ import *
from .subcheck import *
from .annotations_tricks import *
# NOTE(review): duplicate of the import above — harmless but redundant.
from .annotations_tricks import *
from .aliases import *
from .get_patches_ import *
from .zeneric2 import *
from .structural_equalities import *
from .literal import *
from .dataclass_info import *
from .my_intersection import *
from .recursive_tricks import *
from .monkey_patching_typing import *
from .assorted_recursive_type_subst import *
from .type_algebra import *
from .uninhabited import *
from .complete import *
from .common import *

# Resolve forward references in CanBeUsed at import time.
resolve_types(CanBeUsed)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/__init__.py
|
__init__.py
|
__all__ = ["make_Uninhabited", "is_Uninhabited"]


class Unh:
    """The uninhabited (bottom) type: instantiation always fails."""

    def __init__(self):
        raise Exception()  # pragma: no cover


def make_Uninhabited():
    """Return the canonical uninhabited type."""
    return Unh


def is_Uninhabited(x):
    """True iff *x* is the uninhabited type."""
    return x is Unh
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/uninhabited.py
|
uninhabited.py
|
from dataclasses import dataclass, field, is_dataclass
from datetime import datetime
from decimal import Decimal
from functools import reduce
from typing import cast, Dict, List, Optional, Set, Tuple, Type
from zuper_commons.types import ZValueError
from .aliases import TypeLike
from .annotations_tricks import (
get_DictLike_args,
get_FixedTupleLike_args,
get_ListLike_arg,
get_Optional_arg,
get_SetLike_arg,
get_TypeVar_name,
get_Union_args,
get_VarTuple_arg,
is_DictLike,
is_FixedTupleLike,
is_ListLike,
is_Optional,
is_SetLike,
is_TypeVar,
is_Union,
is_VarTuple,
make_dict,
make_list,
make_set,
make_Tuple,
make_Union,
make_VarTuple,
unroll_union,
)
from .common import DictStrType
from .my_intersection import get_Intersection_args, is_Intersection, make_Intersection
from .uninhabited import is_Uninhabited, make_Uninhabited
__all__ = ["type_inf", "type_sup", "Matches"]
def unroll_intersection(a: TypeLike) -> Tuple[TypeLike, ...]:
    """Return the operands of an Intersection, or (a,) for any other type."""
    if is_Intersection(a):
        return get_Intersection_args(a)
    else:
        return (a,)
def type_sup(a: TypeLike, b: TypeLike) -> TypeLike:
    """Join (least upper bound) of two types.

    Handles object, Uninhabited, None/Optional, Unions (type variables kept
    symbolic), bool|int -> int, list/set/dict-likes, var/fixed tuples,
    dataclasses (common fields only), and TypeVars; any remaining pair
    falls back to a Union of the two.
    """
    assert a is not None
    assert b is not None
    assert not isinstance(a, tuple), a
    assert not isinstance(b, tuple), b

    if a is b or (a == b):
        return a

    if a is object or b is object:
        return object

    if is_Uninhabited(a):
        return b
    if is_Uninhabited(b):
        return a

    # NoneType joined with T gives Optional[T].
    if a is type(None):
        if is_Optional(b):
            return b
        else:
            return Optional[b]
    if b is type(None):
        if is_Optional(a):
            return a
        else:
            return Optional[a]

    # Strip Optional and re-add None via the NoneType cases above.
    if is_Optional(a):
        return type_sup(type(None), type_sup(get_Optional_arg(a), b))
    if is_Optional(b):
        return type_sup(type(None), type_sup(get_Optional_arg(b), a))

    # if is_Union(a) and is_Union(b):  # XXX
    #     r = []
    #     r.extend(unroll_union(a))
    #     r.extend(unroll_union(b))
    #     return reduce(type_sup, r)
    #
    if is_Union(a) and is_Union(b):
        ta = unroll_union(a)
        tb = unroll_union(b)

        # Keep type variables as separate union members; join the rest.
        tva, oa = get_typevars(ta)
        tvb, ob = get_typevars(tb)

        tv = tuple(set(tva + tvb))
        oab = oa + ob
        if not oab:
            return make_Union(*tv)
        else:
            other = reduce(type_sup, oa + ob)
            os = unroll_union(other)
            return make_Union(*(tv + os))

    if (a, b) in [(bool, int), (int, bool)]:
        return int

    if is_ListLike(a) and is_ListLike(b):
        a = cast(Type[List], a)
        b = cast(Type[List], b)
        A = get_ListLike_arg(a)
        B = get_ListLike_arg(b)
        u = type_sup(A, B)
        return make_list(u)

    if is_SetLike(a) and is_SetLike(b):
        a = cast(Type[Set], a)
        b = cast(Type[Set], b)
        A = get_SetLike_arg(a)
        B = get_SetLike_arg(b)
        u = type_sup(A, B)
        return make_set(u)

    if is_DictLike(a) and is_DictLike(b):
        a = cast(Type[Dict], a)
        b = cast(Type[Dict], b)
        KA, VA = get_DictLike_args(a)
        KB, VB = get_DictLike_args(b)
        K = type_sup(KA, KB)
        V = type_sup(VA, VB)
        return make_dict(K, V)

    if is_VarTuple(a) and is_VarTuple(b):
        a = cast(Type[Tuple], a)
        b = cast(Type[Tuple], b)
        VA = get_VarTuple_arg(a)
        VB = get_VarTuple_arg(b)
        V = type_sup(VA, VB)
        return make_VarTuple(V)

    if is_FixedTupleLike(a) and is_FixedTupleLike(b):
        a = cast(Type[Tuple], a)
        b = cast(Type[Tuple], b)
        tas = get_FixedTupleLike_args(a)
        tbs = get_FixedTupleLike_args(b)
        # NOTE: zip truncates to the shorter tuple if lengths differ.
        ts = tuple(type_sup(ta, tb) for ta, tb in zip(tas, tbs))
        return make_Tuple(*ts)

    if is_dataclass(a) and is_dataclass(b):
        return type_sup_dataclass(a, b)

    if is_TypeVar(a) and is_TypeVar(b):
        if get_TypeVar_name(a) == get_TypeVar_name(b):
            return a

    return make_Union(a, b)
    # raise NotImplementedError(a, b)
def type_inf_dataclass(a: Type[dataclass], b: Type[dataclass]) -> Type[dataclass]:
    """Meet of two dataclasses: union of their fields; shared fields get the
    meet of their types. Short-circuits to Uninhabited if any field's meet
    is empty."""
    from .monkey_patching_typing import my_dataclass

    ann_a = a.__annotations__
    ann_b = b.__annotations__

    all_keys = set(ann_a) | set(ann_b)
    res = {}
    for k in all_keys:
        if k in ann_a and k not in ann_b:
            R = ann_a[k]
        elif k not in ann_a and k in ann_b:
            R = ann_b[k]
        else:
            VA = ann_a[k]
            VB = ann_b[k]
            R = type_inf(VA, VB)
        if is_Uninhabited(R):
            return R
        res[k] = R
    name = f"Int_{a.__name__}_{b.__name__}"
    T2 = my_dataclass(type(name, (), {"__annotations__": res, "__module__": "zuper_typing"}))
    return T2
def type_sup_dataclass(a: Type[dataclass], b: Type[dataclass]) -> Type[dataclass]:
    """Join of two dataclasses: keep only the common fields, each with the
    join of its two declared types."""
    from .monkey_patching_typing import my_dataclass

    ann_a = a.__annotations__
    ann_b = b.__annotations__

    common_keys = set(ann_a) & set(ann_b)
    res = {}
    for k in common_keys:
        # k is in both annotation dicts by construction, so the join of the
        # two field types is always taken. (The previous version carried
        # unreachable one-sided branches copied from type_inf_dataclass.)
        res[k] = type_sup(ann_a[k], ann_b[k])

    name = f"Join_{a.__name__}_{b.__name__}"
    T2 = my_dataclass(type(name, (), {"__annotations__": res, "__module__": "zuper_typing"}))
    return T2
def type_inf(a: TypeLike, b: TypeLike) -> TypeLike:
    """Meet (greatest lower bound) of two types; thin wrapper over type_inf0
    kept as a hook for error-context reporting."""
    try:
        res = type_inf0(a, b)
    except ZValueError as e:  # pragma: no cover
        raise
        # raise ZValueError("problem", a=a, b=b) from e
    # if isinstance(res, tuple):
    #     raise ZValueError(a=a, b=b, res=res)
    return res
def get_typevars(a: Tuple[TypeLike, ...]) -> Tuple[Tuple, Tuple]:
    """Partition *a* into (typevars, other types), preserving relative order."""
    typevars = []
    others = []
    for t in a:
        (typevars if is_TypeVar(t) else others).append(t)
    return tuple(typevars), tuple(others)
def type_inf0(a: TypeLike, b: TypeLike) -> TypeLike:
    """Meet (greatest lower bound) of two types.

    Note: several kind-mismatch checks below use `^` (boolean XOR, "exactly
    one of the two is that kind"), which yields the uninhabited type.
    """
    assert a is not None
    assert b is not None
    if isinstance(a, tuple):  # pragma: no cover
        raise ZValueError(a=a, b=b)
    if isinstance(b, tuple):  # pragma: no cover
        raise ZValueError(a=a, b=b)

    if a is b or (a == b):
        return a

    if a is object:
        return b
    if b is object:
        return a

    if is_Uninhabited(a):
        return a
    if is_Uninhabited(b):
        return b

    # Optional meets NoneType as NoneType.
    if is_Optional(a):
        if b is type(None):
            return b
    if is_Optional(b):
        if a is type(None):
            return a

    if is_Optional(a) and is_Optional(b):
        x = type_inf(get_Optional_arg(a), get_Optional_arg(b))
        if is_Uninhabited(x):
            return type(None)
        return Optional[x]

    # if not is_Intersection(a) and is_Intersection(b):
    #     r = (a,) + unroll_intersection(b)
    #     return reduce(type_inf, r)
    if is_Intersection(a) or is_Intersection(b):
        ta = unroll_intersection(a)
        tb = unroll_intersection(b)

        # Keep type variables symbolic; meet the concrete operands.
        tva, oa = get_typevars(ta)
        tvb, ob = get_typevars(tb)

        tv = tuple(set(tva + tvb))
        oab = oa + ob
        if not oab:
            return make_Intersection(tv)
        else:
            other = reduce(type_inf, oa + ob)
            os = unroll_intersection(other)
            return make_Intersection(tv + os)

    if is_Union(b):
        # A ^ (C u D)
        # = A^C u A^D
        # NOTE(review): no symmetric is_Union(a) branch — confirm callers
        # normalize operand order before reaching this point.
        r = []
        for t in get_Union_args(b):
            r.append(type_inf(a, t))
        return reduce(type_sup, r)

    # if is_Intersection(a) and not is_Intersection(b):
    #     res = []
    #     for aa in get_Intersection_args(a):
    #         r = type_inf(aa)
    #     r.extend(unroll_intersection(a))
    #     r.extend(unroll_intersection(b))  # put first!
    #     return reduce(type_inf, r)

    if (a, b) in [(bool, int), (int, bool)]:
        return bool

    if is_TypeVar(a) and is_TypeVar(b):
        if get_TypeVar_name(a) == get_TypeVar_name(b):
            return a

    if is_TypeVar(a) or is_TypeVar(b):
        # Defer: keep the constraint as a symbolic intersection.
        return make_Intersection((a, b))

    primitive = (bool, int, str, Decimal, datetime, float, bytes, type(None))
    if a in primitive or b in primitive:
        # Distinct primitives (equality was handled above) have an empty meet.
        return make_Uninhabited()

    if is_ListLike(a) ^ is_ListLike(b):
        return make_Uninhabited()

    if is_ListLike(a) & is_ListLike(b):
        a = cast(Type[List], a)
        b = cast(Type[List], b)
        A = get_ListLike_arg(a)
        B = get_ListLike_arg(b)
        u = type_inf(A, B)
        return make_list(u)

    if is_SetLike(a) ^ is_SetLike(b):
        return make_Uninhabited()

    if is_SetLike(a) and is_SetLike(b):
        a = cast(Type[Set], a)
        b = cast(Type[Set], b)
        A = get_SetLike_arg(a)
        B = get_SetLike_arg(b)
        u = type_inf(A, B)
        return make_set(u)

    if is_DictLike(a) ^ is_DictLike(b):
        return make_Uninhabited()

    if is_DictLike(a) and is_DictLike(b):
        a = cast(Type[Dict], a)
        b = cast(Type[Dict], b)
        KA, VA = get_DictLike_args(a)
        KB, VB = get_DictLike_args(b)
        K = type_inf(KA, KB)
        V = type_inf(VA, VB)
        return make_dict(K, V)

    if is_dataclass(a) ^ is_dataclass(b):
        return make_Uninhabited()

    if is_dataclass(a) and is_dataclass(b):
        return type_inf_dataclass(a, b)

    if is_VarTuple(a) and is_VarTuple(b):
        a = cast(Type[Tuple], a)
        b = cast(Type[Tuple], b)
        VA = get_VarTuple_arg(a)
        VB = get_VarTuple_arg(b)
        V = type_inf(VA, VB)
        return make_VarTuple(V)

    if is_FixedTupleLike(a) and is_FixedTupleLike(b):
        a = cast(Type[Tuple], a)
        b = cast(Type[Tuple], b)
        tas = get_FixedTupleLike_args(a)
        tbs = get_FixedTupleLike_args(b)
        # NOTE: zip truncates to the shorter tuple if lengths differ.
        ts = tuple(type_inf(ta, tb) for ta, tb in zip(tas, tbs))
        return make_Tuple(*ts)

    # NOTE(review): duplicate of the TypeVar check above — unreachable here.
    if is_TypeVar(a) and is_TypeVar(b):
        if get_TypeVar_name(a) == get_TypeVar_name(b):
            return a

    return make_Intersection((a, b))
@dataclass
class MatchConstraint:
    """Interval constraint on one type variable: lb <: T <: ub (None = unset)."""

    # upper bound (T must be usable as ub)
    ub: Optional[type] = None
    # lower bound (lb must be usable as T)
    lb: Optional[type] = None

    def impose_subtype(self, ub) -> "MatchConstraint":
        # Weaken the upper bound: new ub is the join of the old and new bounds.
        ub = type_sup(self.ub, ub) if self.ub is not None else ub
        return MatchConstraint(ub=ub, lb=self.lb)

    def impose_supertype(self, lb) -> "MatchConstraint":
        # Tighten the lower bound: new lb is the meet of the old and new bounds.
        lb = type_inf(self.lb, lb) if self.lb is not None else lb
        return MatchConstraint(lb=lb, ub=self.ub)
DMC = make_dict(str, MatchConstraint)


@dataclass
class Matches:
    """Accumulated per-typevar constraints; updates return new instances."""

    # typevar name -> its current interval constraint
    m: Dict[str, MatchConstraint] = field(default_factory=DMC)

    def __post_init__(self):
        # Normalize to the custom dict type.
        self.m = DMC(self.m)

    def get_matches(self) -> Dict[str, type]:
        # A variable counts as matched once it has an upper bound.
        res = DictStrType()
        for k, v in self.m.items():
            if v.ub is not None:
                res[k] = v.ub
        return res

    def get_ub(self, k: str):
        if k not in self.m:
            return None
        return self.m[k].ub

    def get_lb(self, k: str):
        if k not in self.m:
            return None
        return self.m[k].lb

    def must_be_subtype_of(self, k: str, ub) -> "Matches":
        # Return a new Matches with the upper bound for k weakened by ub.
        m2 = dict(self.m)
        if k not in m2:
            m2[k] = MatchConstraint()
        m2[k] = m2[k].impose_subtype(ub=ub)
        return Matches(m2)

    def must_be_supertype_of(self, k: str, lb) -> "Matches":
        # Return a new Matches with the lower bound for k tightened by lb.
        m2 = dict(self.m)
        if k not in m2:
            m2[k] = MatchConstraint()
        m2[k] = m2[k].impose_supertype(lb=lb)
        return Matches(m2)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/type_algebra.py
|
type_algebra.py
|
from typing import Tuple
from zuper_commons.types import ZTypeError
from zuper_typing import (
asdict_not_recursive,
is_CustomDict,
is_CustomList,
is_CustomSet,
is_CustomTuple,
is_dataclass_instance,
)
__all__ = ["check_complete_types", "NotCompleteType"]
class NotCompleteType(ZTypeError):
    """A plain builtin container was found where a zuper Custom* type was expected."""

    pass


def check_complete_types(x: object, prefix: Tuple[str, ...] = (), orig=None):
    """Recursively verify every container inside *x* is a zuper Custom* type.

    prefix: path of keys/indices from the root, reported on failure.
    orig: the root value, threaded through for error context.
    Raises NotCompleteType at the first plain dict/list/tuple/set found.
    """
    if orig is None:
        orig = x
    if isinstance(x, dict):
        T = type(x)
        if not is_CustomDict(type(x)):
            raise NotCompleteType("Found", w=prefix, T=T, x=x, orig=orig)
        for k, v in x.items():
            check_complete_types(v, prefix=prefix + (k,), orig=orig)
    if isinstance(x, list):
        T = type(x)
        if not is_CustomList(type(x)):
            raise NotCompleteType("Found", w=prefix, T=T, x=x, orig=orig)
        for i, v in enumerate(x):
            check_complete_types(v, prefix=prefix + (str(i),), orig=orig)
    if isinstance(x, tuple):
        T = type(x)
        if not is_CustomTuple(type(x)):
            raise NotCompleteType("Found", w=prefix, T=T, x=x, orig=orig)
        for i, v in enumerate(x):
            check_complete_types(v, prefix=prefix + (str(i),), orig=orig)
    if isinstance(x, set):
        T = type(x)
        if not is_CustomSet(type(x)):
            raise NotCompleteType("Found", w=prefix, T=T, x=x, orig=orig)
        for i, v in enumerate(x):
            check_complete_types(v, prefix=prefix + (str(i),), orig=orig)
    # NOTE(review): only this last branch is an elif (of the set check); the
    # dict/list/tuple checks above are independent ifs — confirm intended.
    elif is_dataclass_instance(x):
        for k, v in asdict_not_recursive(x).items():
            check_complete_types(v, prefix=prefix + (k,), orig=orig)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/complete.py
|
complete.py
|
import sys
import typing
from abc import ABCMeta, abstractmethod
from dataclasses import _PARAMS, dataclass, fields, is_dataclass
from datetime import datetime
from decimal import Decimal
from typing import Any, cast, ClassVar, Dict, Optional, Tuple, Type, TypeVar
from zuper_commons.types import ZTypeError, ZValueError
from . import logger
from .aliases import TypeLike
from .annotations_tricks import (
get_ClassVar_arg,
get_Type_arg,
get_TypeVar_bound,
is_ClassVar,
is_NewType,
is_Type,
is_TypeLike,
make_dict,
name_for_type_like,
)
from .constants import ANNOTATIONS_ATT, DEPENDS_ATT, MakeTypeCache, PYTHON_36, ZuperTypingGlobals
from .dataclass_info import DataclassInfo, get_dataclass_info, set_dataclass_info
from .recursive_tricks import (
get_name_without_brackets,
NoConstructorImplemented,
replace_typevars,
)
from .subcheck import can_be_used_as2, value_liskov
__all__ = ["resolve_types", "MyABC", "StructuralTyping", "make_type"]
X = TypeVar("X")
def as_tuple(x) -> Tuple:
    """Wrap *x* in a 1-tuple unless it already is a tuple."""
    if isinstance(x, tuple):
        return x
    return (x,)
# Python 3.6 compatibility: capture the original Generic subscription
# machinery so ZenericFix can fall back to it.
if PYTHON_36:  # pragma: no cover
    from typing import GenericMeta

    # noinspection PyUnresolvedReferences
    old_one = GenericMeta.__getitem__
else:
    old_one = None

if PYTHON_36:  # pragma: no cover
    # logger.info('In Python 3.6')

    # On 3.6, a metaclass is needed so that ZenericFix[...] itself works;
    # ZenericFix is defined right below, resolved at call time.
    class ZMeta(type):
        def __getitem__(self, *params):
            return ZenericFix.__class_getitem__(*params)


else:
    ZMeta = type
class ZenericFix(metaclass=ZMeta):
    """Replacement for typing.Generic subscription.

    ZenericFix[T, ...] builds a GenericProxy subclass carrying a
    DataclassInfo that records the type parameters.
    """

    if PYTHON_36:  # pragma: no cover

        def __getitem__(self, *params):
            # logger.info(f'P36 {params} {self}')
            if self is typing.Generic:
                return ZenericFix.__class_getitem__(*params)

            if self is Dict:
                # Non-str keys need the custom dict type.
                K, V = params
                if K is not str:
                    return make_dict(K, V)

            # noinspection PyArgumentList
            return old_one(self, params)

    # noinspection PyMethodParameters
    @classmethod
    def __class_getitem__(cls0, params):
        # logger.info(f"ZenericFix.__class_getitem__ params = {params}")
        types0 = as_tuple(params)
        assert isinstance(types0, tuple)
        for t in types0:
            assert is_TypeLike(t), (t, types0)
        bound_att = types0
        name = "Generic[%s]" % ",".join(name_for_type_like(_) for _ in bound_att)
        di = DataclassInfo(name=name, orig=bound_att)
        # Create the proxy class and attach its DataclassInfo.
        gp = type(name, (GenericProxy,), {"di": di})
        set_dataclass_info(gp, di)
        return gp
#
# def autoInitDecorator(toDecoreFun):
# def wrapper(*args, **kwargs):
# print("Hello from autoinit decorator")
# toDecoreFun(*args, **kwargs)
#
# return wrapper
class StructuralTyping(type):
    """
    Metaclass adding structural (duck-typed) subclass/instance checks for
    dataclass-like types, delegating to can_be_used_as2 and memoizing verdicts.
    """

    # memoization of (superclass, subclass) -> bool verdicts
    cache = {}

    def __subclasscheck__(self, subclass: type) -> bool:
        key = (self, subclass)
        if key in StructuralTyping.cache:
            return StructuralTyping.cache[key]
        # logger.info(f"{subclass.__name__} <= {self.__name__}")
        try:
            can = can_be_used_as2(subclass, self)
        except BaseException:  # was a bare `except:`; same behavior, explicit
            if PYTHON_36:
                # on 3.6 the check machinery is flaky: treat failure as "no"
                return False
            else:
                raise
        res = can.result
        StructuralTyping.cache[key] = res
        return res

    def __instancecheck__(self, instance) -> bool:
        T = type(instance)
        if T is self:
            return True
        if not is_dataclass(T):
            return False
        # try the cheap nominal check first
        i = super().__instancecheck__(instance)
        if i:
            return True
        # fall back to the structural subclass check
        return self.__subclasscheck__(T)
class MyABC(StructuralTyping, ABCMeta):
    """
    Metaclass for GenericProxy classes: attaches a DataclassInfo (from the
    "di" namespace entry, from the last base, or a fresh default) and renames
    the class to include its type parameters.
    """

    def __new__(mcs, name_orig: str, bases, namespace, **kwargs):
        # logger.info(name_orig=name_orig, bases=bases, namespace=namespace, kwargs=kwargs)
        clsi = None
        if "di" in namespace:
            clsi = cast(DataclassInfo, namespace["di"])
            # logger.info('got clsi from namespace', clsi)
        else:
            if bases:
                # this is when we subclass
                base_info = get_dataclass_info(bases[-1])
                clsi = DataclassInfo(name="", orig=base_info.get_open())
                # logger.info('got clsi from subclass', base_info=base_info, clsi=clsi)
        if clsi is None:
            default = DataclassInfo(name=name_orig, orig=())
            clsi = default
        # e.g. "C" with open params (X,) becomes "C[X]"
        name = clsi.name = get_name_for(name_orig, clsi)
        # noinspection PyArgumentList
        cls = super().__new__(mcs, name, bases, namespace, **kwargs)
        qn = cls.__qualname__.replace("." + name_orig, "." + name)
        setattr(cls, "__qualname__", qn)
        # logger.info(f"  old module {cls.__module__} new {mcs.__module__}")
        # setattr(cls, "__module__", mcs.__module__)
        set_dataclass_info(cls, clsi)
        return cls
def get_name_for(name_orig: str, clsi: DataclassInfo) -> str:
    """Display name for a class: base name plus "[params]" when it has type parameters."""
    base = get_name_without_brackets(name_orig)
    # params.append(replace_typevars(x, bindings=clsi.bindings, symbols={}))
    params = list(clsi.orig)
    if not params:
        return base
    inside = ",".join(name_for_type_like(p) for p in params)
    return f"{base}[{inside}]"
# On Python 3.6, __class_getitem__ is not supported, so subscription of
# GenericProxy subclasses is intercepted by this metaclass instead.
if PYTHON_36:  # pragma: no cover

    class FakeGenericMeta(MyABC):
        def __getitem__(self, params2):
            clsi = get_dataclass_info(self)
            types_open = clsi.get_open()
            types2 = as_tuple(params2)
            assert isinstance(types2, tuple), types2
            for t in types2:
                assert is_TypeLike(t), (t, types2)
            # C[X] with the same open params is C itself
            if types_open == types2:
                return self
            bindings: Dict[TypeVar, TypeVar] = {}
            for T, U in zip(types_open, types2):
                bindings[T] = U
                bound = get_TypeVar_bound(T)
                if bound is not None:
                    try:
                        can = can_be_used_as2(U, bound)
                    except (TypeError, ValueError) as e:
                        if PYTHON_36:
                            continue
                        else:
                            raise
                        # raise ZTypeError(U=U, bound=bound) from e
                    else:
                        if not can:
                            msg = (
                                f'For type parameter "{name_for_type_like(T)}", expected a '
                                f'subclass of "{name_for_type_like(bound)}", found {U}.'
                            )
                            raise ZTypeError(msg, can=can, T=T, bound=bound, U=U)
            return make_type(self, bindings)


else:
    FakeGenericMeta = MyABC
class GenericProxy(metaclass=FakeGenericMeta):
    """
    Base class standing in for typing.Generic: subscripting a subclass with
    concrete types (``C[int]``) produces a specialization via make_type(),
    after checking each argument against the TypeVar's declared bound.
    """

    @abstractmethod
    def need(self) -> None:
        """"""

    @classmethod
    def __class_getitem__(cls, params2) -> type:
        clsi = get_dataclass_info(cls)
        types_open = clsi.get_open()
        types2 = as_tuple(params2)
        if len(types_open) != len(types2):
            msg = "Cannot match type length"
            raise ZValueError(
                msg, cls=cls.__name__, clsi=get_dataclass_info(cls), types=types_open, types2=types2
            )
        bindings = {}
        for T, U in zip(types_open, types2):
            bindings[T] = U
            bound = get_TypeVar_bound(T)
            # only check bounds that are real classes (object is trivially satisfied)
            if (bound is not None) and (bound is not object) and isinstance(bound, type):
                try:
                    # issub = issubclass(U, bound)
                    issub = can_be_used_as2(U, bound)
                except TypeError as e:
                    msg = ""
                    raise ZTypeError(msg, T=T, T_bound=bound, U=U) from e
                if not issub:
                    # BUGFIX: the message used to render a literal "@U" (missing
                    # f-string braces) and "expected asubclass" (missing space).
                    msg = (
                        f'For type parameter "{T.__name__}", expected a '
                        f'subclass of "{bound.__name__}", found {U}.'
                    )
                    raise ZTypeError(msg, T=T, T_bound=bound, U=U)
        res = make_type(cls, bindings)
        # from .monkey_patching_typing import remember_created_class
        #
        # remember_created_class(res, "__class_getitem__")
        return res
class Fake:
    """
    Placeholder for a (possibly not-yet-specialized) generic type: resolves
    ``Name[Arg]`` through a symbol table first, falling back to subscripting
    the real type.
    """

    symbols: dict
    myt: type

    def __init__(self, myt, symbols: dict):
        self.myt = myt
        self.name_without = get_name_without_brackets(name_for_type_like(myt))
        self.symbols = symbols

    def __getitem__(self, item: type) -> type:
        key = f"{self.name_without}[{name_for_type_like(item)}]"
        if key in self.symbols:
            return self.symbols[key]
        # not in the table: delegate to the real type's subscription
        # noinspection PyUnresolvedReferences
        return self.myt[item]
def resolve_types(T, locals_=None, refs: Tuple = (), nrefs: Optional[Dict[str, Any]] = None):
    """
    Resolve the annotations of dataclass ``T`` in place.

    Each annotation is run through replace_typevars() against a symbol table
    built from ``locals_``, ``nrefs`` (name -> type), ``refs``, T's declared
    dependencies (DEPENDS_ATT) and its open type parameters. Annotations whose
    names cannot be resolved yet are left untouched; the resolved types are
    also written back onto the dataclass fields.
    """
    if nrefs is None:
        nrefs = {}
    assert is_dataclass(T)
    clsi = get_dataclass_info(T)
    # rl = RecLogger()
    if locals_ is None:
        locals_ = {}
    symbols = dict(locals_)
    for k, v in nrefs.items():
        symbols[k] = v
    others = getattr(T, DEPENDS_ATT, ())
    for t in (T,) + refs + others:
        n = name_for_type_like(t)
        symbols[n] = t
        # logger.info(f't = {t} n {n}')
        name_without = get_name_without_brackets(n)
        # if name_without in ['Union', 'Dict', ]:
        #     # FIXME please add more here
        #     continue
        if name_without not in symbols:
            # bare name resolves lazily through a Fake proxy
            symbols[name_without] = Fake(t, symbols)
        # else:
        #     pass
    for x in clsi.get_open():  # (T, GENERIC_ATT2, ()):
        if hasattr(x, "__name__"):
            symbols[x.__name__] = x
    # logger.debug(f'symbols: {symbols}')
    annotations: Dict[str, TypeLike] = getattr(T, ANNOTATIONS_ATT, {})
    # add in case it was not there
    setattr(T, ANNOTATIONS_ATT, annotations)
    for k, v in annotations.items():
        if not isinstance(v, str) and is_ClassVar(v):
            continue  # XXX
        v = cast(TypeLike, v)
        try:
            r = replace_typevars(v, bindings={}, symbols=symbols)
            # rl.p(f'{k!r} -> {v!r} -> {r!r}')
            annotations[k] = r
        except NameError:
            # name not resolvable yet: leave the annotation as-is
            # msg = (
            #     f"resolve_type({T.__name__}):"
            #     f' Cannot resolve names for attribute "{k}" = {v!r}.'
            # )
            # msg += f'\n symbols: {symbols}'
            # msg += '\n\n' + indent(traceback.format_exc(), '', '> ')
            # raise NameError(msg) from e
            # logger.warning(msg)
            continue
        except TypeError as e:  # pragma: no cover
            msg = f'Cannot resolve type for attribute "{k}".'
            raise ZTypeError(msg) from e
    for f in fields(T):
        assert f.name in annotations
        # msg = f'Cannot get annotation for field {f.name!r}'
        # logger.warning(msg)
        # continue
        # logger.info(K=T.__name__, name=f.name, before=f.type, after=annotations[f.name],
        #             a=annotations[f.name].__dict__)
        f.type = annotations[f.name]
    # logger.info(K=T.__name__, anns=getattr(T, '__annotations__',"?"), annotations=annotations)
def type_check(type_self: type, k: str, T_expected: type, value_found: object):
    """
    Runtime check that attribute ``k`` of class ``type_self`` holds a value
    compatible with ``T_expected``; raises ZValueError on mismatch.

    dict/list/tuple values are always skipped; non-"simple" values are checked
    only when enable_type_checking_difficult is set.
    """
    try:
        enable_difficult = ZuperTypingGlobals.enable_type_checking_difficult
        T_found = type(value_found)
        simple = T_found in [int, float, bool, str, bytes, Decimal, datetime]
        definitely_exclude = T_found in [dict, list, tuple]
        do_it = (not definitely_exclude) and (enable_difficult or simple)
        if do_it:
            ok = value_liskov(value_found, T_expected)
            if not ok:  # pragma: no cover
                type_self_name = name_for_type_like(type_self)
                # T_expected_name = name_for_type_like(T_expected)
                # T_found_name = name_for_type_like(T_found)
                msg = f"Error for field {k!r} of class {type_self_name}"
                # warnings.warn(msg, stacklevel=3)
                raise ZValueError(
                    msg,
                    field=k,
                    why=ok,
                    expected_type=T_expected,
                    found_value=value_found,
                    found_type=type(value_found),
                )
    except TypeError as e:  # pragma: no cover
        msg = f"Cannot judge annotation of {k} (supposedly {value_found!r})."
        if sys.version_info[:2] == (3, 6):
            # FIXME: warn
            return
        logger.error(msg)
        raise TypeError(msg) from e
def make_type(
    cls: Type[dataclass],
    bindings: Dict[type, type],  # TypeVars
    symbols: Optional[Dict[str, type]] = None,
) -> type:
    """
    Specialize the generic dataclass ``cls`` with ``bindings`` (TypeVar -> type).

    Memoized per-class in DataclassInfo.specializations, keyed by the bindings
    and symbols items; the actual work happens in make_type_().
    """
    if symbols is None:
        symbols = {}
    clsi = get_dataclass_info(cls)
    key = tuple(bindings.items()) + tuple(symbols.items())
    if key in clsi.specializations:
        return clsi.specializations[key]
    try:
        res = make_type_(cls, bindings, symbols)
    except ZValueError as e:  # pragma: no cover
        msg = "Cannot run make_type"
        raise ZValueError(msg, cls=cls, bindings=bindings, symbols=symbols) from e
    clsi.specializations[key] = res
    return res
def make_type_(
    cls: Type[dataclass],
    bindings0: Dict[type, type],  # TypeVars
    symbols: Optional[Dict[str, type]] = None,
) -> type:
    """
    Implementation of make_type(): create a subclass of ``cls`` with its
    annotations rewritten through the TypeVar bindings, re-apply @dataclass,
    and register the result (name, qualname, DataclassInfo, caches).

    Only bindings for the class's *open* TypeVars are honored; with no
    applicable bindings, ``cls`` itself is returned.
    """
    clsi = get_dataclass_info(cls)
    # We only allow the binding of the open ones
    bindings: Dict[type, type] = {}
    open_typevars = clsi.get_open()  # renamed from `open` (shadowed the builtin)
    for k, v in bindings0.items():
        if k in open_typevars:
            bindings[k] = v
        else:
            pass
    if not bindings:
        return cls
    if symbols is None:
        symbols = {}
    symbols = dict(symbols)
    refs = getattr(cls, DEPENDS_ATT, ())
    for r in refs:
        # n = name_for_type_like(r)
        n = r.__name__
        symbols[get_name_without_brackets(n)] = r
    assert not is_NewType(cls), cls
    cache_key = (str(cls), str(bindings), str(clsi.orig))
    if ZuperTypingGlobals.cache_enabled:
        if cache_key in MakeTypeCache.cache:
            # logger.info(f"using cached value for {cache_key}")
            return MakeTypeCache.cache[cache_key]

    def recur(_):
        # substitute bound TypeVars / symbol names inside an annotation
        return replace_typevars(_, bindings=bindings, symbols=symbols)

    new_bindings = bindings
    # its_globals = dict(sys.modules[cls.__module__].__dict__)
    # # its_globals[get_name_without_brackets(cls.__name__)] = cls
    # try:
    #     annotations = typing.get_type_hints(cls, its_globals)
    # except:
    #     logger.info(f'globals for {cls.__name__}', cls.__module__, list(its_globals))
    #     raise
    annotations = getattr(cls, ANNOTATIONS_ATT, {})
    name_without = get_name_without_brackets(cls.__name__)

    def param_name(x: type) -> str:
        # display name of a parameter after substitution
        x = replace_typevars(x, bindings=new_bindings, symbols=symbols)
        return name_for_type_like(x)

    if clsi.orig:
        pnames = tuple(param_name(_) for _ in clsi.orig)
        name2 = "%s[%s]" % (name_without, ",".join(pnames))
    else:
        name2 = name_without
    try:
        cls2 = type(name2, (cls,), {"need": lambda: None})
    except TypeError as e:  # pragma: no cover
        msg = f'Cannot create derived class "{name2}" from the class.'
        raise ZTypeError(msg, cls=cls) from e
    symbols[name2] = cls2
    symbols[cls.__name__] = cls2  # also MyClass[X] should resolve to the same
    # register early so that recursive annotations find the class
    MakeTypeCache.cache[cache_key] = cls2

    class Fake2:
        # lazy resolver for "Name[Arg]" seen while rewriting annotations
        def __getitem__(self, item):
            n = name_for_type_like(item)
            complete = f"{name_without}[{n}]"
            if complete in symbols:
                return symbols[complete]
            logger.info(f"Fake2:getitem", name_for_type_like(cls), complete=complete)
            # noinspection PyUnresolvedReferences
            return cls[item]

    if name_without not in symbols:
        symbols[name_without] = Fake2()
    for T, U in bindings.items():
        symbols[T.__name__] = U
        if hasattr(U, "__name__"):
            # dict does not have name
            symbols[U.__name__] = U
    new_annotations = {}
    for k, v0 in annotations.items():
        v = recur(v0)
        # print(f'{v0!r} -> {v!r}')
        if is_ClassVar(v):
            s = get_ClassVar_arg(v)
            if is_Type(s):
                # ClassVar[Type[X]] becomes a concrete class attribute
                st = get_Type_arg(s)
                concrete = recur(st)
                new_annotations[k] = ClassVar[type]
                setattr(cls2, k, concrete)
            else:
                s2 = recur(s)
                new_annotations[k] = ClassVar[s2]
        else:
            new_annotations[k] = v
    original__post_init__ = getattr(cls, "__post_init__", None)
    if ZuperTypingGlobals.enable_type_checking:

        def __post_init__(self):
            # do it first (because they might change things around)
            if original__post_init__ is not None:
                original__post_init__(self)
            for k, T_expected in new_annotations.items():
                if is_ClassVar(T_expected):
                    continue
                if isinstance(T_expected, type):
                    val = getattr(self, k)
                    type_check(type(self), k=k, value_found=val, T_expected=T_expected)

        # important: do it before dataclass
        setattr(cls2, "__post_init__", __post_init__)
    cls2.__annotations__ = new_annotations
    # logger.info('new annotations: %s' % new_annotations)
    if is_dataclass(cls):
        frozen = is_frozen(cls)
        cls2 = dataclass(cls2, unsafe_hash=True, frozen=frozen)
    else:
        # noinspection PyUnusedLocal
        def init_placeholder(self, *args, **kwargs):
            if args or kwargs:
                msg = (
                    f"Default constructor of {cls2.__name__} does not know "
                    f"what to do with arguments."
                )
                msg += f"\nargs: {args!r}\nkwargs: {kwargs!r}"
                msg += f"\nself: {self}"
                msg += f"\nself: {dir(type(self))}"
                msg += f"\nself: {type(self)}"
                raise NoConstructorImplemented(msg)

        if cls.__init__ == object.__init__:
            setattr(cls2, "__init__", init_placeholder)
    cls2.__module__ = cls.__module__
    setattr(cls2, "__name__", name2)
    setattr(cls2, "__doc__", getattr(cls, "__doc__"))
    qn = cls.__qualname__
    qn0, sep, _ = qn.rpartition(".")
    if not sep:
        sep = ""
    setattr(cls2, "__qualname__", qn0 + sep + name2)
    orig2 = tuple(replace_typevars(x, bindings=new_bindings, symbols=symbols) for x in clsi.orig)
    clsi2 = DataclassInfo(name=name2, orig=orig2)
    set_dataclass_info(cls2, clsi2)
    MakeTypeCache.cache[cache_key] = cls2
    return cls2
def is_frozen(t):
    """Return whether dataclass ``t`` was declared with ``frozen=True``."""
    params = getattr(t, _PARAMS)
    return params.frozen
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/zeneric2.py
|
zeneric2.py
|
import typing
from dataclasses import _FIELDS, dataclass as dataclass_orig, Field, is_dataclass
from typing import (
Any,
Callable,
cast,
ClassVar,
Dict,
Iterable,
Iterator,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
TYPE_CHECKING,
TypeVar,
Union,
)
from zuper_commons.types import ZAssertionError, ZTypeError, ZValueError
# from .my_intersection import Intersection
from .aliases import TypeLike
from .constants import (
ATT_TUPLE_TYPES,
NAME_ARG,
PYTHON_36,
ZuperTypingGlobals,
ATT_LIST_TYPE,
TUPLE_EMPTY_ATTR,
)
from .literal import get_Literal_args, is_Literal
from .uninhabited import is_Uninhabited
__all__ = [
"is_Set",
"is_List",
"is_Dict",
"is_Optional",
"is_Union",
"is_Tuple",
"is_Literal",
"is_dataclass",
"is_TypeLike",
"is_TypeVar",
"is_Awaitable",
"is_Any",
"is_Callable",
"is_ClassVar",
"is_FixedTuple",
"is_FixedTupleLike_canonical",
"is_ForwardRef",
"is_Iterable",
"is_Iterator",
"is_List_canonical",
"is_MyNamedArg",
"is_NewType",
"is_Sequence",
"is_SpecialForm",
"is_Type",
"is_VarTuple",
"is_VarTuple_canonical",
"get_Set_name_V",
"get_Set_arg",
"get_List_arg",
"get_Dict_name_K_V",
"get_Dict_args",
"get_Literal_args",
"get_TypeVar_bound",
"get_TypeVar_name",
"get_Optional_arg",
"get_Union_args",
"get_Awaitable_arg",
"get_Callable_info",
"get_ClassVar_arg",
"get_ClassVar_name",
"get_Dict_name",
"get_fields_including_static",
"get_FixedTuple_args",
"get_ForwardRef_arg",
"get_Iterable_arg",
"get_Iterable_name",
"get_Iterator_arg",
"get_Iterator_name",
"get_List_name",
"get_MyNamedArg_name",
"get_NewType_arg",
"get_NewType_name",
"get_NewType_repr",
"get_Optional_name",
"get_Sequence_arg",
"get_Sequence_name",
"get_Set_name",
"get_Tuple_name",
"get_tuple_types",
"get_Type_arg",
"get_Type_name",
"get_Union_name",
"get_VarTuple_arg",
"name_for_type_like",
"make_ForwardRef",
"make_Tuple",
"make_TypeVar",
"make_Union",
"make_VarTuple",
"key_for_sorting_types",
"MyBytes",
"MyStr",
"get_ListLike_arg",
"get_FixedTupleLike_args",
"get_CustomTuple_args",
"get_CustomDict_args",
"get_CustomList_arg",
"get_CustomSet_arg",
"get_Dict_args",
"get_DictLike_args",
"get_Dict_name_K_V",
"get_List_arg",
"get_DictLike_name",
"get_ListLike_name",
"get_Set_arg",
"get_Set_name_V",
"get_SetLike_arg",
"get_SetLike_name",
"is_ListLike",
"is_CustomDict",
"is_CustomList",
"is_CustomSet",
"is_CustomTuple",
"is_Dict",
"is_DictLike",
"is_DictLike_canonical",
"is_FixedTupleLike",
"is_List",
"is_ListLike_canonical",
"is_Set",
"is_SetLike",
"is_SetLike_canonical",
"make_list",
"make_CustomTuple",
"make_dict",
"make_set",
"CustomTuple",
"CustomDict",
"CustomList",
"CustomSet",
"lift_to_customtuple",
"lift_to_customtuple_type",
"is_TupleLike",
"get_FixedTupleLike_args",
"get_FixedTupleLike_name",
]
def is_TypeLike(x: object) -> bool:
    """True if ``x`` can be used as a type annotation (a class, a typing
    special form, ClassVar, NamedArg, Type, TypeVar, or NewType)."""
    if isinstance(x, type):
        return True
    # noinspection PyTypeChecker
    return (
        is_SpecialForm(x)
        or is_ClassVar(x)
        or is_MyNamedArg(x)
        or is_Type(x)
        or is_TypeVar(x)
        or is_NewType(x)
    )
def is_SpecialForm(x: TypeLike) -> bool:
    """ Does not include: ClassVar, NamedArg, Type, TypeVar
        Does include: ForwardRef, NewType, Literal
    """
    # FIX: the original listed is_Tuple(x) twice; the duplicate is removed
    # (behavior unchanged: the predicates are pure and the chain short-circuits).
    if (
        is_Any(x)
        or is_Callable(x)
        or is_Dict(x)
        or is_Tuple(x)
        or is_ForwardRef(x)
        or is_Iterable(x)
        or is_Iterator(x)
        or is_List(x)
        or is_NewType(x)
        or is_Optional(x)
        or is_Sequence(x)
        or is_Set(x)
        or is_Union(x)
        or is_Awaitable(x)
        or is_Literal(x)
    ):
        return True
    return False
# noinspection PyProtectedMember
def is_Optional(x: TypeLike) -> bool:
    """True iff ``x`` is Optional[...]: a Union whose last argument is NoneType."""
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._Union) and len(x.__args__) >= 2 and x.__args__[-1] is type(None)
    else:
        # noinspection PyUnresolvedReferences
        return (
            isinstance(x, typing._GenericAlias)
            and (getattr(x, "__origin__") is Union)
            and len(x.__args__) >= 2
            and x.__args__[-1] is type(None)
        )
X = TypeVar("X")
def get_Optional_arg(x: Type[Optional[X]]) -> Type[X]:
    """Extract T from Optional[T]; Optional over several arms yields the
    Union of the non-None arms."""
    assert is_Optional(x)
    args = x.__args__
    if len(args) > 2:
        return make_Union(*args[:-1])
    # exactly (T, NoneType)
    return args[0]
def is_Union(x: TypeLike) -> bool:
    """ Union[X, None] is not considered a Union"""
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return not is_Optional(x) and isinstance(x, typing._Union)
    else:
        # noinspection PyUnresolvedReferences
        return not is_Optional(x) and isinstance(x, typing._GenericAlias) and (x.__origin__ is Union)
def get_Union_args(x: TypeLike) -> Tuple[TypeLike, ...]:
    """Return the arms of a Union annotation as a tuple."""
    assert is_Union(x), x
    # noinspection PyUnresolvedReferences
    return tuple(x.__args__)
def key_for_sorting_types(y: TypeLike) -> tuple:
    """Sort key for canonicalizing Unions: plain types first (by name),
    then TypeVars, then dataclasses."""
    if is_TypeVar(y):
        return (1, get_TypeVar_name(y))
    if is_dataclass(y):
        return (2, name_for_type_like(y))
    # NoneType never reaches here: it is stripped in favor of Optional
    return (0, name_for_type_like(y))
def remove_duplicates(a: Tuple[TypeLike, ...]) -> Tuple[TypeLike, ...]:
    """Return ``a`` with duplicates removed, preserving first-occurrence order."""
    done = []
    for _ in a:
        assert is_TypeLike(_), a
        if _ not in done:
            done.append(_)
    return tuple(done)
def unroll_union(a: TypeLike) -> Tuple[TypeLike, ...]:
    """Flatten ``a`` into a tuple of Union arms; Optional contributes NoneType,
    anything else is a single arm."""
    if is_Union(a):
        return get_Union_args(a)
    if is_Optional(a):
        return get_Optional_arg(a), type(None)
    return (a,)
def make_Union(*a: TypeLike) -> TypeLike:
    """
    Build a canonical Union: arms are flattened, de-duplicated and sorted;
    NoneType turns the result into Optional; a single surviving arm is
    returned as-is.
    """
    r = ()
    for _ in a:
        if not is_TypeLike(_):
            raise ZValueError(not_typelike=_, inside=a)
        r = r + unroll_union(_)
    a = r
    if len(a) == 0:
        raise ValueError("empty")
    a = remove_duplicates(a)
    if len(a) == 1:
        return a[0]
    # print(list(map(key_for_sorting_types, a)))
    if type(None) in a:
        others = tuple(_ for _ in a if _ is not type(None))
        return Optional[make_Union(*others)]
    a = tuple(sorted(a, key=key_for_sorting_types))
    # small arities are spelled out; the tuple form is the general fallback
    if len(a) == 2:
        x = Union[a[0], a[1]]
    elif len(a) == 3:
        x = Union[a[0], a[1], a[2]]
    elif len(a) == 4:
        x = Union[a[0], a[1], a[2], a[3]]
    elif len(a) == 5:
        x = Union[a[0], a[1], a[2], a[3], a[4]]
    else:
        x = Union.__getitem__(tuple(a))
    return x
class MakeTupleCaches:
    # memoizes make_Tuple results, keyed by the argument tuple
    tuple_caches = {}
def make_VarTuple(a: Type[X]) -> Type[Tuple[X, ...]]:
    """Build the variadic tuple type Tuple[a, ...]."""
    return make_Tuple(a, ...)
class DummyForEmpty:
    # sentinel type argument used to encode the empty tuple type, since
    # Tuple[()] cannot be constructed uniformly across Python versions
    pass
def make_Tuple(*a: TypeLike) -> Type[Tuple]:
    """
    Build (and cache) the tuple type Tuple[a...]; the empty case is encoded
    as Tuple[DummyForEmpty] tagged with TUPLE_EMPTY_ATTR.
    """
    for _ in a:
        if isinstance(_, tuple):
            raise ValueError(a)
    if a in MakeTupleCaches.tuple_caches:
        return MakeTupleCaches.tuple_caches[a]
    if len(a) == 0:
        x = Tuple[DummyForEmpty]
        setattr(x, TUPLE_EMPTY_ATTR, True)
    elif len(a) == 1:
        x = Tuple[a[0]]
    elif len(a) == 2:
        x = Tuple[a[0], a[1]]
    elif len(a) == 3:
        x = Tuple[a[0], a[1], a[2]]
    elif len(a) == 4:
        x = Tuple[a[0], a[1], a[2], a[3]]
    elif len(a) == 5:
        x = Tuple[a[0], a[1], a[2], a[3], a[4]]
    else:
        if PYTHON_36:  # pragma: no cover
            x = Tuple[a]
        else:
            # NOTE: actually correct
            # noinspection PyArgumentList
            x = Tuple.__getitem__(tuple(a))
    MakeTupleCaches.tuple_caches[a] = x
    return x
def _check_valid_arg(x: Any) -> None:
    """In paranoid mode, reject string (unresolved forward) annotations early."""
    if not ZuperTypingGlobals.paranoid:
        return
    if isinstance(x, str):  # pragma: no cover
        msg = f"The annotations must be resolved: {x!r}"
        raise ValueError(msg)
def is_ForwardRef(x: TypeLike) -> bool:
    """True iff ``x`` is a typing ForwardRef."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._ForwardRef)
    else:
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.ForwardRef)
class CacheFor:
    # memoizes make_ForwardRef by the referenced name
    cache = {}
def make_ForwardRef(n: str) -> TypeLike:
    """Create (and cache) a typing ForwardRef for the name ``n``."""
    if n in CacheFor.cache:
        return CacheFor.cache[n]
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        res = typing._ForwardRef(n)
    else:
        # noinspection PyUnresolvedReferences
        res = typing.ForwardRef(n)
    CacheFor.cache[n] = res
    return res
def get_ForwardRef_arg(x: TypeLike) -> str:
    """Return the name string a ForwardRef refers to."""
    assert is_ForwardRef(x)
    # noinspection PyUnresolvedReferences
    return x.__forward_arg__
def is_Any(x: TypeLike) -> bool:
    """True iff ``x`` is typing.Any."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        return x is Any
    else:
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._SpecialForm) and x._name == "Any"
class CacheTypeVar:
    # memoizes make_TypeVar by (name, bound, contravariant, covariant)
    cache = {}
# For static checkers, make_TypeVar is just TypeVar; at runtime it is a
# memoizing factory so equal declarations yield the identical object.
if TYPE_CHECKING:
    make_TypeVar = TypeVar
else:

    def make_TypeVar(
        name: str, *, bound: Optional[type] = None, contravariant: bool = False, covariant: bool = False,
    ) -> TypeVar:
        key = (name, bound, contravariant, covariant)
        if key in CacheTypeVar.cache:
            return CacheTypeVar.cache[key]
        # noinspection PyTypeHints
        res = TypeVar(name, bound=bound, contravariant=contravariant, covariant=covariant)
        CacheTypeVar.cache[key] = res
        return res
def is_TypeVar(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.TypeVar."""
    return isinstance(x, typing.TypeVar)
def get_TypeVar_bound(x: TypeVar) -> TypeLike:
    """Return the declared bound of a TypeVar, defaulting to ``object``."""
    assert is_TypeVar(x), x
    bound = x.__bound__
    return object if bound is None else bound
def get_TypeVar_name(x: TypeVar) -> str:
    """Return the name of the TypeVar."""
    assert is_TypeVar(x), x
    return x.__name__
def is_ClassVar(x: TypeLike) -> bool:
    """True iff ``x`` is a ClassVar[...] annotation."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._ClassVar)
    else:
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._GenericAlias) and (x.__origin__ is typing.ClassVar)
def get_ClassVar_arg(x: TypeLike) -> TypeLike:  # cannot put ClassVar
    """Return T from ClassVar[T]."""
    assert is_ClassVar(x), x
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return x.__type__
    else:
        # noinspection PyUnresolvedReferences
        return x.__args__[0]
def get_ClassVar_name(x: TypeLike) -> str:  # cannot put ClassVar
    """Render ``ClassVar[T]`` as a string."""
    assert is_ClassVar(x), x
    inner = name_for_type_like(get_ClassVar_arg(x))
    return f"ClassVar[{inner}]"
def is_Type(x: TypeLike) -> bool:
    """True iff ``x`` is typing.Type or Type[...]."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return (x is typing.Type) or (isinstance(x, typing.GenericMeta) and (x.__origin__ is typing.Type))
    else:
        # noinspection PyUnresolvedReferences
        return (x is typing.Type) or (isinstance(x, typing._GenericAlias) and (x.__origin__ is type))
def is_NewType(x: TypeLike) -> bool:
    """Heuristic: NewType objects are functions carrying a __supertype__ attribute."""
    _check_valid_arg(x)
    # if PYTHON_36:  # pragma: no cover
    #     # noinspection PyUnresolvedReferences
    #     return (x is typing.Type) or (isinstance(x, typing.GenericMeta) and (x.__origin__
    #     is typing.Type))
    # else:
    #     return (x is typing.Type) or (isinstance(x, typing._GenericAlias) and (x.__origin__ is
    #     type))
    res = hasattr(x, "__supertype__") and type(x).__name__ == "function"
    return res
def get_NewType_arg(x: TypeLike) -> TypeLike:
    """Return the base type that the NewType wraps."""
    assert is_NewType(x), x
    # noinspection PyUnresolvedReferences
    base = x.__supertype__
    return base
def get_NewType_name(x: TypeLike) -> str:
    """Return the name the NewType was created with."""
    return x.__name__
def get_NewType_repr(x: TypeLike) -> str:
    """repr-like rendering of a NewType; the base is omitted when trivial (Any/object)."""
    name = get_NewType_name(x)
    base = get_NewType_arg(x)
    if is_Any(base) or base is object:
        return f"NewType({name!r})"
    return f"NewType({name!r}, {name_for_type_like(base)})"
def is_Tuple(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Tuple alias (the builtin ``tuple`` is excluded)."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.TupleMeta)
    else:
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._GenericAlias) and (x._name == "Tuple")
def is_FixedTuple(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Tuple of fixed arity (not Tuple[T, ...])."""
    if not is_Tuple(x):
        return False
    x = cast(Type[Tuple], x)
    ts = get_tuple_types(x)
    # if len(ts) == 0:
    #     return False
    # a variadic tuple reports its args as (T, Ellipsis)
    if len(ts) == 2 and ts[-1] is ...:
        return False
    else:
        return True
def is_VarTuple(x: TypeLike) -> bool:
    """True iff ``x`` is a variadic tuple: the builtin ``tuple`` or Tuple[T, ...]."""
    if x is tuple:
        return True
    if not is_Tuple(x):
        return False
    x = cast(Type[Tuple], x)
    ts = get_tuple_types(x)
    if len(ts) == 2 and ts[-1] is ...:
        return True
    else:
        return False
def get_FixedTuple_args(x: Type[Tuple]) -> Tuple[TypeLike, ...]:
    """Return the component types of a fixed-arity Tuple."""
    assert is_FixedTuple(x), x
    return get_tuple_types(x)
def is_VarTuple_canonical(x: Type[Tuple]) -> bool:
    """A variadic tuple annotation is canonical unless it is the bare ``tuple``/``Tuple``."""
    return x is not tuple and x is not Tuple
#
# def is_FixedTuple_canonical(x: Type[Tuple]) -> bool:
# return (x is not tuple) and (x is not Tuple)
def is_FixedTupleLike_canonical(x: Type[Tuple]) -> bool:
    """A fixed-tuple-like annotation is canonical unless it is the bare ``tuple``/``Tuple``."""
    return x is not tuple and x is not Tuple
def get_VarTuple_arg(x: Type[Tuple[X, ...]]) -> Type[X]:
    """Return the element type T of a variadic Tuple[T, ...]; bare ``tuple`` yields Any."""
    if x is tuple:
        return Any
    assert is_VarTuple(x), x
    ts = get_tuple_types(x)
    # if len(ts) == 0:  # pragma: no cover
    #     return Any
    return ts[0]
def is_generic_alias(x: TypeLike, name: str) -> bool:
    """True iff ``x`` is a typing._GenericAlias whose ``_name`` equals ``name``."""
    # noinspection PyUnresolvedReferences
    if not isinstance(x, typing._GenericAlias):
        return False
    return x._name == name
def is_List(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.List alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return x is typing.List or isinstance(x, typing.GenericMeta) and x.__origin__ is typing.List
    else:
        return is_generic_alias(x, "List")
def is_Iterator(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Iterator alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return x is typing.Iterator or isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Iterator
    else:
        return is_generic_alias(x, "Iterator")
def is_Iterable(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Iterable alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return x is typing.Iterable or isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Iterable
    else:
        return is_generic_alias(x, "Iterable")
def is_Sequence(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Sequence alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return x is typing.Sequence or isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Sequence
    else:
        return is_generic_alias(x, "Sequence")
def is_placeholder_typevar(x: TypeLike) -> bool:
    """True for the anonymous TypeVars ("T", "T_co") that bare typing aliases carry."""
    return is_TypeVar(x) and get_TypeVar_name(x) in ("T", "T_co")
def get_Set_arg(x: Type[Set]) -> TypeLike:
    """Return the element type of a Set annotation; bare/placeholder Set yields Any."""
    assert is_Set(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x is typing.Set:
            return Any
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return Any
    return t
def get_List_arg(x: Type[List[X]]) -> Type[X]:
    """Return the element type of a List annotation; bare/placeholder List yields Any."""
    assert is_List(x), x
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return Any
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return Any
    return t
def is_List_canonical(x: Type[List]) -> bool:
    """A List annotation is canonical when it has an explicit, non-placeholder argument."""
    assert is_List(x), x
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return False
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return False
    return True
_K = TypeVar("_K")
_V = TypeVar("_V")
def get_Dict_args(T: Type[Dict[_K, _V]]) -> Tuple[Type[_K], Type[_V]]:
    """Return (K, V) of a Dict annotation; bare Dict yields (Any, Any)."""
    assert is_Dict(T), T
    if T is Dict:
        return Any, Any
    # noinspection PyUnresolvedReferences
    K, V = T.__args__
    if PYTHON_36:  # pragma: no cover
        if is_placeholder_typevar(K):
            K = Any
        if is_placeholder_typevar(V):
            V = Any
    return K, V
_X = TypeVar("_X")
def get_Iterator_arg(x: TypeLike) -> TypeLike:  # PyCharm has problems
    """Return the element type of an Iterator annotation; bare/placeholder yields Any."""
    assert is_Iterator(x), x
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return Any
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return Any
    return t
def get_Iterable_arg(x: TypeLike) -> TypeLike:  # PyCharm has problems
    """Return the element type of an Iterable annotation; bare/placeholder yields Any."""
    assert is_Iterable(x), x
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return Any
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return Any
    return t
def get_Sequence_arg(x: Type[Sequence[_X]]) -> Type[_X]:
    """Return the element type of a Sequence annotation; bare/placeholder yields Any."""
    assert is_Sequence(x), x
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return Any
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return Any
    return t
def get_Type_arg(x: TypeLike) -> TypeLike:
    """Return T from Type[T]; bare Type (3.6) yields ``type``."""
    assert is_Type(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return type
    # noinspection PyUnresolvedReferences
    return x.__args__[0]
def is_Callable(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Callable alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.CallableMeta)
    else:
        return getattr(x, "_name", None) == "Callable"
    # return hasattr(x, '__origin__') and x.__origin__ is typing.Callable
    # return isinstance(x, typing._GenericAlias) and x.__origin__.__name__ == "Callable"
def is_Awaitable(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Awaitable alias."""
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return getattr(x, "_gorg", None) is typing.Awaitable
    else:
        return getattr(x, "_name", None) == "Awaitable"
def get_Awaitable_arg(x: TypeLike) -> TypeLike:
    """Return the result type of an Awaitable annotation; bare/placeholder yields Any."""
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x.__args__ is None:
            return Any
    # noinspection PyUnresolvedReferences
    t = x.__args__[0]
    if is_placeholder_typevar(t):
        return Any
    return t
def is_MyNamedArg(x: object) -> bool:
    # a NamedArg is recognized purely by the NAME_ARG marker attribute
    return hasattr(x, NAME_ARG)
def get_MyNamedArg_name(x: TypeLike) -> str:
    """Return the argument name stored on a NamedArg marker."""
    assert is_MyNamedArg(x), x
    return getattr(x, NAME_ARG)
def is_Dict(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Dict alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return x is Dict or isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Dict
    else:
        return is_generic_alias(x, "Dict")
def is_Set(x: TypeLike) -> bool:
    """True iff ``x`` is a typing.Set (or FrozenSet) alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x is typing.Set:
            return True
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Set
    else:
        return is_generic_alias(x, "Set") or is_generic_alias(x, "FrozenSet")  # XXX: hack
def get_Dict_name(T: Type[Dict]) -> str:
    """Render a Dict annotation as "Dict[K,V]"."""
    assert is_Dict(T), T
    K, V = get_Dict_args(T)
    return get_Dict_name_K_V(K, V)
def get_Dict_name_K_V(K: TypeLike, V: TypeLike) -> str:
    """Render "Dict[K,V]" from the two component types."""
    return f"Dict[{name_for_type_like(K)},{name_for_type_like(V)}]"
def get_Set_name_V(V: TypeLike) -> str:
    """Render "Set[V]" from the element type."""
    return f"Set[{name_for_type_like(V)}]"
def get_Union_name(V: TypeLike) -> str:
    """Render a Union annotation as "Union[...]"."""
    arms = ",".join(name_for_type_like(a) for a in get_Union_args(V))
    return f"Union[{arms}]"
def get_List_name(V: Type[List]) -> str:
    """Render "List[T]"."""
    inner = name_for_type_like(get_List_arg(V))
    return f"List[{inner}]"
def get_Type_name(V: TypeLike) -> str:
    """Render "Type[T]"."""
    inner = name_for_type_like(get_Type_arg(V))
    return f"Type[{inner}]"
def get_Iterator_name(V: Type[Iterator]) -> str:
    """Render "Iterator[T]"."""
    # noinspection PyTypeChecker
    inner = get_Iterator_arg(V)
    return f"Iterator[{name_for_type_like(inner)}]"
def get_Iterable_name(V: Type[Iterable[X]]) -> str:
    """Render "Iterable[T]"."""
    # noinspection PyTypeChecker
    inner = get_Iterable_arg(V)
    return f"Iterable[{name_for_type_like(inner)}]"
def get_Sequence_name(V: Type[Sequence]) -> str:
    """Render "Sequence[T]"."""
    inner = name_for_type_like(get_Sequence_arg(V))
    return f"Sequence[{inner}]"
def get_Optional_name(V: TypeLike) -> str:  # cannot use Optional as type arg
    """Render "Optional[T]"."""
    inner = name_for_type_like(get_Optional_arg(V))
    return f"Optional[{inner}]"
def get_Set_name(V: Type[Set]) -> str:
    """Render "Set[T]"."""
    inner = name_for_type_like(get_Set_arg(V))
    return f"Set[{inner}]"
def get_Tuple_name(V: Type[Tuple]) -> str:
    """Render "Tuple[...]" with all component types."""
    inner = ",".join(name_for_type_like(t) for t in get_tuple_types(V))
    return f"Tuple[{inner}]"
def get_tuple_types(V: Type[Tuple]) -> Tuple[TypeLike, ...]:
    """ Returns the component types of a tuple type.

        For variadic tuples (bare ``tuple``, empty ``Tuple``) returns the pair
        ``(Any, Ellipsis)``. An explicitly-empty tuple (DummyForEmpty marker or
        the TUPLE_EMPTY_ATTR attribute) yields ``()``.
    """
    # from .annotations_tricks import is_CustomTuple, get_CustomTuple_args, CustomTuple
    if is_CustomTuple(V):
        V = cast(Type[CustomTuple], V)
        args = get_CustomTuple_args(V)
        return args
    if V is tuple:
        return Any, ...
    if PYTHON_36:  # pragma: no cover
        # On 3.6, a bare Tuple has __args__ == None.
        # noinspection PyUnresolvedReferences
        if V.__args__ is None:
            return Any, ...
    # noinspection PyUnresolvedReferences
    args = V.__args__  # XXX
    if args == (DummyForEmpty,):
        return ()
    if args == ():
        # Empty __args__ is ambiguous: only the TUPLE_EMPTY_ATTR marker means
        # "really empty"; otherwise treat as variadic.
        if hasattr(V, TUPLE_EMPTY_ATTR):
            return ()
        else:
            return Any, ...
    else:
        return args
def name_for_type_like(x: TypeLike) -> str:
    """ Returns a human-readable name for any supported type-like object.

        NOTE(review): the order of the checks is significant — e.g. is_List
        must run before is_ListLike, and is_Dict before is_DictLike — do not
        reorder the elif chain.
    """
    if is_Any(x):
        return "Any"
    elif isinstance(x, typing.TypeVar):
        return x.__name__
    elif x is type(None):
        return "NoneType"
    elif x is MyStr:
        # The marker subclasses display as the plain builtin names.
        return "str"
    elif x is MyBytes:
        return "bytes"
    elif is_Union(x):
        return get_Union_name(x)
    elif is_List(x):
        x = cast(Type[List], x)
        return get_List_name(x)
    elif is_Iterator(x):
        x = cast(Type[Iterator], x)
        # noinspection PyTypeChecker
        return get_Iterator_name(x)
    elif is_Iterable(x):
        x = cast(Type[Iterable], x)
        # noinspection PyTypeChecker
        return get_Iterable_name(x)
    elif is_Tuple(x):
        x = cast(Type[Tuple], x)
        return get_Tuple_name(x)
    elif is_Set(x):
        x = cast(Type[Set], x)
        return get_Set_name(x)
    elif is_SetLike(x):
        x = cast(Type[Set], x)
        return get_SetLike_name(x)
    elif is_Dict(x):
        x = cast(Type[Dict], x)
        return get_Dict_name(x)
    elif is_DictLike(x):
        x = cast(Type[Dict], x)
        return get_DictLike_name(x)
    elif is_Type(x):
        return get_Type_name(x)
    elif is_ClassVar(x):
        return get_ClassVar_name(x)
    elif is_Sequence(x):
        x = cast(Type[Sequence], x)
        return get_Sequence_name(x)
    elif is_Optional(x):
        return get_Optional_name(x)
    elif is_NewType(x):
        return get_NewType_name(x)
        # return get_NewType_repr(x)
    elif is_Literal(x):
        s = ",".join(repr(_) for _ in get_Literal_args(x))
        return f"Literal[{s}]"
    elif is_ForwardRef(x):
        a = get_ForwardRef_arg(x)
        return f"ForwardRef({a!r})"
    elif is_Uninhabited(x):
        return "!"
    elif is_Callable(x):
        info = get_Callable_info(x)

        # params = ','.join(name_for_type_like(p) for p in info.parameters_by_position)
        def ps(k, v):
            if k.startswith("__"):
                return name_for_type_like(v)
            else:
                return f"NamedArg({name_for_type_like(v)},{k!r})"

        params = ",".join(ps(k, v) for k, v in info.parameters_by_name.items())
        ret = name_for_type_like(info.returns)
        return f"Callable[[{params}],{ret}]"
    elif x is typing.IO:
        return str(x)  # TODO: should get the attribute
    elif hasattr(x, "__name__"):
        # Fallback: any object carrying a __name__ (plain classes etc.).
        # logger.info(f'not matching __name__ {type(x)} {x!r}')
        return x.__name__
    else:
        # logger.info(f'not matching {type(x)} {x!r}')
        return str(x)
# do not make a dataclass
class CallableInfo:
    """ Describes the signature of a Callable[...] type: the parameter types
        (by name and by position), their ordering, and the return type. """

    parameters_by_name: Dict[str, TypeLike]
    parameters_by_position: Tuple[TypeLike, ...]
    ordering: Tuple[str, ...]
    returns: TypeLike

    def __init__(self, parameters_by_name, parameters_by_position, ordering, returns):
        # MyNamedArg wrappers must already have been unwrapped by the caller
        # (see get_Callable_info).
        for k, v in parameters_by_name.items():
            assert not is_MyNamedArg(v), v
        for v in parameters_by_position:
            assert not is_MyNamedArg(v), v
        self.parameters_by_name = parameters_by_name
        self.parameters_by_position = parameters_by_position
        self.ordering = ordering
        self.returns = returns

    def __repr__(self) -> str:
        return (
            f"CallableInfo({self.parameters_by_name!r}, {self.parameters_by_position!r}, "
            f"{self.ordering}, {self.returns})"
        )

    def replace(self, f: typing.Callable[[Any], Any]) -> "CallableInfo":
        """ Returns a new CallableInfo with f applied to every parameter type
            and to the return type. """
        parameters_by_name = {k: f(v) for k, v in self.parameters_by_name.items()}
        parameters_by_position = tuple(f(v) for v in self.parameters_by_position)
        ordering = self.ordering
        returns = f(self.returns)
        return CallableInfo(parameters_by_name, parameters_by_position, ordering, returns)

    def as_callable(self) -> typing.Callable:
        """ Reconstructs a typing.Callable[...] alias from this info. """
        args = []
        for k, v in self.parameters_by_name.items():
            # if is_MyNamedArg(v):
            #     # try:
            #     v = v.original
            # TODO: add MyNamedArg
            args.append(v)
        # noinspection PyTypeHints
        return Callable[args, self.returns]
def get_Callable_info(x: Type[typing.Callable]) -> CallableInfo:
    """ Extracts a CallableInfo from a typing.Callable[...] alias.

        Positional (unnamed) parameters get synthetic names "0", "1", ...;
        MyNamedArg wrappers are unwrapped, keeping their declared name.
        A Callable with no __args__ is treated as returning Any with no
        parameters.
    """
    assert is_Callable(x), x
    parameters_by_name = {}
    parameters_by_position = []
    ordering = []
    args = x.__args__
    if args:
        # The last element of __args__ is the return type.
        returns = args[-1]
        rest = args[:-1]
    else:
        returns = Any
        rest = ()
    for i, a in enumerate(rest):
        if is_MyNamedArg(a):
            name = get_MyNamedArg_name(a)
            t = a.original
            # t = a
        else:
            name = f"{i}"
            t = a
        parameters_by_name[name] = t
        ordering.append(name)
        parameters_by_position.append(t)
    return CallableInfo(
        parameters_by_name=parameters_by_name,
        parameters_by_position=tuple(parameters_by_position),
        ordering=tuple(ordering),
        returns=returns,
    )
def get_fields_including_static(x: Type[dataclass_orig]) -> Dict[str, Field]:
    """ returns the fields including classvars """
    # Reads the dataclass machinery's private _FIELDS mapping directly, which
    # (unlike dataclasses.fields()) also includes ClassVar pseudo-fields.
    fields = getattr(x, _FIELDS)
    return fields
# _V = TypeVar("_V")
# _K = TypeVar("_K")
#
# _X = TypeVar("_X")
_Y = TypeVar("_Y")
_Z = TypeVar("_Z")
class MyBytes(bytes):
    """ Marker subclass of bytes; name_for_type_like() displays it as "bytes". """

    pass
class MyStr(str):
    """ Marker subclass of str; name_for_type_like() displays it as "str". """

    pass
class CustomSet(set):
    """ Base class for generated typed sets; the element type is stored in the
        class attribute __set_type__ (set by make_set()). """

    __set_type__: ClassVar[type]

    def __hash__(self) -> Any:
        # Hash is computed once and cached on the instance. Sorting makes the
        # hash independent of insertion order; if the elements are not
        # sortable, fall back to iteration order.
        try:
            return self._cached_hash
        except AttributeError:
            try:
                h = self._cached_hash = hash(tuple(sorted(self)))
            except TypeError:  # pragma: no cover
                h = self._cached_hash = hash(tuple(self))
            return h
class CustomList(list):
    """ Base class for generated typed lists; the element type is stored in the
        class attribute __list_type__ (set by make_list()). """

    __list_type__: ClassVar[type]

    def __hash__(self) -> Any:  # pragma: no cover
        # Hash computed once and cached on the instance.
        try:
            return self._cached_hash
        except AttributeError:  # pragma: no cover
            h = self._cached_hash = hash(tuple(self))
            return h

    def __getitem__(self, i):
        # Slicing preserves the typed subclass; single-item access returns the
        # element itself.
        T = type(self)
        if isinstance(i, slice):
            # noinspection PyArgumentList
            return T(list.__getitem__(self, i))
        return list.__getitem__(self, i)

    def __add__(self, other):
        # Concatenation keeps the (left operand's) typed subclass.
        r = super().__add__(other)
        T = type(self)
        # noinspection PyArgumentList
        return T(r)
class CustomTuple(tuple):
    """ Base class for generated fixed-size typed tuples; the component types
        are stored in the class attribute __tuple_types__ (set by
        make_CustomTuple()). """

    __tuple_types__: ClassVar[Tuple[type, ...]]

    def __new__(cls, *all_args):
        if not all_args:
            args = ()
        else:
            (args,) = all_args
        if ZuperTypingGlobals.check_tuple_values:
            # Optional runtime check: each entry must be a Liskov-compatible
            # value for the declared component type.
            from .subcheck import value_liskov

            for i, (a, T) in enumerate(zip(args, cls.__tuple_types__)):
                can = value_liskov(a, T)
                if not can:
                    msg = f"Entry #{i} does not pass the liskov test."
                    raise ZValueError(
                        msg, args=args, __tuple_types__=cls.__tuple_types__, i=i, a=a, T=T, can=can
                    )

        # logger.info('hello', __tuple_types__=cls.__tuple_types__, args=args)
        # noinspection PyTypeChecker
        return tuple.__new__(cls, args)

    def __hash__(self) -> Any:  # pragma: no cover
        # Hash computed once and cached on the instance.
        try:
            return self._cached_hash
        except AttributeError:  # pragma: no cover
            h = self._cached_hash = hash(tuple(self))
            return h

    def __getitem__(self, i):
        # Slicing produces a new CustomTuple type whose component types are the
        # matching slice of __tuple_types__.
        if isinstance(i, slice):
            vals = super().__getitem__(i)
            types = self.__tuple_types__[i]
            T2 = make_CustomTuple(types)
            # noinspection PyArgumentList
            return T2(vals)
        else:
            return super().__getitem__(i)

    def __add__(self, other):
        # Concatenation concatenates the component types as well; a plain
        # tuple on the right contributes Any for each of its elements.
        vals = super().__add__(other)
        if isinstance(other, CustomTuple):
            types2 = type(other).__tuple_types__
        else:
            types2 = (Any,) * len(other)
        T2 = make_CustomTuple(self.__tuple_types__ + types2)
        # noinspection PyArgumentList
        return T2(vals)
class CustomDict(dict):
    """ Base class for generated typed dicts; the (K, V) pair is stored in the
        class attribute __dict_type__ (set by make_dict()). """

    __dict_type__: ClassVar[Tuple[type, type]]

    def __hash__(self) -> Any:
        # Hash computed once and cached. Items are sorted to make the hash
        # independent of insertion order; unsortable keys fall back to
        # iteration order. Unhashable contents raise ZTypeError.
        try:
            return self._cached_hash
        except AttributeError:
            pass
        try:
            it = tuple(sorted(self.items()))
        except TypeError:
            it = tuple(self.items())
        try:
            h = self._cached_hash = hash(tuple(it))
        except TypeError as e:
            msg = "Cannot compute hash"
            raise ZTypeError(msg, it=it) from e
        return h

    def copy(self: _X) -> _X:
        # Unlike dict.copy(), preserves the typed subclass.
        return type(self)(self)
def get_CustomSet_arg(x: Type[CustomSet]) -> TypeLike:
    """ Returns the element type of a CustomSet subclass. """
    assert is_CustomSet(x), x
    return x.__set_type__
def get_CustomList_arg(x: Type[CustomList]) -> TypeLike:
    """ Returns the element type of a CustomList subclass. """
    assert is_CustomList(x), x
    if not hasattr(x, ATT_LIST_TYPE):
        msg = "CustomList without __list_type__?"
        raise ZValueError(msg, x=type(x), x2=str(x), d=x.__dict__)
    return getattr(x, ATT_LIST_TYPE)
def get_CustomDict_args(x: Type[CustomDict]) -> Tuple[TypeLike, TypeLike]:
    """ Returns the (K, V) pair of a CustomDict subclass. """
    assert is_CustomDict(x), x
    return x.__dict_type__
def get_CustomTuple_args(x: Type[CustomTuple]) -> Tuple[TypeLike, ...]:
    """ Returns the component types of a CustomTuple subclass. """
    assert is_CustomTuple(x), x
    return x.__tuple_types__
def is_CustomSet(x: TypeLike) -> bool:
    """ True for strict subclasses of CustomSet (not CustomSet itself). """
    if not isinstance(x, type):
        return False
    return x is not CustomSet and issubclass(x, CustomSet)
def is_CustomList(x: TypeLike) -> bool:
    """ True for strict subclasses of CustomList (not CustomList itself). """
    if not isinstance(x, type):
        return False
    return x is not CustomList and issubclass(x, CustomList)
def is_CustomDict(x: TypeLike) -> bool:
    """ True for strict subclasses of CustomDict (not CustomDict itself). """
    if not isinstance(x, type):
        return False
    return x is not CustomDict and issubclass(x, CustomDict)
def is_CustomTuple(x: TypeLike) -> bool:
    """ True for strict subclasses of CustomTuple (not CustomTuple itself). """
    if not isinstance(x, type):
        return False
    return x is not CustomTuple and issubclass(x, CustomTuple)
def is_SetLike(x: TypeLike) -> bool:
    """ True for builtin set, typing.Set[...], or a CustomSet subclass. """
    if x is set:
        return True
    return is_Set(x) or is_CustomSet(x)
def is_ListLike(x: TypeLike) -> bool:
    """ True for builtin list, typing.List[...], or a CustomList subclass. """
    if x is list:
        return True
    return is_List(x) or is_CustomList(x)
def is_DictLike(x: TypeLike) -> bool:
    """ True for builtin dict, typing.Dict[...], or a CustomDict subclass. """
    if x is dict:
        return True
    return is_Dict(x) or is_CustomDict(x)
def is_ListLike_canonical(x: Type[List]) -> bool:
    """ A list-like is in canonical form iff it is a CustomList subclass. """
    return is_CustomList(x)
def is_DictLike_canonical(x: Type[Dict]) -> bool:
    """ A dict-like is in canonical form iff it is a CustomDict subclass. """
    return is_CustomDict(x)
def is_SetLike_canonical(x: Type[Set]) -> bool:
    """ A set-like is in canonical form iff it is a CustomSet subclass. """
    return is_CustomSet(x)
def get_SetLike_arg(x: Type[Set[_V]]) -> Type[_V]:
    """ Returns the element type of any set-like; bare ``set`` yields Any. """
    if x is set:
        return Any
    if is_Set(x):
        return get_Set_arg(x)
    if is_CustomSet(x):
        x = cast(Type[CustomSet], x)
        return get_CustomSet_arg(x)
    assert False, x
def get_ListLike_arg(x: Type[List[_V]]) -> Type[_V]:
    """ Returns the element type of any list-like; bare ``list`` yields Any. """
    if x is list:
        return Any
    if is_List(x):
        return get_List_arg(x)
    if is_CustomList(x):
        # noinspection PyTypeChecker
        return get_CustomList_arg(x)
    assert False, x
def get_DictLike_args(x: Type[Dict[_K, _V]]) -> Tuple[Type[_K], Type[_V]]:
    """ Returns (K, V) for any dict-like; bare ``dict`` yields (Any, Any). """
    assert is_DictLike(x), x
    if is_Dict(x):
        return get_Dict_args(x)
    elif is_CustomDict(x):
        x = cast(Type[CustomDict], x)
        return get_CustomDict_args(x)
    elif x is dict:
        return Any, Any
    else:
        assert False, x
def get_DictLike_name(T: Type[Dict]) -> str:
    """ Display name for any dict-like, e.g. "Dict[int,str]". """
    assert is_DictLike(T)
    return get_Dict_name_K_V(*get_DictLike_args(T))
def get_ListLike_name(x: Type[List]) -> str:
    """ Display name for any list-like, e.g. "List[int]". """
    return f"List[{name_for_type_like(get_ListLike_arg(x))}]"
def get_SetLike_name(x: Type[Set]) -> str:
    """ Display name for any set-like, e.g. "Set[int]". """
    return f"Set[{name_for_type_like(get_SetLike_arg(x))}]"
Q_ = TypeVar("Q_")
K__ = TypeVar("K__")
V__ = TypeVar("V__")
class Caches:
    """ Process-wide memoization for the make_* type factories, so that the
        same (element types) always map to the same generated class. """

    use_cache = True
    make_set_cache: Dict[Type[Q_], Type[CustomSet]] = {}
    make_list_cache: Dict[Type[Q_], Type[CustomList]] = {}
    make_dict_cache: Dict[Tuple[Type[K__], Type[V__]], Type[CustomDict]] = {}
    make_tuple_cache: Dict[Tuple[TypeLike, ...], Type[CustomTuple]] = {}
def assert_good_typelike(x: TypeLike) -> None:
    """ Validation hook for arguments of the make_* factories.

        Currently a no-op: plain types return early, and everything else is
        accepted as well (the stricter check below is disabled). """
    if isinstance(x, type):
        return
    # if is_dataclass(type(x)):
    #     n = type(x).__name__
    #     if n in ["Constant"]:
    #         raise AssertionError(x)
# def make_list(V0: Type[_X]) -> Intersection[Type[List[Type[_X]]], Callable[[List[_X]], List[_X]]]:
def make_list(V0: Type[_X]) -> Type[List[Type[_X]]]:
    """ Returns (and caches) a CustomList subclass with element type V0.

        The generated class compares equal to List[V0] and to any other
        CustomList subclass with the same element type; it carries an EMPTY
        singleton instance. """
    if Caches.use_cache:
        if V0 in Caches.make_list_cache:
            return Caches.make_list_cache[V0]
    assert_good_typelike(V0)

    class MyType(type):
        def __eq__(self, other) -> bool:
            V2 = getattr(self, "__list_type__")
            if is_List(other):
                return V2 == get_List_arg(other)
            res2 = isinstance(other, type) and issubclass(other, CustomList) and other.__list_type__ == V2
            return res2

        def __hash__(cls) -> Any:  # pragma: no cover
            # Constant hash: equality does the real work.
            return 1  # XXX

        # logger.debug(f'here ___eq__ {self} {other} {issubclass(other, CustomList)} = {res}')

    def copy(self: _X) -> _X:
        return type(self)(self)

    attrs = {"__list_type__": V0, "copy": copy}
    # name = get_List_name(V)
    name = "List[%s]" % name_for_type_like(V0)
    res = MyType(name, (CustomList,), attrs)
    setattr(res, "EMPTY", res([]))
    Caches.make_list_cache[V0] = res
    add_class_to_module(res)
    # noinspection PyTypeChecker
    return res
def add_class_to_module(C: type) -> None:
    """ Adds the class to this module's dictionary, so that Pickle can save it. """
    globals()[C.__name__] = C
def lift_to_customtuple(vs: tuple):
    """ Wraps a plain tuple into a CustomTuple whose component types are the
        concrete types of the given values. """
    ts = tuple(type(_) for _ in vs)
    T = make_CustomTuple(ts)
    # noinspection PyArgumentList
    return T(vs)
def lift_to_customtuple_type(vs: tuple, T: type):
    """ Wraps a plain tuple into a CustomTuple with every component declared as
        T; if T is Any, the concrete value types are used instead. """
    if T is Any:
        ts = tuple(type(_) for _ in vs)
        # raise ZAssertionError(vs=vs, T=T)
    else:
        ts = tuple(T for _ in vs)
    T = make_CustomTuple(ts)
    # noinspection PyArgumentList
    return T(vs)
def make_CustomTuple(Vs: Tuple[TypeLike, ...]) -> Type[Tuple]:
    """ Returns (and caches) a CustomTuple subclass with component types Vs.

        The generated class compares equal to any fixed-tuple-like with the
        same component types. """
    if Vs == (Any, Any):
        # Guard against a degenerate combination (left from debugging a
        # specific caller; see the commented-out check below).
        raise ZAssertionError(Vs=Vs)
    # if len(Vs) == 2:
    #     from zuper_lang.lang import Constant, EXP, EV
    #     if Vs[0] is Constant and Vs[1] is EV:
    #         raise ZValueError(Vs=Vs)
    if Caches.use_cache:
        if Vs in Caches.make_tuple_cache:
            return Caches.make_tuple_cache[Vs]
    for _ in Vs:
        assert_good_typelike(_)

    class MyTupleType(type):
        def __eq__(self, other) -> bool:
            V2 = getattr(self, ATT_TUPLE_TYPES)
            if is_FixedTupleLike(other):
                return V2 == get_FixedTupleLike_args(other)
            res2 = (
                isinstance(other, type)
                and issubclass(other, CustomTuple)
                and getattr(other, ATT_TUPLE_TYPES) == V2
            )
            return res2

        def __hash__(cls) -> Any:  # pragma: no cover
            # Constant hash: equality does the real work.
            return 1  # XXX

        # logger.debug(f'here ___eq__ {self} {other} {issubclass(other, CustomList)} = {res}')

    def copy(self: _X) -> _X:
        return type(self)(self)

    attrs = {ATT_TUPLE_TYPES: Vs, "copy": copy}
    # name = get_List_name(V)
    s = ",".join(name_for_type_like(_) for _ in Vs)
    name = "Tuple[%s]" % s
    res = MyTupleType(name, (CustomTuple,), attrs)
    # setattr(res, "EMPTY", res())
    Caches.make_tuple_cache[Vs] = res
    add_class_to_module(res)
    # noinspection PyTypeChecker
    return res
def make_set(V: TypeLike) -> Type[CustomSet]:
    """ Returns (and caches) a CustomSet subclass with element type V.

        The generated class compares equal to Set[V] and to any other
        CustomSet subclass with the same element type; it carries an EMPTY
        singleton instance. """
    if Caches.use_cache:
        if V in Caches.make_set_cache:
            return Caches.make_set_cache[V]
    assert_good_typelike(V)

    class MyType(type):
        def __eq__(self, other) -> bool:
            V2 = getattr(self, "__set_type__")
            if is_Set(other):
                return V2 == get_Set_arg(other)
            res2 = isinstance(other, type) and issubclass(other, CustomSet) and other.__set_type__ == V2
            return res2

        def __hash__(cls) -> Any:  # pragma: no cover
            # Constant hash: equality does the real work.
            return 1  # XXX

    def copy(self: _X) -> _X:
        return type(self)(self)

    attrs = {"__set_type__": V, "copy": copy}
    name = get_Set_name_V(V)
    res = MyType(name, (CustomSet,), attrs)
    setattr(res, "EMPTY", res([]))
    Caches.make_set_cache[V] = res
    add_class_to_module(res)
    # noinspection PyTypeChecker
    return res
# from . import logger
# def make_dict(K: Type[X], V: Type[Y]) -> Type[Dict[Type[X], Type[Y]]]:
def make_dict(K: TypeLike, V: TypeLike) -> type:  # Type[CustomDict]:
    """ Returns (and caches) a CustomDict subclass with key/value types (K, V).

        The generated class compares equal to Dict[K, V] and to any other
        CustomDict subclass with the same (K, V); it carries an EMPTY
        singleton instance.

        NOTE(review): only V is validated against being a string; K is not —
        confirm whether that asymmetry is intended. """
    key = (K, V)
    if Caches.use_cache:
        if key in Caches.make_dict_cache:
            return Caches.make_dict_cache[key]
    assert_good_typelike(K)
    assert_good_typelike(V)

    class MyType(type):
        def __eq__(self, other) -> bool:
            K2, V2 = getattr(self, "__dict_type__")
            if is_Dict(other):
                K1, V1 = get_Dict_args(other)
                return K2 == K1 and V2 == V1
            res2 = (
                isinstance(other, type) and issubclass(other, CustomDict) and other.__dict_type__ == (K2, V2)
            )
            return res2

        def __hash__(cls) -> Any:  # pragma: no cover
            # Constant hash: equality does the real work.
            return 1  # XXX

    if isinstance(V, str):  # pragma: no cover
        msg = f"Trying to make dict with K = {K!r} and V = {V!r}; I need types, not strings."
        raise ValueError(msg)
    # warnings.warn('Creating dict', stacklevel=2)
    attrs = {"__dict_type__": (K, V)}
    name = get_Dict_name_K_V(K, V)
    res = MyType(name, (CustomDict,), attrs)
    setattr(res, "EMPTY", res({}))
    Caches.make_dict_cache[key] = res
    # noinspection PyUnresolvedReferences
    # import zuper_typing.my_dict
    #
    # zuper_typing.my_dict.__dict__[res.__name__] = res
    # noinspection PyTypeChecker
    add_class_to_module(res)
    return res
def is_TupleLike(x: TypeLike) -> bool:
    """ True for builtin tuple, typing.Tuple[...], or a CustomTuple subclass. """
    if x is tuple:
        return True
    return is_Tuple(x) or is_CustomTuple(x)
def is_FixedTupleLike(x: TypeLike) -> bool:
    """ True for fixed-size tuple types (typing fixed Tuple or CustomTuple). """
    return is_FixedTuple(x) or is_CustomTuple(x)
def get_FixedTupleLike_args(x: Type[Tuple]) -> Tuple[TypeLike, ...]:
    """ Returns the component types of a fixed-size tuple-like. """
    assert is_FixedTupleLike(x), x
    if is_FixedTuple(x):
        x = cast(Type[Tuple], x)
        return get_tuple_types(x)
    if is_CustomTuple(x):
        x = cast(Type[CustomTuple], x)
        return get_CustomTuple_args(x)
    assert False, x
def get_FixedTupleLike_name(V: Type[Tuple]) -> str:
    """ Display name for a fixed-size tuple-like, e.g. "Tuple[int,str]". """
    inner = ",".join(map(name_for_type_like, get_FixedTupleLike_args(V)))
    return f"Tuple[{inner}]"
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/annotations_tricks.py
|
annotations_tricks.py
|
from typing import Tuple
from .aliases import TypeLike
from .annotations_tricks import is_TypeLike, key_for_sorting_types, name_for_type_like
from .constants import INTERSECTION_ATT, PYTHON_36
__all__ = [
"get_Intersection_args",
"make_Intersection",
"is_Intersection",
"Intersection",
]
if PYTHON_36:  # pragma: no cover
    # On 3.6 there is no __class_getitem__, so the Intersection[...] syntax
    # is provided through a metaclass __getitem__ instead.

    class IntersectionMeta(type):
        def __getitem__(self, params):
            return make_Intersection(params)

    class Intersection(metaclass=IntersectionMeta):
        pass


else:

    class Intersection:
        """ Subscribable marker: Intersection[A, B] builds an intersection type. """

        @classmethod
        def __class_getitem__(cls, params):
            # return Intersection_item(cls, params)
            return make_Intersection(params)
class IntersectionCache:
    """ Process-wide memoization for make_Intersection. """

    use_cache = True
    make_intersection_cache = {}
def make_Intersection(ts: Tuple[TypeLike, ...]) -> TypeLike:
    """ Builds (and caches) the intersection of the given types.

        Degenerate cases: the empty intersection is ``object``; a singleton
        (after de-duplication) is the type itself. Components are de-duplicated
        and sorted so that equivalent intersections share one cached class. """
    if len(ts) == 0:
        return object
    done = []
    for t in ts:
        assert is_TypeLike(t), ts
        if t not in done:
            done.append(t)
    done = sorted(done, key=key_for_sorting_types)
    ts = tuple(done)
    if len(ts) == 1:
        return ts[0]
    if IntersectionCache.use_cache:
        if ts in IntersectionCache.make_intersection_cache:
            return IntersectionCache.make_intersection_cache[ts]

    class IntersectionBase(type):
        def __eq__(self, other):
            # Order-insensitive comparison of the component sets.
            if is_Intersection(other):
                t1 = get_Intersection_args(self)
                t2 = get_Intersection_args(other)
                return set(t1) == set(t2)
            return False

        def __hash__(cls):  # pragma: no cover
            # Constant hash: equality does the real work.
            return 1  # XXX

        # logger.debug(f'here ___eq__ {self} {other} {issubclass(other, CustomList)} = {res}')

    attrs = {INTERSECTION_ATT: ts}
    name = "Intersection[%s]" % ",".join(name_for_type_like(_) for _ in ts)
    res = IntersectionBase(name, (), attrs)
    IntersectionCache.make_intersection_cache[ts] = res
    return res
def is_Intersection(T: TypeLike) -> bool:
    """ True if T was produced by make_Intersection (checks the marker
        attribute plus the generated class-name prefix). """
    return hasattr(T, INTERSECTION_ATT) and type(T).__name__.startswith("Intersection")
def get_Intersection_args(T: TypeLike) -> Tuple[TypeLike, ...]:
    """ Returns the (sorted, de-duplicated) component types of an intersection. """
    assert is_Intersection(T)
    return getattr(T, INTERSECTION_ATT)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/my_intersection.py
|
my_intersection.py
|
from datetime import datetime
from decimal import Decimal
from numbers import Number
from typing import (
Any,
Awaitable,
cast,
ClassVar,
Dict,
Generic,
Iterable,
Iterator,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from zuper_commons.types import ZNotImplementedError, ZTypeError, ZValueError
from . import logger
from .aliases import TypeLike
from .annotations_tricks import (
get_Awaitable_arg,
get_Callable_info,
get_ClassVar_arg,
get_ForwardRef_arg,
get_Iterable_arg,
get_Iterator_arg,
get_List_arg,
get_NewType_arg,
get_Optional_arg,
get_Sequence_arg,
get_Type_arg,
get_TypeVar_name,
get_Union_args,
get_VarTuple_arg,
is_Any,
is_Awaitable,
is_Callable,
is_ClassVar,
is_FixedTupleLike_canonical,
is_ForwardRef,
is_Iterable,
is_Iterator,
is_List,
is_List_canonical,
is_NewType,
is_Optional,
is_Sequence,
is_Type,
is_TypeVar,
is_Union,
is_VarTuple,
is_VarTuple_canonical,
make_Tuple,
make_Union,
make_VarTuple,
)
from .get_patches_ import is_placeholder
from .literal import is_Literal
from .annotations_tricks import (
CustomList,
get_CustomList_arg,
get_DictLike_args,
get_FixedTupleLike_args,
get_ListLike_arg,
get_SetLike_arg,
is_CustomList,
is_DictLike,
is_DictLike_canonical,
is_FixedTupleLike,
is_ListLike,
is_ListLike_canonical,
is_SetLike,
is_SetLike_canonical,
make_dict,
make_list,
make_set,
)
from .my_intersection import get_Intersection_args, is_Intersection, make_Intersection
from .uninhabited import is_Uninhabited
__all__ = [
"get_name_without_brackets",
"replace_typevars",
"canonical",
"NoConstructorImplemented",
"find_typevars_inside",
]
def get_name_without_brackets(name: str) -> str:
    """ Strips a bracketed suffix: 'List[int]' -> 'List'; other names unchanged. """
    head, _, _ = name.partition("[")
    return head
class NoConstructorImplemented(TypeError):
    # TypeError subclass; defined here for use by callers elsewhere in the
    # package (not raised in this module).
    pass
X = TypeVar("X")
def get_default_attrs():
    """ Default symbols made available when eval()-ing string annotations in
        replace_typevars: the common typing constructs plus a generic Queue
        stand-in. """
    from .zeneric2 import dataclass

    @dataclass
    class MyQueue(Generic[X]):
        pass

    return dict(
        Any=Any,
        Optional=Optional,
        Union=Union,
        Tuple=Tuple,
        List=List,
        Set=Set,
        Dict=Dict,
        Queue=MyQueue,
    )
def canonical(typelike: TypeLike) -> TypeLike:
    """ Rewrites a type-like into canonical (Custom*) container form, with no
        variable substitution. """
    return replace_typevars(typelike, bindings={}, symbols={}, make_canonical=True)
def replace_typevars(
    cls: TypeLike,
    *,
    bindings: Dict[Any, type],
    symbols: Dict[str, type],
    make_canonical: bool = False,
) -> TypeLike:
    """ Recursively substitutes type variables and symbolic names in a type-like.

        bindings: maps TypeVars (or their string names) to replacement types.
        symbols: maps names to types; used to resolve strings and ForwardRefs.
        make_canonical: if True, containers are also rewritten into canonical
        (Custom*) form even when no substitution occurred.

        When nothing changes, the original object is returned unchanged.

        BUG FIX: the is_Awaitable branch used to return ``Awaitable[cls]``
        (re-wrapping the whole original Awaitable) instead of the substituted
        argument; it now returns ``Awaitable[y]``.
    """
    if is_placeholder(cls):
        msg = "Cannot run replace_typevars() on placeholder"
        raise ZValueError(msg, cls=cls)
    # Recursion helper; note make_canonical is deliberately not propagated.
    r = lambda _: replace_typevars(_, bindings=bindings, symbols=symbols)
    if cls is type:
        return type

    if hasattr(cls, "__name__") and cls.__name__ in symbols:
        return symbols[cls.__name__]
    elif (isinstance(cls, str) or is_TypeVar(cls)) and cls in bindings:
        return bindings[cls]
    elif hasattr(cls, "__name__") and cls.__name__.startswith("Placeholder"):
        return cls
    elif is_TypeVar(cls):
        # Match bindings by TypeVar *name*, not identity.
        name = get_TypeVar_name(cls)
        for k, v in bindings.items():
            if is_TypeVar(k) and get_TypeVar_name(k) == name:
                return v
        return cls
    elif isinstance(cls, str):
        if cls in symbols:
            return symbols[cls]
        g = dict(get_default_attrs())
        g.update(symbols)
        g0 = dict(g)
        # NOTE(review): eval() of a type expression; only safe for trusted input.
        try:
            return eval(cls, g)
        except TypeError as e:
            raise ZTypeError(cls=cls, g=g) from e
        except NameError as e:
            msg = f"Cannot resolve {cls!r}\ng: {list(g0)}"
            raise NameError(msg) from e
    elif is_NewType(cls):
        # XXX: maybe we should propagate?
        return cls
    elif is_Type(cls):
        x = get_Type_arg(cls)
        x2 = r(x)
        if x == x2:
            return cls
        return Type[x2]
    elif is_DictLike(cls):
        cls = cast(Type[Dict], cls)
        is_canonical = is_DictLike_canonical(cls)
        K0, V0 = get_DictLike_args(cls)
        K = r(K0)
        V = r(V0)
        if (K0, V0) == (K, V) and (is_canonical or not make_canonical):
            return cls
        return make_dict(K, V)
    elif is_SetLike(cls):
        cls = cast(Type[Set], cls)
        is_canonical = is_SetLike_canonical(cls)
        V0 = get_SetLike_arg(cls)
        V = r(V0)
        if V0 == V and (is_canonical or not make_canonical):
            return cls
        return make_set(V)
    elif is_CustomList(cls):
        cls = cast(Type[CustomList], cls)
        V0 = get_CustomList_arg(cls)
        V = r(V0)
        if V0 == V:
            return cls
        return make_list(V)
    elif is_List(cls):
        cls = cast(Type[List], cls)
        arg = get_List_arg(cls)
        is_canonical = is_List_canonical(cls)
        arg2 = r(arg)
        if arg == arg2 and (is_canonical or not make_canonical):
            return cls
        return List[arg2]
    elif is_ListLike(cls):
        cls = cast(Type[List], cls)
        arg = get_ListLike_arg(cls)
        is_canonical = is_ListLike_canonical(cls)
        arg2 = r(arg)
        if arg == arg2 and (is_canonical or not make_canonical):
            return cls
        return make_list(arg2)
    # XXX NOTE: must go after CustomDict
    elif hasattr(cls, "__annotations__"):
        # A dataclass-like: delegate to make_type, which substitutes inside
        # the field annotations.
        from .zeneric2 import make_type

        cls2 = make_type(cls, bindings=bindings, symbols=symbols)
        return cls2
    elif is_ClassVar(cls):
        is_canonical = True  # XXX is_ClassVar_canonical(cls)
        x = get_ClassVar_arg(cls)
        x2 = r(x)
        if x == x2 and (is_canonical or not make_canonical):
            return cls
        return ClassVar[x2]
    elif is_Iterator(cls):
        is_canonical = True  # is_Iterator_canonical(cls)
        # noinspection PyTypeChecker
        x = get_Iterator_arg(cls)
        x2 = r(x)
        if x == x2 and (is_canonical or not make_canonical):
            return cls
        return Iterator[x2]
    elif is_Sequence(cls):
        is_canonical = True  # is_Sequence_canonical(cls)
        cls = cast(Type[Sequence], cls)
        x = get_Sequence_arg(cls)
        x2 = r(x)
        if x == x2 and (is_canonical or not make_canonical):
            return cls
        return Sequence[x2]
    elif is_Optional(cls):
        is_canonical = True  # is_Optional_canonical(cls)
        x = get_Optional_arg(cls)
        x2 = r(x)
        if x == x2 and (is_canonical or not make_canonical):
            return cls
        return Optional[x2]
    elif is_Union(cls):
        # cls = cast(Type[Union], cls)  # cannot cast
        xs = get_Union_args(cls)
        is_canonical = True  # is_Union_canonical(cls)
        ys = tuple(r(_) for _ in xs)
        if ys == xs and (is_canonical or not make_canonical):
            return cls
        return make_Union(*ys)
    elif is_Intersection(cls):
        xs = get_Intersection_args(cls)
        ys = tuple(r(_) for _ in xs)
        if ys == xs:
            return cls
        return make_Intersection(ys)
    elif is_VarTuple(cls):
        cls = cast(Type[Tuple], cls)
        is_canonical = is_VarTuple_canonical(cls)
        x = get_VarTuple_arg(cls)
        y = r(x)
        if x == y and (is_canonical or not make_canonical):
            return cls
        return make_VarTuple(y)
    elif is_FixedTupleLike(cls):
        cls = cast(Type[Tuple], cls)
        is_canonical = is_FixedTupleLike_canonical(cls)
        xs = get_FixedTupleLike_args(cls)
        ys = tuple(r(_) for _ in xs)
        if ys == xs and (is_canonical or not make_canonical):
            return cls
        return make_Tuple(*ys)
    elif is_Callable(cls):
        cinfo = get_Callable_info(cls)
        cinfo2 = cinfo.replace(r)
        return cinfo2.as_callable()
    elif is_Awaitable(cls):
        x = get_Awaitable_arg(cls)
        y = r(x)
        if x == y:
            return cls
        # BUG FIX: was `Awaitable[cls]`, which wrapped the original Awaitable
        # instead of the substituted argument.
        return Awaitable[y]
    elif is_ForwardRef(cls):
        T = get_ForwardRef_arg(cls)
        if T in symbols:
            return r(symbols[T])
        else:
            logger.warning(f"could not resolve {cls}")
            return cls
    elif cls in (int, bool, float, Decimal, datetime, str, bytes, Number, type(None), object):
        return cls
    elif is_Any(cls):
        return cls
    elif is_Uninhabited(cls):
        return cls
    elif is_Literal(cls):
        return cls
    elif isinstance(cls, type):
        # Plain class with no annotations: nothing to substitute.
        return cls
    else:
        # Unknown type-like: returned unchanged (best-effort).
        return cls
B = Dict[Any, Any] # bug in Python 3.6
def find_typevars_inside(cls: TypeLike, already: Optional[Set[int]] = None) -> Tuple[TypeLike, ...]:
    """ Returns all TypeVars reachable inside a type-like (with repetitions,
        in discovery order).

        already: set of id()s already visited; breaks cycles in recursive types.

        FIX: the `isinstance(cls, str)` branch contained an unreachable
        `raise ZNotImplementedError` immediately after its `return ()`; the
        dead statement (and a block of commented-out code) has been removed.
    """
    if already is None:
        already = set()
    if id(cls) in already:
        return ()
    already.add(id(cls))
    r = lambda _: find_typevars_inside(_, already)

    def rs(ts):
        # Concatenates the results over a sequence of type-likes.
        res = ()
        for x in ts:
            res = res + r(x)
        return res

    if cls is type:
        return ()
    if is_TypeVar(cls):
        return (cls,)
    elif isinstance(cls, str):
        # An unresolved string annotation cannot contain TypeVar objects.
        return ()  # XXX
    elif is_NewType(cls):
        return r(get_NewType_arg(cls))
    elif is_Type(cls):
        return r(get_Type_arg(cls))
    elif is_DictLike(cls):
        cls = cast(Type[Dict], cls)
        K0, V0 = get_DictLike_args(cls)
        return r(K0) + r(V0)
    elif is_SetLike(cls):
        cls = cast(Type[Set], cls)
        V0 = get_SetLike_arg(cls)
        return r(V0)
    elif is_ListLike(cls):
        cls = cast(Type[List], cls)
        V0 = get_ListLike_arg(cls)
        return r(V0)
    # XXX NOTE: must go after CustomDict
    elif hasattr(cls, "__annotations__"):
        # A dataclass-like: search inside the field annotations.
        d = dict(cls.__annotations__)
        return rs(d.values())
    elif is_ClassVar(cls):
        x = get_ClassVar_arg(cls)
        return r(x)
    elif is_Iterator(cls):
        V0 = get_Iterator_arg(cls)
        return r(V0)
    elif is_Sequence(cls):
        cls = cast(Type[Sequence], cls)
        V0 = get_Sequence_arg(cls)
        return r(V0)
    elif is_Iterable(cls):
        cls = cast(Type[Iterable], cls)
        V0 = get_Iterable_arg(cls)
        return r(V0)
    elif is_Optional(cls):
        x = get_Optional_arg(cls)
        return r(x)
    elif is_Union(cls):
        xs = get_Union_args(cls)
        return rs(xs)
    elif is_Intersection(cls):
        xs = get_Intersection_args(cls)
        return rs(xs)
    elif is_VarTuple(cls):
        cls = cast(Type[Tuple], cls)
        x = get_VarTuple_arg(cls)
        return r(x)
    elif is_FixedTupleLike(cls):
        cls = cast(Type[Tuple], cls)
        xs = get_FixedTupleLike_args(cls)
        return rs(xs)
    elif is_Callable(cls):
        cinfo = get_Callable_info(cls)
        return rs(cinfo.parameters_by_position) + r(cinfo.returns)
    elif is_ForwardRef(cls):
        return ()  # XXX
    elif cls in (int, bool, float, Decimal, datetime, str, bytes, Number, type(None), object):
        return ()
    elif is_Any(cls):
        return ()
    elif is_Uninhabited(cls):
        return ()
    elif is_Literal(cls):
        return ()
    elif isinstance(cls, type):
        return ()
    else:
        raise ZNotImplementedError(cls=cls)
|
zuper-typing-z6
|
/zuper-typing-z6-6.2.3.tar.gz/zuper-typing-z6-6.2.3/src/zuper_typing/recursive_tricks.py
|
recursive_tricks.py
|
from setuptools import setup, find_packages
def get_version(filename):
    """ Returns the __version__ string literal found in *filename*.

        The file is scanned line by line and parsed with ast (never imported),
        so importing side effects are avoided. """
    import ast

    with open(filename) as fh:
        for raw in fh:
            if not raw.startswith('__version__'):
                continue
            found = ast.parse(raw).body[0].value.s
            if found is None:
                raise ValueError(filename)
            return found
    raise ValueError('No version found in %r.' % filename)
shell_version = get_version(filename='src/zuper_json/__init__.py')

setup(name='zuper-utils',
      package_dir={'': 'src'},
      packages=find_packages('src'),
      version=shell_version,
      zip_safe=False,
      entry_points={
          'console_scripts': [
              # 'zj = zuper_ipce.zj:zj_main',
              'json2cbor = zuper_json:json2cbor_main',
              'cbor2json = zuper_json:cbor2json_main',
              'cbor2yaml = zuper_json:cbor2yaml_main',
          ]
      },
      install_requires=[
          'pybase64',
          'PyYAML',
          'validate_email',
          'mypy_extensions',
          'nose',
          'coverage>=1.4.33',
          'dataclasses',
          'jsonschema',
          'cbor2',
          'numpy',  # FIX: was listed twice; duplicate entry removed
          'base58',
          'zuper-commons>=3,<4',
          'frozendict',
          'pytz',
          'termcolor',
      ],
      )
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/setup.py
|
setup.py
|
from dataclasses import dataclass
from typing import Tuple, List
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
symbols = {}
def test_tuples1():
    """ Round-trip (object and type) of a dataclass with a fixed-size Tuple field. """

    @dataclass
    class M:
        a: Tuple[int, str]

    a = M((1, '32'))

    assert_object_roundtrip(a, {})
    assert_type_roundtrip(M, {})
def test_tuples3():
    """ Type round-trip of a fixed-size tuple type. """
    T = Tuple[str, int]
    assert_type_roundtrip(T, symbols)
def test_tuples2():
    """ Type round-trip of a variadic tuple type. """
    T = Tuple[str, ...]
    assert_type_roundtrip(T, symbols)
def test_list1():
    """ Type round-trip of a List type. """
    T = List[str]
    assert_type_roundtrip(T, symbols)
def test_list2():
    """ Object round-trip of a dataclass with a List field. """

    @dataclass
    class M:
        a: List[str]

    a = M(['a', 'b'])
    assert_object_roundtrip(a, symbols)
#
# def test_tuples1():
#
# @dataclass
# class M:
# a: Tuple[int, str]
#
# a = M((1,'32'))
#
# assert_object_roundtrip(a, {})
# assert_type_roundtrip(M, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_tuples.py
|
test_tuples.py
|
import io
import json
import select
import time
import traceback
from io import BufferedReader
from json import JSONDecodeError
from typing import Iterator
import cbor2
import yaml
from . import logger
from .json_utils import decode_bytes_before_json_deserialization, encode_bytes_before_json_serialization
__all__ = [
'read_cbor_or_json_objects',
'json2cbor_main',
'cbor2json_main',
'cbor2yaml_main',
'read_next_cbor',
'read_next_either_json_or_cbor',
]
def json2cbor_main():
    """ CLI entry point: reads JSON or CBOR objects from stdin and writes them
        to stdout as CBOR, flushing after each object. """
    fo = open('/dev/stdout', 'wb', buffering=0)
    fi = open('/dev/stdin', 'rb', buffering=0)
    # noinspection PyTypeChecker
    # buffer_size=1: presumably so peek() does not over-read the stream — confirm.
    fi = BufferedReader(fi, buffer_size=1)
    for j in read_cbor_or_json_objects(fi):
        c = cbor2.dumps(j)
        fo.write(c)
        fo.flush()
def cbor2json_main():
    """ CLI entry point: reads CBOR objects from stdin and writes them to
        stdout as newline-separated JSON (bytes values re-encoded first). """
    fo = open('/dev/stdout', 'wb', buffering=0)
    fi = open('/dev/stdin', 'rb', buffering=0)
    for j in read_cbor_objects(fi):
        j = encode_bytes_before_json_serialization(j)
        ob = json.dumps(j)
        ob = ob.encode('utf-8')
        fo.write(ob)
        fo.write(b'\n')
        fo.flush()
def cbor2yaml_main():
    """ CLI entry point: reads CBOR objects from stdin and writes them to
        stdout as YAML documents. """
    fo = open('/dev/stdout', 'wb')
    fi = open('/dev/stdin', 'rb')
    for j in read_cbor_objects(fi):
        ob = yaml.dump(j)
        ob = ob.encode('utf-8')
        fo.write(ob)
        fo.write(b'\n')
        fo.flush()
def read_cbor_or_json_objects(f, timeout=None) -> Iterator:
    """ Reads cbor or line-separated json objects from the binary file f."""
    while True:
        try:
            ob = read_next_either_json_or_cbor(f, timeout=timeout)
            yield ob
        except StopIteration:
            # EOF: end the stream cleanly.
            break
        except TimeoutError:
            raise
def read_cbor_objects(f, timeout=None) -> Iterator:
    """ Reads cbor objects from the binary file f.

        (Docstring fixed: this variant reads CBOR only, unlike
        read_cbor_or_json_objects.) """
    while True:
        try:
            ob = read_next_cbor(f, timeout=timeout)
            yield ob
        except StopIteration:
            # EOF: end the stream cleanly.
            break
        except TimeoutError:
            raise
def read_next_either_json_or_cbor(f, timeout=None, waiting_for: str = None) -> dict:
    """ Reads the next object from the binary stream f, auto-detecting
        whether it is a line of JSON or a CBOR-encoded object.

        Raises StopIteration if it is EOF.
        Raises TimeoutError if over timeout.
    """
    # Delegate the select()-based wait loop to the shared helper instead of
    # duplicating it here (consistency with read_next_cbor).
    wait_for_data(f, timeout=timeout, waiting_for=waiting_for)

    first = f.peek(1)[:1]
    if len(first) == 0:
        msg = 'Detected EOF on %s.' % f
        if waiting_for:
            msg += ' ' + waiting_for
        raise StopIteration(msg)

    # Heuristic: a leading space, newline or '{' marks a line-oriented JSON
    # object; anything else is assumed to be CBOR.
    if first in [b' ', b'\n', b'{']:
        line = f.readline()
        line = line.strip()
        if not line:
            msg = 'Read empty line. Re-trying.'
            logger.warning(msg)
            # Bug fix: propagate timeout/waiting_for on retry (previously the
            # recursive calls silently dropped both arguments).
            return read_next_either_json_or_cbor(f, timeout=timeout, waiting_for=waiting_for)
        try:
            j = json.loads(line)
        except JSONDecodeError:
            msg = f'Could not decode line {line!r}: {traceback.format_exc()}'
            logger.error(msg)
            return read_next_either_json_or_cbor(f, timeout=timeout, waiting_for=waiting_for)
        j = decode_bytes_before_json_deserialization(j)
        return j
    else:
        j = cbor2.load(f)
        return j
def wait_for_data(f, timeout=None, waiting_for: str = None):
    """ Blocks until data is available for reading on f, using select().

        Returns as soon as f is readable, or immediately if f does not
        support select() (e.g. an in-memory stream).

        Raises TimeoutError if `timeout` seconds elapse with no data.
        Raises StopIteration if it is EOF.
    """
    # XXX: StopIteration not implemented
    fs = [f]
    t0 = time.time()
    # Wake up periodically to log progress even with no timeout set.
    intermediate_timeout = 3.0
    while True:
        try:
            readyr, readyw, readyx = select.select(fs, [], fs, intermediate_timeout)
        except io.UnsupportedOperation:
            # f has no fileno(); cannot select on it, assume data is ready.
            break
        if readyr:
            break
        elif readyx:
            logger.warning('Exceptional condition on input channel %s' % readyx)
        else:
            delta = time.time() - t0
            if (timeout is not None) and (delta > timeout):
                msg = 'Timeout after %.1f s.' % delta
                logger.error(msg)
                raise TimeoutError(msg)
            else:
                msg = 'I have been waiting %.1f s.' % delta
                if timeout is None:
                    msg += ' I will wait indefinitely.'
                else:
                    # Bug fix: "occurr" -> "occur" in the log message.
                    msg += ' Timeout will occur at %.1f s.' % timeout
                if waiting_for:
                    msg += ' ' + waiting_for
                logger.warning(msg)
def read_next_cbor(f, timeout=None, waiting_for: str = None) -> dict:
    """ Reads the next CBOR object from f.

        Raises StopIteration if it is EOF.
        Raises TimeoutError if over timeout.
    """
    wait_for_data(f, timeout, waiting_for)
    try:
        return cbor2.load(f)
    except OSError as e:
        # errno 29 (ESPIPE / illegal seek) signals the end of the stream.
        if e.errno != 29:
            raise
        raise StopIteration from None
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/json2cbor.py
|
json2cbor.py
|
from collections import UserString
from typing import NewType, Dict, Callable
def valid_email(s):
    """ Raises ValueError if s is not a syntactically valid e-mail address. """
    import validate_email
    if not validate_email.validate_email(s):
        msg = f'Invalid email address {s!r}'
        raise ValueError(msg)
json_formats: Dict[str, Callable[[str], None]] = {
"date-time": None,
"email": valid_email,
"ipv4": None,
"ipv6": None,
"uri": None,
"uri-reference": None,
"json-pointer": None,
"uri-template": None,
# others:
"domain": None,
"multihash": None,
}
def make_special(name, sformat):
    """ Creates a UserString subclass called `name` whose contents are
        checked by the validator registered for JSON format `sformat`
        in json_formats (a None validator disables checking). """
    validator = json_formats[sformat]

    class _Validated(UserString):
        data: str

        def __init__(self, seq):
            UserString.__init__(self, seq)
            if validator is not None:
                validator(self.data)

    return type(name, (_Validated,), {})
__all__ = [
'URL',
'DateTimeString',
'Email',
'IP4',
'IP6',
'URI',
'URIReference',
'JSONPointer',
'URITemplate',
'Domain',
'Multihash',
# 'IPDELink',
]
URL = make_special('URL', 'uri')
DateTimeString = make_special('DateTimeString', 'date-time')
Email = make_special('Email', 'email')
IP4 = make_special('IP4', 'ipv4')
IP6 = make_special('IP6', 'ipv6')
URI = make_special('URI', 'uri')
URIReference = make_special('URIReference', 'uri')
JSONPointer = make_special('JSONPointer', 'json-pointer')
URITemplate = make_special('URITemplate', 'uri-template')
Domain = make_special('Domain', 'domain')
Multihash = make_special('Multihash', 'multihash')
IPDELink = NewType('IPDELink', str)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/special_strings.py
|
special_strings.py
|
import logging
logging.basicConfig()
logger = logging.getLogger('zj')
logger.setLevel(logging.DEBUG)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/logging.py
|
logging.py
|
from datetime import datetime
from typing import Dict, List, Union, NewType
# JSONObject = Dict[str, Dict]
# JSONList = List['MemoryJSON']
# MemoryJSON = Union[int, str, float, JSONList, JSONObject, type(None)]
# JSONObject = Dict[str, Dict]
# JSONList = List['MemoryJSON']
IPCE = Union[int, str, float, bytes, datetime, List['IPCE'], Dict[str, 'IPCE'], type(None)]
IPCL = Union[int, str, float, bytes, datetime, List['IPCL'], Dict[str, 'IPCL'], type(None)]
CID = NewType('CID', str)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/types.py
|
types.py
|
import numpy as np
from zuper_commons.types import check_isinstance
def dict_from_numpy(x: np.ndarray) -> dict:
    """ Serializes a numpy array into a plain dict with keys
        'shape' (list of int), 'dtype' (dtype name) and 'data' (raw bytes). """
    return {
        'shape': list(x.shape),
        'dtype': x.dtype.name,
        'data': x.tobytes(),
    }
def numpy_from_dict(d: dict) -> np.ndarray:
    """ Inverse of dict_from_numpy: rebuilds the array from the recorded
        shape, dtype name and raw byte buffer. """
    data: bytes = d['data']
    check_isinstance(data, bytes)
    flat = np.frombuffer(data, dtype=d['dtype'])
    return flat.reshape(tuple(d['shape']))
#
#
# def bytes_from_numpy(a: np.ndarray) -> bytes:
# import h5py
# io = BytesIO()
# with h5py.File(io) as f:
# # f.setdefault("compression", "lzo")
# f['value'] = a
# uncompressed = io.getvalue()
#
# compressed_data = zlib.compress(uncompressed)
# return compressed_data
#
#
# def numpy_from_bytes(b: bytes) -> np.ndarray:
# b = zlib.decompress(b)
# import h5py
# io = BytesIO(b)
# with h5py.File(io) as f:
# # f.setdefault("compression", "lzw")
# a = f['value']
# res = np.array(a)
# return res
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/numpy_encoding.py
|
numpy_encoding.py
|
import json
from dataclasses import dataclass
from typing import Optional
from jsonschema import validate
from .ipce import object_to_ipce
@dataclass
class AName:
""" Describes a Name with optional middle name"""
first: str
last: str
middle: Optional[str] = None
symbols = {'AName': AName}
def test_schema1():
    """ The IPCE produced for an object validates against its own $schema. """
    n1 = AName('one', 'two')
    y1 = object_to_ipce(n1, symbols)
    print(json.dumps(y1, indent=2))
    validate(y1, y1['$schema'])
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_json_schema.py
|
test_json_schema.py
|
import dataclasses
import typing
from dataclasses import fields
from numbers import Number
from typing import Generic
import yaml
from nose.tools import raises, assert_equal
from . import logger
from .annotations_tricks import is_ClassVar, get_ClassVar_arg, is_Type, get_Type_arg, is_forward_ref
from .constants import enable_type_checking
from .ipce import type_to_schema, schema_to_type
from .monkey_patching_typing import my_dataclass
from .pretty import pprint
from .test_utils import assert_object_roundtrip, assert_type_roundtrip, known_failure
from .zeneric2 import resolve_types, dataclass
def test_basic():
U = TypeVar('U')
T = Generic[U]
print(T.mro())
assert_equal(T.__name__, 'Generic[U]')
print('inheriting C(T)')
@dataclass
class C(T):
...
print(C.mro())
assert_equal(C.__name__, 'C[U]')
print('subscribing C[int]')
D = C[int]
assert_equal(D.__name__, 'C[int]')
@raises(TypeError)
def test_dataclass_can_preserve_init():
X = TypeVar('X')
@dataclass
class M(Generic[X]):
x: int
M(x=2)
def test_serialize_generic_typevar():
X = typing.TypeVar('X', bound=Number)
@dataclass
class M1(Generic[X]):
""" A generic class """
x: X
M2 = assert_type_roundtrip(M1, {})
f1 = fields(M1)
assert f1[0].type == X
# there was a bug with modifying this
_ = M1[int]
f1b = fields(M1)
assert f1b[0].type == X
assert f1 == f1b
# M2 = assert_type_roundtrip(M1, {})
def test_serialize_generic():
X = typing.TypeVar('X', bound=Number)
@dataclass
class M1(Generic[X]):
""" A generic class """
x: X
M1int = M1[int]
assert_type_roundtrip(M1, {})
assert_type_roundtrip(M1int, {})
m1a = M1int(x=2)
m1b = M1int(x=3)
s = type_to_schema(M1, {})
# print(json.dumps(s, indent=3))
M2 = schema_to_type(s, {}, {})
# noinspection PyUnresolvedReferences
M2int = M2[int]
assert_equal(M1.__module__, M2.__module__)
m2a = M2int(x=2)
m2b = M2int(x=3)
# print(m1a)
# print(m2a)
# print(type(m1a))
# print(type(m2a))
# print(type(m1a).__module__)
# print(type(m2a).__module__)
assert m1a == m2a
assert m2a == m1a
assert m2b == m1b
assert m1b == m2b
assert m1b != m1a
assert m2b != m2a
# assert_object_roundtrip(M, {'M': M})
def test_serialize_generic_optional():
# @dataclass
# class Animal:
# pass
X = typing.TypeVar('X', bound=Number)
@dataclass
class M1(Generic[X]):
""" A generic class """
x: X
xo: typing.Optional[X] = None
M1int = M1[int]
assert_type_roundtrip(M1, {})
assert_type_roundtrip(M1int, {})
m1a = M1int(x=2)
m1b = M1int(x=3)
s = type_to_schema(M1, {})
# print(json.dumps(s, indent=3))
M2 = schema_to_type(s, {}, {})
# noinspection PyUnresolvedReferences
M2int = M2[int]
assert_equal(M1.__module__, M2.__module__)
m2a = M2int(x=2)
m2b = M2int(x=3)
# print(m1a)
# print(m2a)
# print(type(m1a))
# print(type(m2a))
# print(type(m1a).__module__)
# print(type(m2a).__module__)
assert m1a == m2a
assert m2a == m1a
assert m2b == m1b
assert m1b == m2b
assert m1b != m1a
assert m2b != m2a
from typing import Optional, TypeVar
def test_more():
X = TypeVar('X')
@dataclass
class Entity0(Generic[X]):
data0: X
parent: "Optional[Entity0[X]]" = None
resolve_types(Entity0)
print(Entity0.__annotations__['parent'].__repr__())
assert not isinstance(Entity0.__annotations__['parent'], str)
# raise Exception()
schema = type_to_schema(Entity0, {}, {})
print(yaml.dump(schema))
T = schema_to_type(schema, {}, {})
print(T.__annotations__)
assert_type_roundtrip(Entity0, {})
EI = Entity0[int]
assert_equal(EI.__annotations__['parent'].__args__[0].__name__, 'Entity0[int]')
assert_type_roundtrip(EI, {})
x = EI(data0=3, parent=EI(data0=4))
assert_object_roundtrip(x, {}) # {'Entity': Entity, 'X': X})
def test_more_direct():
# language=yaml
schema = yaml.load("""
$id: http://invalid.json-schema.org/Entity0[X]#
$schema: http://json-schema.org/draft-07/schema#
__module__: zuper_json.zeneric2
__qualname__: test_more.<locals>.Entity0
definitions:
X: {$id: 'http://invalid.json-schema.org/Entity0[X]/X#', $schema: 'http://json-schema.org/draft-07/schema#'}
description: 'Entity0[X](data0: ~X, parent: ''Optional[Entity0[X]]'' = None)'
properties:
data0: {$ref: 'http://invalid.json-schema.org/Entity0[X]/X#'}
parent: {$ref: 'http://invalid.json-schema.org/Entity0[X]#'}
required: [data0]
title: Entity0[X]
type: object
""", Loader=yaml.SafeLoader)
T = schema_to_type(schema, {}, {})
print(T.__annotations__)
@known_failure
def test_more2():
X = TypeVar('X')
Y = TypeVar('Y')
@dataclass
class Entity11(Generic[X]):
data0: X
parent: "Optional[Entity11[X]]" = None
type_to_schema(Entity11, {})
EI = Entity11[int]
assert_type_roundtrip(Entity11, {})
assert_type_roundtrip(EI, {})
@dataclass
class Entity2(Generic[Y]):
parent: Optional[Entity11[Y]] = None
type_to_schema(Entity2, {})
assert_type_roundtrip(Entity2, {})
#
E2I = Entity2[int]
assert_type_roundtrip(E2I, {})
x = E2I(parent=EI(data0=4))
# print(json.dumps(type_to_schema(type(x), {}), indent=2))
assert_object_roundtrip(x, {'Entity11': Entity11, 'Entity2': Entity2},
works_without_schema=False)
def test_more2b():
X = TypeVar('X')
Y = TypeVar('Y')
@dataclass
class Entity12(Generic[X]):
data0: X
parent: "Optional[Entity12[X]]" = None
@dataclass
class Entity13(Generic[Y]):
parent: Optional[Entity12[Y]] = None
EI = Entity12[int]
# print(EI.__annotations__['parent'])
E2I = Entity13[int]
parent2 = E2I.__annotations__['parent']
print(parent2)
x = E2I(parent=EI(data0=4))
# print(json.dumps(type_to_schema(type(x), {}), indent=2))
# print(type(x).__name__)
assert_object_roundtrip(x, {'Entity12': Entity12, 'Entity13': Entity13}, works_without_schema=False)
from typing import ClassVar, Type
def test_isClassVar():
X = TypeVar('X')
A = ClassVar[Type[X]]
assert is_ClassVar(A)
assert get_ClassVar_arg(A) == Type[X]
def test_isType():
X = TypeVar('X')
A = Type[X]
# print(type(A))
# print(A.__dict__)
assert is_Type(A)
assert get_Type_arg(A) == X
def test_more3_simpler():
X = TypeVar('X')
@dataclass
class MyClass(Generic[X]):
XT: ClassVar[Type[X]]
assert_type_roundtrip(MyClass, {})
#
# # type_to_schema(MyClass, {})
C = MyClass[int, str]
assert_type_roundtrip(C, {})
def test_more3():
# class Base:
# pass
X = TypeVar('X')
Y = TypeVar('Y')
@dataclass
class MyClass(Generic[X, Y]):
a: X
XT: ClassVar[Type[X]]
YT: ClassVar[Type[Y]]
def method(self, x: X) -> Y:
return type(self).YT(x)
assert_type_roundtrip(MyClass, {})
# type_to_schema(MyClass, {})
C = MyClass[int, str]
assert_type_roundtrip(C, {})
# print(f'Annotations for C: {C.__annotations__}')
assert_equal(C.__annotations__['XT'], ClassVar[Type[int]])
assert_equal(C.XT, int)
assert_equal(C.__annotations__['YT'], ClassVar[Type[str]])
assert_equal(C.YT, str)
schema = type_to_schema(C, {})
# print(json.dumps(schema, indent=2))
schema_to_type(schema, {}, {})
# print(f'Annotations for C2: {C2.__annotations__}')
e = C(2)
r = e.method(1)
assert r == "1"
assert_object_roundtrip(e, {})
def test_entity():
X = TypeVar('X')
@my_dataclass
class SecurityModel2:
# guid: Any
owner: str
arbiter: str
@my_dataclass
class Entity2(Generic[X]):
data0: X
guid: str
security_model: SecurityModel2
parent: "Optional[Entity2[X]]" = None
forked: "Optional[Entity2[X]]" = None
T = type_to_schema(Entity2, {}, {})
C = schema_to_type(T, {}, {})
print(yaml.dump(T))
print(C.__annotations__)
# resolve_types(Entity2, locals())
# assert_type_roundtrip(Entity2, locals())
assert_type_roundtrip(Entity2, {})
Entity2_int = Entity2[int]
assert_type_roundtrip(Entity2_int, {})
# assert_object_roundtrip(x, {})
def test_entity0():
# language=yaml
schema = yaml.load("""
$id: http://invalid.json-schema.org/Entity2[X]#
$schema: http://json-schema.org/draft-07/schema#
definitions:
X: {$id: 'http://invalid.json-schema.org/Entity2[X]/X#', $schema: 'http://json-schema.org/draft-07/schema#'}
description:
properties:
parent: {$ref: 'http://invalid.json-schema.org/Entity2[X]#'}
required: [data0, guid, security_model]
title: Entity2[X]
type: object
""", Loader=yaml.SafeLoader)
C = schema_to_type(schema, {}, {})
print(C.__annotations__)
assert not is_forward_ref(C.__annotations__['parent'].__args__[0])
def test_classvar1():
@dataclass
class C:
v: ClassVar[int] = 1
assert_type_roundtrip(C, {})
# schema = type_to_schema(C, {})
# C2: C = schema_to_type(schema, {}, {})
#
# assert_equal(C.v, C2.v)
def test_classvar2():
X = TypeVar('X', bound=int)
@dataclass
class CG(Generic[X]):
v: ClassVar[X] = 1
C = CG[int]
schema = type_to_schema(C, {})
C2: C = schema_to_type(schema, {}, {})
assert_type_roundtrip(C, {})
assert_type_roundtrip(CG, {})
assert_equal(C.v, C2.v)
@raises(TypeError)
def test_check_bound():
@dataclass
class Animal:
pass
X = TypeVar('X', bound=Animal)
@dataclass
class CG(Generic[X]):
a: X
CG[int](a=2)
# assert_type_roundtrip(CG, {})
# assert_type_roundtrip(CG[int], {})
#
if enable_type_checking:
@raises(ValueError, TypeError) # typerror in 3.6
def test_check_value():
@dataclass
class CG(Generic[()]):
a: int
CG[int](a="a")
def test_signing():
X = TypeVar('X')
@dataclass
class PublicKey1:
key: bytes
@dataclass
class Signed1(Generic[X]):
key: PublicKey1
signature_data: bytes
data: X
s = Signed1[str](key=PublicKey1(key=b''), signature_data=b'xxx', data="message")
assert_type_roundtrip(Signed1[str], {})
assert_object_roundtrip(s, {})
def test_derived1():
X = TypeVar('X')
@dataclass
class Signed3(Generic[X]):
data: X
S = Signed3[int]
logger.info(dataclasses.fields(S))
class Y(S):
"""hello"""
pass
assert S.__doc__ in ['Signed3[int](data:int)', 'Signed3[int](data: int)']
assert_equal(Y.__doc__, """hello""")
assert_type_roundtrip(Y, {})
def test_derived2_no_doc():
X = TypeVar('X')
@dataclass
class Signed3(Generic[X]):
data: X
S = Signed3[int]
class Z(S):
pass
assert_type_roundtrip(Z, {})
def test_derived2_subst():
X = TypeVar('X')
# print(dir(Generic))
# print(dir(typing.GenericMeta))
# print(Generic.__getitem__)
@dataclass
class Signed3(Generic[X]):
data: X
parent: Optional['Signed3[X]'] = None
_ = Signed3[int]
# resolve_types(Signed3, locals())
S = Signed3[int]
pprint(**S.__annotations__)
assert 'X' not in str(S.__annotations__), S.__annotations__
# assert_type_roundtrip(S, {})
@dataclass
class Y(S):
pass
pprint(**Y.__annotations__)
schema = type_to_schema(Y, {}, {})
print(yaml.dump(schema))
TY = schema_to_type(schema, {}, {})
pprint('annotations', **TY.__annotations__)
P = TY.__annotations__['parent']
assert not is_forward_ref(P)
# raise Exception()
# raise Exception()
assert_type_roundtrip(Y, {})
def test_derived3_subst():
X = TypeVar('X')
@dataclass
class Signed3(Generic[X]):
data: Optional[X]
S = Signed3[int]
x = S(data=2)
assert_object_roundtrip(x, {})
if __name__ == '__main__':
test_entity()
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_zeneric.py
|
test_zeneric.py
|
from typing import NewType, Dict, Any, cast
JSONSchema = NewType('JSONSchema', dict)
GlobalsDict = Dict[str, Any]
ProcessingDict = Dict[str, Any]
EncounteredDict = Dict[str, str]
_SpecialForm = Any
SCHEMA_ID = "http://json-schema.org/draft-07/schema#"
SCHEMA_ATT = '$schema'
ID_ATT = '$id'
REF_ATT = '$ref'
X_CLASSVARS = 'classvars'
X_CLASSATTS = 'clasatts'
JSC_FORMAT = 'format'
JSC_REQUIRED = 'required'
JSC_TYPE = 'type'
JSC_ITEMS = 'items'
JSC_DEFAULT = 'default'
JSC_TITLE = 'title'
JSC_NUMBER = 'number'
JSC_INTEGER = 'integer'
JSC_ARRAY = "array"
JSC_OBJECT = 'object'
JSC_ADDITIONAL_PROPERTIES = 'additionalProperties'
JSC_PROPERTY_NAMES = 'propertyNames'
JSC_DESCRIPTION = 'description'
JSC_STRING = 'string'
JSC_NULL = 'null'
JSC_BOOL = 'boolean'
JSC_PROPERTIES = 'properties'
JSC_DEFINITIONS = 'definitions'
Z_ATT_LSIZE = 'lsize'
Z_ATT_TSIZE = 'tsize'
# GENERIC_ATT = '__generic__'
GENERIC_ATT2 = '__generic2__'
BINDINGS_ATT = '__binding__'
INTERSECTION_ATT = '__intersection__'
X_PYTHON_MODULE_ATT = '__module__'
ATT_PYTHON_NAME = '__qualname__'
NAME_ARG = '__name_arg__' # XXX: repeated
import sys
PYTHON_36 = sys.version_info[1] == 6
PYTHON_37 = sys.version_info[1] == 7
JSC_TITLE_NUMPY = 'numpy'
JSC_TITLE_BYTES = 'bytes'
JSC_TITLE_DECIMAL = 'decimal'
JSC_TITLE_FLOAT = 'float'
JSC_TITLE_DATETIME = 'datetime'
JSC_TITLE_CALLABLE = 'Callable'
JSC_TITLE_TYPE = 'type'
JSC_TITLE_CID = 'cid'
JSC_FORMAT_CID = 'cid'
SCHEMA_BYTES = cast(JSONSchema, {JSC_TYPE: JSC_STRING,
JSC_TITLE: JSC_TITLE_BYTES,
SCHEMA_ATT: SCHEMA_ID})
SCHEMA_CID = cast(JSONSchema, {JSC_TYPE: JSC_STRING,
JSC_TITLE: JSC_TITLE_CID,
JSC_FORMAT: JSC_FORMAT_CID,
SCHEMA_ATT: SCHEMA_ID})
enable_type_checking = False
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/constants.py
|
constants.py
|
from dataclasses import dataclass
from typing import *
from nose.tools import raises
from .ipce import ipce_to_object
from .my_intersection import Intersection
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
# noinspection PyUnresolvedReferences
def test_union_1():
@dataclass
class MyClass:
f: Union[int, str]
e = MyClass(1)
assert_object_roundtrip(e, {}) # raise here
e = MyClass('a') # pragma: no cover
assert_object_roundtrip(e, {}) # pragma: no cover
def test_union_2():
T = Union[int, str]
assert_type_roundtrip(T, {})
def test_union_3():
@dataclass
class A:
a: int
@dataclass
class B:
b: int
@dataclass
class C:
c: Union[A, B]
ec1 = C(A(1))
ec2 = C(B(1))
assert_type_roundtrip(C, {})
assert_object_roundtrip(ec1, {})
assert_object_roundtrip(ec2, {})
def test_intersection1():
@dataclass
class A:
a: int
@dataclass
class B:
b: str
AB = Intersection[A, B]
assert_type_roundtrip(AB, {}, expect_type_equal=False)
def test_intersection2():
@dataclass
class A:
a: int
@dataclass
class B:
b: str
AB = Intersection[A, B]
e = AB(a=1, b='2')
assert_object_roundtrip(e, {}) # raise here
@raises(TypeError)
def test_none1():
@dataclass
class A:
b: int
ob = ipce_to_object(None, {}, {}, expect_type=A)
assert ob is not None
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_union_inter.py
|
test_union_inter.py
|
# @relies_on_missing_features
from dataclasses import dataclass
from typing import Dict
from .ipce import object_to_ipce
from .pretty import pprint
from .test_utils import assert_type_roundtrip, assert_object_roundtrip
def test_serialize_klasses0():
assert_type_roundtrip(type, {})
@dataclass
class A:
a: int
Aj = object_to_ipce(A, {})
pprint(Aj=Aj)
assert_object_roundtrip(A, {}, expect_equality=False) # because of classes
def test_serialize_klasses1():
@dataclass
class MyLanguage:
my_types: Dict[str, type]
@dataclass
class A:
a: int
pass
a = MyLanguage({'a': A})
assert_type_roundtrip(MyLanguage, {})
assert_object_roundtrip(a, {}, expect_equality=False) # because of classes
def test_serialize_klasses2():
@dataclass
class MyLanguage:
my_type: type
@dataclass
class A:
a: int
a = MyLanguage(A)
assert_type_roundtrip(MyLanguage, {})
assert_object_roundtrip(a, {}, expect_equality=False) # because of classes
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_serialize_klasses.py
|
test_serialize_klasses.py
|
import json
from datetime import datetime
from .base64_utils import encode_bytes_base64, is_encoded_bytes_base64, decode_bytes_base64
def json_dump(x) -> str:
    """ Canonical JSON serialization: keys recursively sorted, compact
        separators, no ASCII escaping, NaN/Infinity rejected. """
    x = recursive_sort(x)
    # Bug fix: removed the dead `if False:` branch that held an unused
    # indent=2 variant. Compact separators keep the output byte-stable.
    s = json.dumps(x, ensure_ascii=False, allow_nan=False, check_circular=False,
                   separators=(',', ':'))
    # (optional): put the links on the same line instead of indenting
    # "$schema": {"/": "sha6:92c65f"},
    # s = re.sub(r'\n\s+\"/\"(.*)\s*\n\s*', r'"/"\1', s)
    return s


def recursive_sort(x):
    """ Returns a copy of x with dict keys sorted at every dict level.

        NOTE(review): dicts nested inside lists are not visited — confirm
        whether that is intentional for the canonical form. """
    if isinstance(x, dict):
        return {k: recursive_sort(x[k]) for k in sorted(x)}
    else:
        return x
def transform_leaf(x, transform):
    """ Recursively applies `transform` to every non-container leaf of x,
        rebuilding dicts and lists along the way. """
    if isinstance(x, dict):
        return {key: transform_leaf(value, transform) for key, value in x.items()}
    if isinstance(x, list):
        return [transform_leaf(item, transform) for item in x]
    # scalars (str, bool, float, int, None, bytes, ...) are leaves
    return transform(x)
from decimal import Decimal
DECIMAL_PREFIX = 'decimal:'
def encode_bytes_before_json_serialization(x0):
    """ Returns a copy of x0 where every bytes / datetime / Decimal leaf is
        replaced by its string encoding, so the result is JSON-serializable. """
    def encode(leaf):
        if isinstance(leaf, bytes):
            return encode_bytes_base64(leaf)
        if isinstance(leaf, datetime):
            return leaf.isoformat()
        if isinstance(leaf, Decimal):
            return DECIMAL_PREFIX + str(leaf)
        return leaf

    return transform_leaf(x0, encode)
def decode_bytes_before_json_deserialization(x0):
    """ Inverse of encode_bytes_before_json_serialization for bytes and
        Decimal leaves (ISO datetime strings are left untouched). """
    def decode(leaf):
        if isinstance(leaf, str):
            if is_encoded_bytes_base64(leaf):
                return decode_bytes_base64(leaf)
            if leaf.startswith(DECIMAL_PREFIX):
                return Decimal(leaf.replace(DECIMAL_PREFIX, ''))
        return leaf

    return transform_leaf(x0, decode)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/json_utils.py
|
json_utils.py
|
from abc import ABCMeta, abstractmethod
from zuper_json.zeneric2 import dataclass
from dataclasses import is_dataclass
from typing import *
try:
from typing import ForwardRef
except ImportError: # pragma: no cover
from typing import _ForwardRef as ForwardRef
from nose.tools import raises, assert_equal
from .constants import BINDINGS_ATT
from .test_utils import assert_object_roundtrip
from .zeneric2 import NoConstructorImplemented
X = TypeVar('X')
@raises(TypeError)
def test_boxed1():
@dataclass
class Boxed(Generic[X]):
inside: X
# cannot instance yet
Boxed(inside=13)
# assert_object_roundtrip(n1, {'Boxed': Boxed})
def test_boxed2():
@dataclass
class BoxedZ(Generic[X]):
inside: X
# print(BoxedZ.__eq__)
C = BoxedZ[int]
# print(pretty_dict('BoxedZ[int]', C.__dict__))
assert_equal(C.__annotations__, {'inside': int})
n1 = C(inside=13)
assert_object_roundtrip(n1, {'BoxedZ': BoxedZ})
@raises(TypeError)
def test_boxed_cannot():
# without @dataclass
class CannotInstantiateYet(Generic[X]):
inside: X
# print(CannotInstantiateYet.__init__)
# noinspection PyArgumentList
CannotInstantiateYet(inside=13)
@raises(TypeError)
def test_boxed_cannot2():
class CannotInstantiateYet(Generic[X]):
inside: X
# print(CannotInstantiateYet.__init__)
# assert_equal(CannotInstantiateYet.__init__.__name__, 'cannot_instantiate')
CI = dataclass(CannotInstantiateYet)
# print(CannotInstantiateYet.__init__)
# assert_equal(CannotInstantiateYet.__init__.__name__, 'new_init')
# print(CI.__init__)
CI(inside=13)
def test_boxed_can_dataclass():
@dataclass
class CannotInstantiateYet(Generic[X]):
inside: X
print('name: %s %s' % (CannotInstantiateYet.__name__, CannotInstantiateYet))
assert 'CannotInstantiateYet' in CannotInstantiateYet.__name__, CannotInstantiateYet.__name__
assert is_dataclass(CannotInstantiateYet)
print('calling')
CanBeInstantiated = CannotInstantiateYet[str]
assert 'CannotInstantiateYet[str]' in CanBeInstantiated.__name__, CanBeInstantiated.__name__
print('CanBeInstantiated: %s %s' % (CanBeInstantiated.__name__, CanBeInstantiated))
print(CanBeInstantiated.__init__)
CanBeInstantiated(inside="13")
def test_boxed_can_with_dataclass():
@dataclass
class CannotInstantiateYet(Generic[X]):
inside: X
CanBeInstantiated = CannotInstantiateYet[str]
CanBeInstantiated(inside="12")
class Animal(metaclass=ABCMeta):
@abstractmethod
def verse(self):
"""verse"""
class Dog(Animal):
def verse(self):
return 'wof'
@raises(NoConstructorImplemented)
def test_parametric_zeneric():
A = TypeVar('A', bound=Animal)
class Parametric(Generic[A]):
inside: A
AT: ClassVar[Type[A]]
def check_knows_type(self, Specific):
T = type(self)
a: A = type(self).AT()
a.verse()
assert (self.AT is getattr(T, BINDINGS_ATT)[A])
assert (self.AT is Specific), (self.AT, id(self.AT), Specific, id(Specific))
fido = Dog()
PDog = Parametric[Dog]
assert 'inside' not in PDog.__dict__, PDog.__dict__
assert 'AT' in PDog.__dict__, PDog.__dict__
p = PDog(inside=fido)
p.check_knows_type(Dog)
def test_parametric_zeneric_dataclass():
A = TypeVar('A', bound=Animal)
@dataclass
class Parametric(Generic[A]):
inside: A
AT: ClassVar[Type[A]]
def check_knows_type(self, Specific):
T = type(self)
a: A = type(self).AT()
a.verse()
assert (self.AT is getattr(T, BINDINGS_ATT)[A])
assert (self.AT is Specific), (self.AT, id(self.AT), Specific, id(Specific))
fido = Dog()
PDog = Parametric[Dog]
assert 'inside' not in PDog.__dict__, PDog.__dict__
assert 'AT' in PDog.__dict__, PDog.__dict__
p = PDog(inside=fido)
p.check_knows_type(Dog)
#
# # @raises(NoConstructorImplemented)
# def test_parametric_zeneric():
# try:
# _do_parametric(lambda _: _)
# except NoConstructorImplemented:
# print('ok test_parametric_zeneric')
# else:
# pass
# # raise AssertionError
#
#
# def test_parametric_zeneric_dataclass():
# _do_parametric(dataclass)
# print('ok test_parametric_zeneric_dataclass')
if __name__ == '__main__':
test_parametric_zeneric_dataclass()
test_parametric_zeneric()
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_boxed.py
|
test_boxed.py
|
import json
from dataclasses import dataclass, field
import numpy as np
from numpy.testing import assert_allclose
from .json_utils import encode_bytes_before_json_serialization
from .numpy_encoding import dict_from_numpy, numpy_from_dict
from .test_utils import assert_type_roundtrip, assert_object_roundtrip
def array_eq(arr1, arr2):
return (isinstance(arr1, np.ndarray) and
isinstance(arr2, np.ndarray) and
arr1.shape == arr2.shape and
(arr1 == arr2).all())
def test_numpy_01():
@dataclass
class C:
data: np.ndarray = field(metadata=dict(contract='array[HxWx3](uint8)'))
assert_type_roundtrip(C, {})
def test_numpy_02():
@dataclass
class C:
data: np.ndarray = field(metadata=dict(contract='array[HxWx3](uint8)'))
#
# def __eq__(self, other):
# if not isinstance(other, C):
# return NotImplemented
# return array_eq(self.data, other.data)
x = np.random.rand(1)
c = C(x)
assert_object_roundtrip(c, {})
#
# def test_numpy_03():
# x = np.random.rand(2, 3)
# b = bytes_from_numpy(x)
# y = numpy_from_bytes(b)
# assert_allclose(x, y)
def test_numpy_04():
x = np.random.rand(2, 3)
d = dict_from_numpy(x)
d1 = encode_bytes_before_json_serialization(d)
print(json.dumps(d1, indent=3))
y = numpy_from_dict(d)
assert_allclose(x, y)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_numpy.py
|
test_numpy.py
|
from typing import ClassVar, Tuple, Any
from .annotations_tricks import is_Dict, get_Set_name_V
class CustomDict(dict):
    # Pair (K, V) of the key and value types enforced on insertion;
    # set on each concrete subclass by make_dict().
    __dict_type__: ClassVar[Tuple[type, type]]
    def __setitem__(self, key, val):
        """ Inserts key -> val, validating both against __dict_type__. """
        K, V = self.__dict_type__
        if not isinstance(key, K):
            msg = f'Invalid key; expected {K}, got {type(key)}'
            raise ValueError(msg)
        # XXX: this should be for many more cases
        # Only checked when V is a concrete class: typing constructs such
        # as Dict[...] are not isinstance-checkable.
        if isinstance(V, type) and not isinstance(val, V):
            msg = f'Invalid value; expected {V}, got {type(val)}'
            raise ValueError(msg)
        dict.__setitem__(self, key, val)
    def __hash__(self):
        """ Hash of the sorted (key, value) items, computed once and cached.
            NOTE(review): the cache is never invalidated, so mutating the
            dict after the first hash leaves a stale value — confirm that
            instances are treated as immutable once hashed. """
        try:
            return self._cached_hash
        except AttributeError:
            h = self._cached_hash = hash(tuple(sorted(self.items())))
            return h
def make_dict(K, V) -> type:
    """ Creates a CustomDict subclass whose keys must be of type K and
        whose values must be of type V. """
    from .annotations_tricks import get_Dict_name_K_V
    class_name = get_Dict_name_K_V(K, V)
    return type(class_name, (CustomDict,), {'__dict_type__': (K, V)})
def is_Dict_or_CustomDict(x):
    """ True if x is a typing.Dict[...] annotation or a CustomDict subclass. """
    from .annotations_tricks import is_Dict
    if is_Dict(x):
        return True
    return isinstance(x, type) and issubclass(x, CustomDict)
def get_Dict_or_CustomDict_Key_Value(x):
    """ Returns the (key, value) type pair for either dict representation. """
    assert is_Dict_or_CustomDict(x)
    if is_Dict(x):
        return x.__args__
    # CustomDict subclass: the pair is stored on the class
    return x.__dict_type__
class CustomSet(set):
    # Element type for this set; set on each concrete subclass by make_set().
    __set_type__: ClassVar[type]
    def __hash__(self):
        """ Hash of the sorted elements, computed once and cached.
            NOTE(review): the cache is never invalidated, so mutating the
            set after the first hash leaves a stale value — confirm that
            instances are effectively frozen once hashed. """
        try:
            return self._cached_hash
        except AttributeError:
            h = self._cached_hash = hash(tuple(sorted(self)))
            return h
def make_set(V) -> type:
    """ Creates a CustomSet subclass whose elements are of type V. """
    class_name = get_Set_name_V(V)
    return type(class_name, (CustomSet,), {'__set_type__': V})
def is_set_or_CustomSet(x):
    """ True if x is a typing.Set[...] annotation or a CustomSet subclass. """
    from .annotations_tricks import is_Set
    if is_Set(x):
        return True
    return isinstance(x, type) and issubclass(x, CustomSet)
def get_set_Set_or_CustomSet_Value(x):
    """ Returns the element type for a bare `set`, a typing.Set[...]
        annotation, or a CustomSet subclass.

        Raises ValueError for anything else (previously a bare
        `assert False`, which disappears under `python -O`). """
    from .annotations_tricks import is_Set, get_Set_arg
    if x is set:
        # un-parameterized set: any element type
        return Any
    if is_Set(x):
        return get_Set_arg(x)
    if isinstance(x, type) and issubclass(x, CustomSet):
        return x.__set_type__
    msg = f'Cannot get the set element type for {x!r}'
    raise ValueError(msg)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/my_dict.py
|
my_dict.py
|
from dataclasses import is_dataclass
from typing import *
from zuper_commons.text import indent
from .annotations_tricks import is_Any, is_union, get_union_types, is_optional, get_optional_type
from .my_dict import is_Dict_or_CustomDict, get_Dict_or_CustomDict_Key_Value
def can_be_used_as(T1, T2) -> Tuple[bool, str]:
    """ Checks whether a value of type T1 can be used where T2 is expected.

        Returns (ok, why): ok is True on success; why is a human-readable
        explanation when ok is False (empty string otherwise).

        Raises NotImplementedError for type combinations not handled yet.
    """
    # cop out for the easy cases
    if T1 == T2:
        return True, ''
    if is_Any(T2):
        return True, ''
    if is_Dict_or_CustomDict(T2):
        K2, V2 = get_Dict_or_CustomDict_Key_Value(T2)
        if not is_Dict_or_CustomDict(T1):
            msg = f'Expecting a dictionary, got {T1}'
            return False, msg
        else:
            K1, V1 = get_Dict_or_CustomDict_Key_Value(T1)
            # TODO: to finish — K1/V1 are not yet checked against K2/V2.
            return True, ''
    if is_dataclass(T2):
        # structural check: T1 must provide every annotated field of T2
        # with a compatible annotation
        if not is_dataclass(T1):
            msg = f'Expecting dataclass to match to {T2}, got {T1}'
            return False, msg
        h1 = get_type_hints(T1)
        h2 = get_type_hints(T2)
        for k, v2 in h2.items():
            if not k in h1:  # and not optional...
                msg = f'Type {T2}\n requires field "{k}" \n of type {v2} \n but {T1} does not have it. '
                return False, msg
            v1 = h1[k]
            ok, why = can_be_used_as(v1, v2)
            if not ok:
                msg = f'Type {T2}\n requires field "{k}"\n of type {v2} \n but {T1}\n has annotated it as {v1}\n which cannot be used. '
                msg += '\n\n' + indent(why, '> ')
                return False, msg
        return True, ''
    if is_union(T2):
        # T1 is usable if it matches any member of the union
        for t in get_union_types(T2):
            ok, why = can_be_used_as(T1, t)
            if ok:
                return True, ''
        msg = f'Cannot use {T1} as any of {T2}'
        return False, msg
    if is_optional(T2):
        t = get_optional_type(T2)
        return can_be_used_as(T1, t)
    if isinstance(T1, type) and isinstance(T2, type):
        # Bug fix: removed leftover debug print('yes') on the success path.
        if issubclass(T1, T2):
            return True, ''
        else:
            msg = f'Type {T1}\n is not a subclass of {T2}'
            return False, msg
    msg = f'{T1} ? {T2}'
    raise NotImplementedError(msg)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/subcheck.py
|
subcheck.py
|
import base64
def encode_bytes_base64(data: bytes, mime=None) -> str:
    """Encode raw bytes as a data-URI string: 'data:<mime>;base64,<payload>'."""
    mime_type = mime if mime is not None else 'binary/octet-stream'
    payload = base64.b64encode(data).decode('ascii')
    return f'data:{mime_type};base64,{payload}'
def is_encoded_bytes_base64(s: str):
    """True iff *s* looks like a data-URI produced by encode_bytes_base64."""
    if not s.startswith('data:'):
        return False
    return 'base64,' in s
def decode_bytes_base64(s: str) -> bytes:
    """Inverse of encode_bytes_base64: extract and decode the payload."""
    assert is_encoded_bytes_base64(s)
    marker = 'base64,'
    payload = s[s.index(marker) + len(marker):]
    return base64.b64decode(payload)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/base64_utils.py
|
base64_utils.py
|
from dataclasses import dataclass
from typing import List
from .test_utils import assert_object_roundtrip
def test_list_1():
    # round-trip a dataclass with a List[int] field through serialization
    @dataclass
    class MyClass:
        f: List[int]
    e = MyClass([1, 2, 3])
    assert_object_roundtrip(e, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_list.py
|
test_list.py
|
from typing import *
import yaml
from nose.tools import assert_equal
from zuper_json.ipce import type_to_schema
from zuper_json.pretty import pprint
from zuper_json.zeneric2 import dataclass
def test_type():
    # specializing a generic dataclass must rewrite nested annotations (X -> A)
    X = TypeVar('X')
    Y = TypeVar('Y')
    class A:
        pass
    @dataclass
    class Another(Generic[Y]):
        data0: Y
    assert_equal('Another[Y]', Another.__name__)
    @dataclass
    class MyClass(Generic[X]):
        another: Another[X]
    # print(MyClass.__annotations__['another'].__annotations__['data0'])
    assert_equal(MyClass.__annotations__['another'].__annotations__['data0'], X)
    C = MyClass[A]
    print(C.__annotations__['another'])
    print(C.__annotations__['another'].__annotations__['data0'])
    assert_equal(C.__annotations__['another'].__annotations__['data0'], A)
    print(C.__annotations__['another'])
    assert_equal(C.__annotations__['another'].__name__, 'Another[A]')
def test_type02():
    # specializing with another TypeVar also rewrites the annotation
    X = TypeVar('X')
    V = TypeVar('V')
    class MyClass(Generic[X]):
        data0: X
    C0 = MyClass
    C1 = MyClass[V]
    print(C0.__annotations__)
    print(C1.__annotations__)
    assert C0.__annotations__['data0'] == X
    assert C1.__annotations__['data0'] == V
def test_type05():
    # self-referencing generic: forward references (plain, Optional-wrapped,
    # and stringified) must all resolve after specialization
    class A:
        pass
    X = TypeVar('X')
    @dataclass
    class MyEntity(Generic[X]):
        guid: str
        forked1: 'MyEntity[X]'
        forked2: Optional['MyEntity[X]']
        forked3: 'Optional[MyEntity[X]]'
    print('%s' % MyEntity)
    print('name: %s' % MyEntity.__name__)
    # resolve_types(MyEntity, locals())
    forked1_X = MyEntity.__annotations__['forked1']
    print(f'forked1_X: {forked1_X!r}')
    forked2_X = MyEntity.__annotations__['forked2']
    print(f'forked2_X: {forked2_X!r}')
    forked3_X = MyEntity.__annotations__['forked3']
    print(f'forked3_X: {forked3_X!r}')
    E = MyEntity[A]
    forked1_A = E.__annotations__['forked1']
    print(f'forked1_A: {forked1_A!r}')
    forked2_A = E.__annotations__['forked2']
    print(f'forked2_A: {forked2_A!r}')
    forked3_A = E.__annotations__['forked3']
    print(f'forked3_A: {forked3_A!r}')
    assert_equal(E.__name__, 'MyEntity[A]')
    # assert_equal(E.__annotations__['parent'].__args__[0].__name__, Entity[Any].__name__)
    print(E.__annotations__['forked1'])
    assert_equal(E.__annotations__['forked1'].__name__, MyEntity[A].__name__)
    print(E.__annotations__['forked2'])
    assert_equal(E.__annotations__['forked2'].__args__[0].__name__, MyEntity[A].__name__)
def test_type06():
    # nested generic specialization: Signed[EntityUpdateProposal[Z]] with Z
    # later bound to a concrete dataclass must propagate through all levels
    @dataclass
    class Values:
        a: int
    Z = TypeVar('Z')
    U = TypeVar('U')
    M = TypeVar('M')
    @dataclass
    class EntityUpdateProposal(Generic[M]):
        proposal: M
    A = EntityUpdateProposal[Z]
    assert_equal(A.__name__ , 'EntityUpdateProposal[Z]')
    assert_equal(A.__annotations__['proposal'], Z)
    @dataclass
    class Signed(Generic[U]):
        value: U
    B = Signed[EntityUpdateProposal[Z]]
    assert_equal(B.__name__ , 'Signed[EntityUpdateProposal[Z]]')
    assert_equal(B.__annotations__['value'].__name__, 'EntityUpdateProposal[Z]')
    @dataclass
    class VersionChainWithAuthors(Generic[Z]):
        # signed_proposals: List[Signed[EntityUpdateProposal[Z]]]
        signed_proposal: Signed[EntityUpdateProposal[Z]]
        # previous: 'Optional[VersionChainWithAuthors[Z]]' = None
    print('**********\n\n\n')
    C = VersionChainWithAuthors[Values]
    pprint('C annotations', C=C, **C.__annotations__)
    assert_equal(C.__name__, 'VersionChainWithAuthors[Values]')
    assert_equal(C.__annotations__['signed_proposal'].__name__, 'Signed[EntityUpdateProposal[Values]]')
    print(yaml.dump(type_to_schema(C, {}, {})))
    #
    # assert_equal(E.__name__, 'Entity[A]')
    # assert_equal(E.__annotations__['parent'].__args__[0].__name__, Entity[Any].__name__)
    # pprint('Annotations of E', **E.__annotations__)
    # assert_equal(E.__annotations__['forked'].__args__[0].__name__, Entity[A].__name__)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_extra.py
|
test_extra.py
|
from typing import Optional, Dict, Any, TypeVar
from zuper_commons.text import indent
def pprint(msg=None, **kwargs):
    """Print *kwargs* as a formatted tree, with *msg* as an optional header."""
    formatted = pretty_dict(msg, kwargs)
    print(formatted)
def pretty_dict(head: Optional[str], d: Dict[str, Any], omit_falsy=False, sort_keys=False):
    """Render dict *d* as an indented tree with right-aligned keys.

    head: optional header line; omit_falsy skips falsy values;
    sort_keys orders the keys alphabetically.
    """
    if not d:
        return head + ': (empty dict)' if head else '(empty dict)'
    width = max(len(str(key)) for key in d)
    keys = sorted(d) if sort_keys else list(d)
    lines = []
    for key in keys:
        value = d[key]
        if key == '__builtins__':
            value = '(hiding __builtins__)'
        skip = (omit_falsy and not value
                and not isinstance(value, int)
                and not hasattr(value, 'conclusive'))
        if skip:
            continue
        prefix = (str(key) + ':').rjust(width + 1) + ' '
        if isinstance(value, TypeVar):
            # noinspection PyUnresolvedReferences
            value = f'TypeVar({value.__name__}, bound={value.__bound__})'
        if isinstance(value, dict):
            value = pretty_dict('', value)
        lines.append(indent(value, '', prefix))
    return (head + ':\n' if head else '') + indent("\n".join(lines), "│ ")
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/pretty.py
|
pretty.py
|
import dataclasses
import typing
from datetime import datetime
from typing import TypeVar, Generic, Dict
import termcolor
from .constants import PYTHON_36
from .my_dict import make_dict
from .zeneric2 import ZenericFix, resolve_types
if PYTHON_36: # pragma: no cover
from typing import GenericMeta
previous_getitem = GenericMeta.__getitem__
else:
from typing import _GenericAlias
previous_getitem = _GenericAlias.__getitem__
class Alias1:
    # Replacement for typing's alias __getitem__: routes Dict[K, V] with a
    # non-str key type to a custom dict subclass via make_dict; everything
    # else falls through to the original implementation.
    def __getitem__(self, params):
        if self is typing.Dict:
            K, V = params
            if K is not str:
                return make_dict(K, V)
        # noinspection PyArgumentList
        return previous_getitem(self, params)
# Install the monkey-patches: on Python 3.6 subscription goes through
# GenericMeta, so we wrap it; on 3.7+ we patch Generic.__class_getitem__,
# _GenericAlias.__getitem__ and Dict.__getitem__ directly.
if PYTHON_36:  # pragma: no cover
    from typing import GenericMeta
    old_one = GenericMeta.__getitem__
    class P36Generic:
        def __getitem__(self, params):
            # pprint('P36', params=params, self=self)
            if self is typing.Generic:
                return ZenericFix.__class_getitem__(params)
            if self is typing.Dict:
                K, V = params
                if K is not str:
                    return make_dict(K, V)
            # noinspection PyArgumentList
            return old_one(self, params)
    GenericMeta.__getitem__ = P36Generic.__getitem__
else:
    Generic.__class_getitem__ = ZenericFix.__class_getitem__
    _GenericAlias.__getitem__ = Alias1.__getitem__
Dict.__getitem__ = Alias1.__getitem__
def _cmp_fn_loose(name, op, self_tuple, other_tuple):
    # Like dataclasses._cmp_fn (private API) but compares class *names*
    # instead of class identity, so re-created classes still compare equal.
    body = ['if other.__class__.__name__ == self.__class__.__name__:',
            f' return {self_tuple}{op}{other_tuple}',
            'return NotImplemented']
    fn = dataclasses._create_fn(name, ('self', 'other'), body)
    fn.__doc__ = """
    This is a loose comparison function.
    Instead of comparing:
        self.__class__ is other.__class__
    we compare:
        self.__class__.__name__ == other.__class__.__name__
    """
    return fn
# globally swap in the loose comparison generator
dataclasses._cmp_fn = _cmp_fn_loose
def typevar__repr__(self):
    """Compact TypeVar repr: variance prefix (+/-/~) + name, '<bound' suffix."""
    if self.__covariant__:
        marker = '+'
    elif self.__contravariant__:
        marker = '-'
    else:
        marker = '~'
    text = marker + self.__name__
    bound = self.__bound__
    if bound:
        bound_name = bound.__name__ if isinstance(bound, type) else str(bound)
        text += f'<{bound_name}'
    return text
setattr(TypeVar, '__repr__', typevar__repr__)
NAME_ARG = '__name_arg__'
# need to have this otherwise it's not possible to say that two types are the same
class Reg:
    # cache of synthesized NamedArg classes, keyed by "<type> <name>"
    already = {}
def MyNamedArg(x: type, name):
    """Create (and cache) a subclass of *x* tagged with the argument *name*.

    Used as a replacement for mypy_extensions.NamedArg so that the same
    (type, name) pair always yields the identical class object.
    """
    key = f'{x} {name}'
    if key in Reg.already:
        return Reg.already[key]
    # bug fix: original read getattr(x, '__metaclass_', type) — a typo'd
    # ('__metaclass_') Python-2 idiom; in Python 3 the metaclass is type(x),
    # which also avoids metaclass conflicts for classes with custom metas.
    meta = type(x)
    d = {NAME_ARG: name, 'original': x}
    cname = x.__name__
    res = meta(cname, (x,), d)
    res.__module__ = 'typing'
    Reg.already[key] = res
    return res
import mypy_extensions
# monkey-patch: make mypy_extensions.NamedArg return real, cached subclasses
setattr(mypy_extensions, 'NamedArg', MyNamedArg)
from dataclasses import dataclass as original_dataclass
class RegisteredClasses:
    """Global registry of classes created via my_dataclass, keyed by (module, name)."""
    # klasses: Dict[str, type] = {}
    klasses = {}
def remember_created_class(res):
    """Record *res* in the registry under its (module, name) pair."""
    key = (res.__module__, res.__name__)
    RegisteredClasses.klasses[key] = res
def my_dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False,
                 unsafe_hash=False, frozen=False):
    """Drop-in @dataclass replacement deferring to my_dataclass_.

    Supports both the bare @dataclass and the @dataclass(...) forms.
    """
    def decorate(cls):
        return my_dataclass_(cls, init=init, repr=repr, eq=eq, order=order,
                             unsafe_hash=unsafe_hash, frozen=frozen)
    # called with parentheses: hand back the decorator itself
    if _cls is None:
        return decorate
    # called directly on the class
    return decorate(_cls)
def my_dataclass_(_cls, *, init=True, repr=True, eq=True, order=False,
                  unsafe_hash=False, frozen=False):
    # Core of the dataclass replacement: builds the real dataclass, registers
    # it, resolves forward references, and installs the hook-based repr/str.
    unsafe_hash = True  # NOTE(review): forces hashability regardless of caller's flag
    # pprint('my_dataclass', _cls=_cls)
    res = original_dataclass(_cls, init=init, repr=repr, eq=eq, order=order,
                             unsafe_hash=unsafe_hash, frozen=frozen)
    remember_created_class(res)
    assert dataclasses.is_dataclass(res)
    refs = getattr(_cls, '__depends__', ())
    resolve_types(res, refs=refs)
    def __repr__(self):
        return DataclassHooks.dc_repr(self)
    def __str__(self):
        return DataclassHooks.dc_str(self)
    setattr(res, '__repr__', __repr__)
    setattr(res, '__str__', __str__)
    # res.__doc__  = res.__doc__.replace(' ', '')
    return res
def nice_str(self):
    # NOTE(review): deliberately delegates to dc_repr, not dc_str — since
    # DataclassHooks.dc_str is this very function, calling dc_str would recurse.
    return DataclassHooks.dc_repr(self)
def blue(x):
    # render x in blue using terminal escape codes
    return termcolor.colored(x, 'blue')
def nice_repr(self):
    # Colored one-line repr: ClassName(field=value, ...) iterating the
    # class's __annotations__ and compacting each value.
    s = termcolor.colored(type(self).__name__, 'red')
    s += blue('(')
    ss = []
    annotations = getattr(type(self), '__annotations__', {})
    for k in annotations:
        a = getattr(self, k)
        a_s = debug_print_compact(a)
        eq = blue('=')
        k = termcolor.colored(k, attrs=['dark'])
        ss.append(f'{k}{eq}{a_s}')
    s += blue(', ').join(ss)
    s += blue(')')
    return s
def debug_print_compact(x):
    """One-line debug rendering: special-cases str/bytes/datetime, repr otherwise."""
    if isinstance(x, str):
        return debug_print_str(x, '')
    if isinstance(x, bytes):
        return debug_print_bytes(x)
    if isinstance(x, datetime):
        return debug_print_date(x, '')
    return repr(x)
def debug_print_str(x: str, prefix: str):
    """Abbreviate well-known string shapes (hashes, PEM keys, tracebacks)."""
    if x.startswith('Qm'):
        return termcolor.colored('Qm...' + x[-4:] + ' ' + prefix, 'magenta')
    if x.startswith('zd'):
        return termcolor.colored('zd...' + x[-4:] + ' ' + prefix, 'magenta')
    if x.startswith('-----BEGIN'):
        return termcolor.colored('PEM key' + ' ' + prefix, 'yellow')
    if x.startswith('Traceback'):
        colored_lines = [termcolor.colored(line, 'red') for line in x.split('\n')]
        if colored_lines:
            colored_lines[0] += ' ' + prefix
        return "\n".join(colored_lines)
    return x.__repr__() + ' ' + prefix
def debug_print_date(x: datetime, prefix=None):
    """Render a datetime as 'YYYY-MM-DD HH:MM:SS' in yellow, plus optional prefix."""
    stamp = x.isoformat()[:19].replace('T', ' ')
    suffix = ' ' + prefix if prefix else ''
    return termcolor.colored(stamp, 'yellow') + suffix
def debug_print_bytes(x: bytes):
    """Render bytes as '<len> bytes <repr of first 10>' in yellow."""
    summary = f'{len(x)} bytes ' + repr(x[:10])
    return termcolor.colored(summary, 'yellow')
class DataclassHooks:
    # swappable hooks used by my_dataclass_'s installed __repr__/__str__
    dc_repr = nice_repr
    dc_str = nice_str
# globally replace dataclasses.dataclass with the registering variant
setattr(dataclasses, 'dataclass', my_dataclass)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/monkey_patching_typing.py
|
monkey_patching_typing.py
|
from nose.tools import raises
from .special_strings import Email
@raises(ValueError)
def test_email():
    # a malformed address must be rejected by the Email validating type
    Email('aaa')
def test_email_ok():
    # a well-formed address is accepted
    Email('[email protected]')
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_special_string.py
|
test_special_string.py
|
from dataclasses import dataclass
from decimal import Decimal
from .test_utils import assert_object_roundtrip
def test_decimal1():
    # round-trip Decimal values as dataclass fields
    @dataclass
    class MyClass:
        f: Decimal
    e = MyClass(Decimal(1.0))
    assert_object_roundtrip(e, {})
    e = MyClass(Decimal('0.3'))
    assert_object_roundtrip(e, {})
def test_decimal2():
    # round-trip a bare top-level Decimal
    f = Decimal('3.14')
    assert_object_roundtrip(f, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_decimal.py
|
test_decimal.py
|
import datetime
import hashlib
import inspect
import traceback
import typing
from dataclasses import make_dataclass, _FIELDS, field, Field, dataclass, is_dataclass
from decimal import Decimal
from numbers import Number
from typing import Type, Dict, Any, TypeVar, Optional, ClassVar, cast, Union, \
Generic, List, Tuple, Callable
import base58
import cbor2
import numpy as np
import yaml
from frozendict import frozendict
from jsonschema.validators import validator_for, validate
from mypy_extensions import NamedArg
from nose.tools import assert_in
from zuper_commons.text import indent
from zuper_commons.types import check_isinstance
from .annotations_tricks import is_optional, get_optional_type, is_forward_ref, get_forward_ref_arg, is_Any, \
is_ClassVar, get_ClassVar_arg, is_Type, is_Callable, get_Callable_info, get_union_types, is_union, is_Dict, \
get_Dict_name_K_V, is_Tuple, get_List_arg, is_List, is_Set, get_Set_arg, get_Set_name_V
from .constants import X_PYTHON_MODULE_ATT, ATT_PYTHON_NAME, SCHEMA_BYTES, GlobalsDict, JSONSchema, _SpecialForm, \
ProcessingDict, EncounteredDict, SCHEMA_ATT, SCHEMA_ID, JSC_TYPE, JSC_STRING, JSC_NUMBER, JSC_OBJECT, JSC_TITLE, \
JSC_ADDITIONAL_PROPERTIES, JSC_DESCRIPTION, JSC_PROPERTIES, BINDINGS_ATT, JSC_INTEGER, ID_ATT, \
JSC_DEFINITIONS, REF_ATT, JSC_REQUIRED, X_CLASSVARS, X_CLASSATTS, JSC_BOOL, PYTHON_36, JSC_TITLE_NUMPY, JSC_NULL, \
JSC_TITLE_BYTES, JSC_ARRAY, JSC_ITEMS, JSC_DEFAULT, GENERIC_ATT2, JSC_TITLE_DECIMAL, JSC_TITLE_DATETIME, \
JSC_TITLE_FLOAT, JSC_TITLE_CALLABLE, JSC_TITLE_TYPE, SCHEMA_CID, JSC_PROPERTY_NAMES
from .my_dict import make_dict, CustomDict, make_set, CustomSet, get_set_Set_or_CustomSet_Value
from .my_intersection import is_Intersection, get_Intersection_args, Intersection
from .numpy_encoding import numpy_from_dict, dict_from_numpy
from .pretty import pretty_dict
from .subcheck import can_be_used_as
from .types import IPCE
from .zeneric2 import get_name_without_brackets, replace_typevars, loglevel, RecLogger
# new interface
def ipce_from_object(ob, globals_: GlobalsDict = None, suggest_type=None, with_schema=True) -> IPCE:
    """Public entry point: serialize *ob* to IPCE; globals_ defaults to {}."""
    return object_to_ipce(ob, globals_ or {}, suggest_type=suggest_type, with_schema=with_schema)
def object_to_ipce(ob, globals_: GlobalsDict, suggest_type=None, with_schema=True) -> IPCE:
    # Serialize *ob* via object_to_ipce_; validation against the embedded
    # schema is intentionally disabled (see comment below).
    # logger.debug(f'object_to_ipce({ob})')
    res = object_to_ipce_(ob, globals_, suggest_type=suggest_type, with_schema=with_schema)
    # print(indent(json.dumps(res, indent=3), '|', ' res: -'))
    if isinstance(res, dict) and SCHEMA_ATT in res:
        schema = res[SCHEMA_ATT]
        # print(json.dumps(schema, indent=2))
        # print(json.dumps(res, indent=2))
        # currently disabled because JSONSchema insists on resolving all the URIs
        if False:
            validate(res, schema)
        #
        # try:
        #
        # except:  # pragma: no cover
        #     # cannot generate this if there are no bugs
        #     fn = 'error.json'
        #     with open(fn, 'w') as f:
        #         f.write(json.dumps(res, indent=2))
        #     raise
    return res
def object_to_ipce_(ob,
                    globals_: GlobalsDict,
                    with_schema: bool,
                    suggest_type: Type = None,
                    ) -> IPCE:
    # Dispatch on the runtime type of *ob* and serialize it to IPCE.
    trivial = (bool, int, str, float, type(None), bytes, Decimal, datetime.datetime)
    if isinstance(ob, datetime.datetime):
        if not ob.tzinfo:
            msg = 'Cannot serialize dates without a timezone.'
            raise ValueError(msg)
    if isinstance(ob, trivial):
        for T in trivial:
            if isinstance(ob, T):
                # reject values that contradict an explicit suggested type
                if (suggest_type is not None) and (suggest_type is not T) and (not is_Any(suggest_type)) and \
                        (not can_be_used_as(T, suggest_type)[0]):
                    msg = f'Found object of type {type(ob)!r} when expected a {suggest_type!r}'
                    raise ValueError(msg)
                return ob
    if isinstance(ob, list):
        if is_List(suggest_type):
            suggest_type_l = get_List_arg(suggest_type)
        else:
            # XXX should we warn?
            suggest_type_l = None  # XXX
        return [object_to_ipce(_, globals_, suggest_type=suggest_type_l,
                               with_schema=with_schema) for _ in ob]
    if isinstance(ob, tuple):
        # NOTE(review): tuples ignore suggest_type entirely; element types
        # from Tuple[...] are not propagated — confirm if intended.
        suggest_type_l = None  # XXX
        return [object_to_ipce(_, globals_, suggest_type=suggest_type_l,
                               with_schema=with_schema) for _ in ob]
    if isinstance(ob, set):
        # if is_Set(suggest_type):
        #     suggest_type_l = get_Set_arg(suggest_type)
        # else:
        #     suggest_type_l = None
        #
        # return [object_to_ipce(_, globals_, suggest_type=suggest_type_l,
        #                        with_schema=with_schema) for _ in ob]
        return set_to_ipce(ob, globals_, suggest_type=suggest_type, with_schema=with_schema)
    if isinstance(ob, (dict, frozendict)):
        return dict_to_ipce(ob, globals_, suggest_type=suggest_type, with_schema=with_schema)
    if isinstance(ob, type):
        return type_to_schema(ob, globals_, processing={})
    if is_Any(ob) or is_List(ob) or is_Dict(ob):
        # TODO: put more here
        return type_to_schema(ob, globals_, processing={})
    if isinstance(ob, np.ndarray):
        res = dict_from_numpy(ob)
        if with_schema:
            res[SCHEMA_ATT] = type_numpy_to_schema(type(ob), globals_, {})
        return res
    if is_dataclass(ob):
        return serialize_dataclass(ob, globals_, with_schema=with_schema)
    msg = f'I do not know a way to convert object of type {type(ob)}.'
    raise NotImplementedError(msg)
import dataclasses
def serialize_dataclass(ob, globals_, with_schema: bool):
    # Serialize a dataclass instance field-by-field, using each field's
    # (resolved) annotation as the suggested type; ClassVars are skipped and
    # None-valued Optional fields are omitted entirely.
    globals_ = dict(globals_)
    res = {}
    T = type(ob)
    if with_schema:
        res[SCHEMA_ATT] = type_to_schema(T, globals_)
    globals_[T.__name__] = T
    for f in dataclasses.fields(ob):
        k = f.name
        suggest_type = f.type
        if not hasattr(ob, k):  # pragma: no cover
            assert False, (ob, k)
        v = getattr(ob, k)
        try:
            suggest_type = resolve_all(suggest_type, globals_)
            if is_ClassVar(suggest_type):
                continue
            if v is None:
                if is_optional(suggest_type):
                    continue
            if is_optional(suggest_type):
                suggest_type = get_optional_type(suggest_type)
            res[k] = object_to_ipce(v, globals_,
                                    suggest_type=suggest_type, with_schema=with_schema)
        except (KeyboardInterrupt, RecursionError):
            raise
        except BaseException as e:
            msg = f'Cannot serialize attribute {k} of type {type(v)}.'
            msg += f'\nThe schema for {type(ob)} says that it should be of type {f.type}.'
            raise ValueError(msg) from e
    return res
def resolve_all(T, globals_):
    """Resolve strings and forward references in T to actual types.

    Returns either a type or a generic alias.
    """
    if isinstance(T, type):
        return T
    if isinstance(T, str):
        return eval_just_string(T, globals_)
    if is_forward_ref(T):
        return resolve_all(get_forward_ref_arg(T), globals_)
    if is_optional(T):
        inner = resolve_all(get_optional_type(T), globals_)
        return Optional[inner]
    # anything else is considered already resolved
    return T
def dict_to_ipce(ob: dict, globals_: GlobalsDict, suggest_type: Optional[type], with_schema: bool):
    # Serialize a dict. With str keys the mapping is kept directly; otherwise
    # each entry is wrapped in FakeValues[K, V] and keyed by a content hash
    # (or the stringified int for int keys).
    # assert suggest_type is not None
    res = {}
    # pprint('suggest_type ', suggest_type=suggest_type)
    if is_Dict(suggest_type):
        # noinspection PyUnresolvedReferences
        K, V = suggest_type.__args__
    elif isinstance(suggest_type, type) and issubclass(suggest_type, CustomDict):
        K, V = suggest_type.__dict_type__
    elif (suggest_type is None) or is_Any(suggest_type):
        # infer the key type: str only if every key is exactly str
        all_str = all(type(_) is str for _ in ob)
        if all_str:
            K = str
        else:
            K = Any
        V = Any
        suggest_type = Dict[K, V]
    else:  # pragma: no cover
        assert False, suggest_type
    if with_schema:
        res[SCHEMA_ATT] = type_to_schema(suggest_type, globals_)
    if isinstance(K, type) and issubclass(K, str):
        for k, v in ob.items():
            res[k] = object_to_ipce(v, globals_, suggest_type=V, with_schema=with_schema)
    else:
        FV = FakeValues[K, V]
        for k, v in ob.items():
            kj = object_to_ipce(k, globals_)
            if isinstance(k, int):
                h = str(k)
            else:
                h = get_sha256_base58(cbor2.dumps(kj)).decode('ascii')
            fv = FV(k, v)
            res[h] = object_to_ipce(fv, globals_, with_schema=with_schema)
    return res
def set_to_ipce(ob: set, globals_: GlobalsDict, suggest_type: Optional[type], with_schema: bool):
    # Serialize a set as a dict keyed by the base58(sha256) hash of each
    # serialized element, making the encoding order-independent.
    if is_Set(suggest_type):
        V = get_Set_arg(suggest_type)
    else:
        V = None
    res = {}
    if with_schema:
        res[SCHEMA_ATT] = type_to_schema(suggest_type, globals_)
    for v in ob:
        vj = object_to_ipce(v, globals_, with_schema=with_schema,
                            suggest_type=V)
        h = get_sha256_base58(cbor2.dumps(vj)).decode('ascii')
        res[h] = vj
    return res
def get_sha256_base58(contents):
    """Return the SHA-256 digest of *contents*, base58-encoded (as bytes).

    Fix: removed the redundant function-local `import hashlib` — hashlib is
    already imported at module level.
    """
    digest = hashlib.sha256(contents).digest()
    return base58.b58encode(digest)
# map id(ipce value) -> content id; consulted in ipce_to_object (the
# propagation code there is currently commented out)
ids2cid = {}
@loglevel
def ipce_to_object(mj: IPCE,
                   global_symbols,
                   encountered: Optional[dict] = None,
                   expect_type: Optional[type] = None) -> object:
    # Thin wrapper over ipce_to_object_; the content-id propagation below is
    # intentionally disabled (no-op branch kept for reference).
    res = ipce_to_object_(mj, global_symbols, encountered, expect_type)
    if id(mj) in ids2cid:
        pass
        # ids2cid[id(res)] = ids2cid[id(mj)]
        # setattr(res, '__ipde_cid_attr__', ids2cid[id(mj)])
    return res
def ipce_to_object_(mj: IPCE,
global_symbols,
encountered: Optional[dict] = None,
expect_type: Optional[type] = None) -> object:
encountered = encountered or {}
# logger.debug(f'ipce_to_object expect {expect_type} mj {mj}')
trivial = (int, float, bool, datetime.datetime, Decimal, bytes, str)
if isinstance(mj, trivial):
T = type(mj)
if expect_type is not None:
ok, why = can_be_used_as(T, expect_type)
if not ok:
msg = 'Found a {T}, wanted {expect_type}'
raise ValueError(msg)
return mj
if isinstance(mj, list):
if expect_type and is_Tuple(expect_type):
# noinspection PyTypeChecker
return deserialize_tuple(expect_type, mj, global_symbols, encountered)
elif expect_type and is_List(expect_type):
suggest = get_List_arg(expect_type)
seq = [ipce_to_object(_, global_symbols, encountered, expect_type=suggest) for _ in mj]
return seq
else:
suggest = None
seq = [ipce_to_object(_, global_symbols, encountered, expect_type=suggest) for _ in mj]
return seq
if mj is None:
if expect_type is None:
return None
elif expect_type is type(None):
return None
elif is_optional(expect_type):
return None
else:
msg = f'The value is None but the expected type is {expect_type}.'
raise TypeError(msg) # XXX
if expect_type is np.ndarray:
return numpy_from_dict(mj)
assert isinstance(mj, dict), type(mj)
if mj.get(SCHEMA_ATT, '') == SCHEMA_ID:
schema = cast(JSONSchema, mj)
return schema_to_type(schema, global_symbols, encountered)
if SCHEMA_ATT in mj:
sa = mj[SCHEMA_ATT]
K = schema_to_type(sa, global_symbols, encountered)
# logger.debug(f' loaded K = {K} from {mj}')
else:
if expect_type is not None:
# logger.debug('expect_type = %s' % expect_type)
# check_isinstance(expect_type, type)
K = expect_type
else:
msg = f'Cannot find a schema and expect_type=None.\n{mj}'
raise ValueError(msg)
# assert isinstance(K, type), K
if is_optional(K):
assert mj is not None # excluded before
K = get_optional_type(K)
return ipce_to_object(mj,
global_symbols,
encountered,
expect_type=K)
if (isinstance(K, type) and issubclass(K, dict)) or is_Dict(K) or \
(isinstance(K, type) and issubclass(K, CustomDict)):
return deserialize_Dict(K, mj, global_symbols, encountered)
if (isinstance(K, type) and issubclass(K, set)) or is_Set(K) or \
(isinstance(K, type) and issubclass(K, CustomSet)):
return deserialize_Set(K, mj, global_symbols, encountered)
if is_dataclass(K):
return deserialize_dataclass(K, mj, global_symbols, encountered)
if is_union(K):
errors = []
for T in get_union_types(K):
try:
return ipce_to_object(mj,
global_symbols,
encountered,
expect_type=T)
except KeyboardInterrupt:
raise
except BaseException as e:
errors.append(e)
msg = f'Cannot deserialize with any of {get_union_types(K)}'
msg += '\n'.join(str(e) for e in errors)
raise Exception(msg)
if is_Any(K):
msg = f'Not implemented\n{mj}'
raise NotImplementedError(msg)
assert False, (type(K), K, mj, expect_type) # pragma: no cover
def deserialize_tuple(expect_type, mj, global_symbols, encountered):
    """Deserialize list *mj* into a tuple matching *expect_type*.

    Generalized: also supports variadic tuples Tuple[X, ...], where every
    element shares the same expected type. The original indexed
    __args__[i] directly and failed (IndexError / Ellipsis as a type)
    for variadic or longer-than-declared tuples.
    """
    args = expect_type.__args__
    variadic = len(args) == 2 and args[1] is Ellipsis
    seq = []
    for i, ob in enumerate(mj):
        expect_type_i = args[0] if variadic else args[i]
        seq.append(ipce_to_object(ob, global_symbols, encountered, expect_type=expect_type_i))
    return tuple(seq)
def deserialize_dataclass(K, mj, global_symbols, encountered):
    # Deserialize dict *mj* into an instance of dataclass K: each annotated
    # field is resolved and recursively deserialized; missing Optional fields
    # default to None, missing required fields raise ValueError.
    global_symbols = dict(global_symbols)
    global_symbols[K.__name__] = K
    # logger.debug(global_symbols)
    # logger.debug(f'Deserializing object of type {K}')
    # logger.debug(f'mj: \n' + json.dumps(mj, indent=2))
    # some data classes might have no annotations ("Empty")
    anns = getattr(K, '__annotations__', {})
    if not anns:
        pass
        # logger.warning(f'No annotations for class {K}')
    # pprint(f'annotations: {anns}')
    attrs = {}
    for k, v in mj.items():
        if k in anns:
            expect_type = resolve_all(anns[k], global_symbols)
            if is_optional(expect_type):
                expect_type = get_optional_type(expect_type)
            if inspect.isabstract(expect_type):
                msg = f'Trying to instantiate abstract class for field "{k}" of class {K}'
                msg += f'\n annotation = {anns[k]}'
                msg += f'\n expect_type = {expect_type}'
                msg += f'\n\n%s' % indent(yaml.dump(mj), ' > ')
                raise TypeError(msg)
            try:
                attrs[k] = ipce_to_object(v, global_symbols, encountered, expect_type=expect_type)
            except KeyboardInterrupt:
                raise
            except BaseException as e:
                msg = f'Cannot deserialize attribute {k} (expect: {expect_type})'
                msg += f'\nvalue: {v!r}'
                msg += '\n\n' + indent(traceback.format_exc(), '| ')
                raise TypeError(msg) from e
    # second pass: account for annotated fields absent from the data
    for k, T in anns.items():
        T = resolve_all(T, global_symbols)
        if is_ClassVar(T):
            continue
        if not k in mj:
            msg = f'Cannot find field {k!r} in data. Know {sorted(mj)}'
            if is_optional(T):
                attrs[k] = None
                pass
            else:
                raise ValueError(msg)
    try:
        return K(**attrs)
    except TypeError as e:  # pragma: no cover
        msg = f'Cannot instantiate type with attrs {attrs}:\n{K}'
        msg += f'\n\n Bases: {K.__bases__}'
        anns = getattr(K, '__annotations__', 'none')
        msg += f"\n{anns}"
        df = getattr(K, '__dataclass_fields__', 'none')
        # noinspection PyUnresolvedReferences
        msg += f'\n{df}'
        msg += f'because:\n{e}'  # XXX
        raise TypeError(msg) from e
def deserialize_Dict(D, mj, global_symbols, encountered):
    # Deserialize dict *mj* into a dict/CustomDict of type D. Non-str keys
    # are stored as FakeValues wrappers and unpacked via real_key/value.
    if isinstance(D, type) and issubclass(D, CustomDict):
        K, V = D.__dict_type__
        ob = D()
    elif is_Dict(D):
        K, V = D.__args__
        D2 = make_dict(K, V)
        ob = D2()
    elif isinstance(D, type) and issubclass(D, dict):
        K, V = Any, Any
        ob = D()
    else:  # pragma: no cover
        msg = pretty_dict("not sure", dict(D=D))
        raise NotImplementedError(msg)
    attrs = {}
    FV = FakeValues[K, V]
    for k, v in mj.items():
        if k == SCHEMA_ATT:
            continue
        # NOTE(review): issubclass(K, str) raises if K is Any (not a class)
        # — confirm K is always a type on this path.
        if issubclass(K, str):
            attrs[k] = ipce_to_object(v, global_symbols, encountered, expect_type=V)
        else:
            attrs[k] = ipce_to_object(v, global_symbols, encountered, expect_type=FV)
    if isinstance(K, type) and issubclass(K, str):
        ob.update(attrs)
        return ob
    else:
        for k, v in attrs.items():
            # noinspection PyUnresolvedReferences
            ob[v.real_key] = v.value
        return ob
def deserialize_Set(D, mj, global_symbols, encountered):
    """Deserialize an IPCE dict (content-hash -> value) into a CustomSet of V."""
    V = get_set_Set_or_CustomSet_Value(D)
    values = set()
    for key, payload in mj.items():
        if key == SCHEMA_ATT:
            continue
        values.add(ipce_to_object(payload, global_symbols, encountered, expect_type=V))
    return make_set(V)(values)
class CannotFindSchemaReference(ValueError):
    # raised when a $ref in a schema cannot be resolved against `encountered`
    pass
class CannotResolveTypeVar(ValueError):
    # raised when a TypeVar cannot be bound during schema processing
    pass
# cache: schema hash -> resolved type, to avoid re-resolving identical schemas
schema_cache: Dict[Any, Union[type, _SpecialForm]] = {}
def schema_hash(k):
    # stable cache key: sha256 of the CBOR encoding of k
    ob_cbor = cbor2.dumps(k)
    ob_cbor_hash = hashlib.sha256(ob_cbor).digest()
    return ob_cbor_hash
def schema_to_type(schema0: JSONSchema,
                   global_symbols: Dict,
                   encountered: Dict) -> Union[type, _SpecialForm]:
    # Cached wrapper over schema_to_type_; the cache key includes the
    # currently-known symbols so partial resolutions don't poison the cache.
    h = schema_hash([schema0, list(global_symbols), list(encountered)])
    if h in schema_cache:
        # logger.info(f'cache hit for {schema0}')
        return schema_cache[h]
    res = schema_to_type_(schema0, global_symbols, encountered)
    if ID_ATT in schema0:
        schema_id = schema0[ID_ATT]
        encountered[schema_id] = res
        # print(f'Found {schema_id} -> {res}')
    schema_cache[h] = res
    return res
def schema_to_type_(schema0: JSONSchema, global_symbols: Dict, encountered: Dict) -> Union[type, _SpecialForm]:
    # Core JSON-Schema -> Python type conversion: handles $ref, anyOf/allOf,
    # then dispatches on the JSON type and title conventions used by
    # type_to_schema.
    # pprint('schema_to_type_', schema0=schema0)
    encountered = encountered or {}
    info = dict(global_symbols=global_symbols, encountered=encountered)
    check_isinstance(schema0, dict)
    schema = cast(JSONSchema, dict(schema0))
    # noinspection PyUnusedLocal
    metaschema = schema.pop(SCHEMA_ATT, None)
    schema_id = schema.pop(ID_ATT, None)
    if schema_id:
        if not JSC_TITLE in schema:
            pass
        else:
            cls_name = schema[JSC_TITLE]
            encountered[schema_id] = cls_name
    if schema == {}:
        return Any
    if REF_ATT in schema:
        r = schema[REF_ATT]
        if r == SCHEMA_ID:
            # self-referencing schema: this is the schema for `type` itself
            if schema.get(JSC_TITLE, '') == 'type':
                return type
            else:
                return Type
        if r in encountered:
            return encountered[r]
        else:
            m = f'Cannot evaluate reference {r!r}'
            msg = pretty_dict(m, info)
            raise CannotFindSchemaReference(msg)
    if "anyOf" in schema:
        options = schema["anyOf"]
        args = [schema_to_type(_, global_symbols, encountered) for _ in options]
        return Union[tuple(args)]
    if "allOf" in schema:
        options = schema["allOf"]
        args = [schema_to_type(_, global_symbols, encountered) for _ in options]
        res = Intersection[tuple(args)]
        return res
    jsc_type = schema.get(JSC_TYPE, None)
    jsc_title = schema.get(JSC_TITLE, '-not-provided-')
    if jsc_title == JSC_TITLE_NUMPY:
        return np.ndarray
    if jsc_type == JSC_STRING:
        if jsc_title == JSC_TITLE_BYTES:
            return bytes
        elif jsc_title == JSC_TITLE_DATETIME:
            return datetime.datetime
        elif jsc_title == JSC_TITLE_DECIMAL:
            return Decimal
        else:
            return str
    elif jsc_type == JSC_NULL:
        return type(None)
    elif jsc_type == JSC_BOOL:
        return bool
    elif jsc_type == JSC_NUMBER:
        if jsc_title == JSC_TITLE_FLOAT:
            return float
        else:
            return Number
    elif jsc_type == JSC_INTEGER:
        return int
    elif jsc_type == JSC_OBJECT:
        # objects: distinguish callables, dicts, sets, generics, dataclasses
        if jsc_title == JSC_TITLE_CALLABLE:
            return schema_to_type_callable(schema, global_symbols, encountered)
        elif jsc_title.startswith('Dict'):
            return schema_dict_to_DictType(schema, global_symbols, encountered)
        elif jsc_title.startswith('Set'):
            return schema_dict_to_SetType(schema, global_symbols, encountered)
        elif JSC_DEFINITIONS in schema:
            return schema_to_type_generic(schema, global_symbols, encountered)
        elif ATT_PYTHON_NAME in schema:
            tn = schema[ATT_PYTHON_NAME]
            if tn in global_symbols:
                return global_symbols[tn]
            else:
                # logger.debug(f'did not find {tn} in {global_symbols}')
                return schema_to_type_dataclass(schema, global_symbols, encountered, schema_id=schema_id)
        assert False, schema  # pragma: no cover
    elif jsc_type == JSC_ARRAY:
        return schema_array_to_type(schema, global_symbols, encountered)
    assert False, schema  # pragma: no cover
def schema_array_to_type(schema, global_symbols, encountered):
    # Arrays: a list of item schemas -> fixed Tuple; a single item schema ->
    # variadic Tuple (if titled 'Tuple') or List; PYTHON_36 uses the legacy
    # subscription syntax.
    items = schema['items']
    if isinstance(items, list):
        assert len(items) > 0
        args = tuple([schema_to_type(_, global_symbols, encountered) for _ in items])
        if PYTHON_36:  # pragma: no cover
            return typing.Tuple[args]
        else:
            # noinspection PyArgumentList
            return Tuple.__getitem__(args)
    else:
        if 'Tuple' in schema[JSC_TITLE]:
            args = schema_to_type(items, global_symbols, encountered)
            if PYTHON_36:  # pragma: no cover
                return typing.Tuple[args, ...]
            else:
                # noinspection PyArgumentList
                return Tuple.__getitem__((args, Ellipsis))
        else:
            args = schema_to_type(items, global_symbols, encountered)
            if PYTHON_36:  # pragma: no cover
                return List[args]
            else:
                # noinspection PyArgumentList
                return List[args]
def schema_dict_to_DictType(schema, global_symbols, encountered):
    # Dict schema: value type comes from additionalProperties; a FakeValues
    # value type signals a non-str key encoding and is unwrapped here.
    K = str
    V = schema_to_type(schema[JSC_ADDITIONAL_PROPERTIES], global_symbols, encountered)
    # pprint(f'here:', d=dict(V.__dict__))
    # if issubclass(V, FakeValues):
    if isinstance(V, type) and V.__name__.startswith('FakeValues'):
        K = V.__annotations__['real_key']
        V = V.__annotations__['value']
    D = make_dict(K, V)
    # we never put it anyway
    # if JSC_DESCRIPTION in schema:
    #     setattr(D, '__doc__', schema[JSC_DESCRIPTION])
    return D
def schema_dict_to_SetType(schema, global_symbols, encountered):
    """Set schema: element type is taken from additionalProperties."""
    element_type = schema_to_type(schema[JSC_ADDITIONAL_PROPERTIES], global_symbols, encountered)
    return make_set(element_type)
def type_to_schema(T: Any, globals0: dict, processing: ProcessingDict = None) -> JSONSchema:
    """Convert a Python type T into a JSON-Schema dict.

    Wraps type_to_schema_() with: an early-out for types currently being
    processed (recursion guard), special cases for `type` and NoneType,
    MRO-based population of the symbol table, and uniform error reporting.
    """
    # pprint('type_to_schema', T=T)
    globals_ = dict(globals0)  # copy: symbols are added below without affecting the caller
    processing = processing or {}
    try:
        # recursion guard: a type being processed is represented by its placeholder
        if hasattr(T, '__name__') and T.__name__ in processing:
            return processing[T.__name__]
        # res = cast(JSONSchema, {REF_ATT: refname})
        # return res
        if T is type:
            # `type` itself is encoded as a reference to the meta-schema
            res = cast(JSONSchema, {REF_ATT: SCHEMA_ID,
                                    JSC_TITLE: JSC_TITLE_TYPE
                                    # JSC_DESCRIPTION: T.__doc__
                                    })
            return res
        if T is type(None):
            res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID,
                                    JSC_TYPE: JSC_NULL})
            return res
        if isinstance(T, type):
            # make every ancestor class and its TypeVar bindings resolvable by name
            for klass in T.mro():
                if klass.__name__.startswith('Generic'):
                    continue
                if klass is object:
                    continue
                # globals_[klass.__name__] = klass
                globals_[get_name_without_brackets(klass.__name__)] = klass
                bindings = getattr(klass, BINDINGS_ATT, {})
                for k, v in bindings.items():
                    if hasattr(v, '__name__') and v.__name__ not in globals_:
                        globals_[v.__name__] = v
                    globals_[k.__name__] = v
        schema = type_to_schema_(T, globals_, processing)
        check_isinstance(schema, dict)
    except NotImplementedError: # pragma: no cover
        raise
    except (ValueError, AssertionError) as e:
        # re-raise the same exception type with added context
        m = f'Cannot get schema for {T}'
        if hasattr(T, '__name__'):
            m += f' (name = {T.__name__!r})'
        msg = pretty_dict(m, dict( # globals0=globals0,
            # globals=globals_,
            processing=processing))
        # msg += '\n' + traceback.format_exc()
        raise type(e)(msg) from e
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        m = f'Cannot get schema for {T}'
        if hasattr(T, '__name__'):
            m += f' (name = {T.__name__!r})'
            m += f' {T.__name__ in processing}'
        msg = pretty_dict(m, dict( # globals0=globals0,
            # globals=globals_,
            processing=processing))
        raise TypeError(msg) from e
    assert_in(SCHEMA_ATT, schema)
    assert schema[SCHEMA_ATT] in [SCHEMA_ID]
    # assert_equal(schema[SCHEMA_ATT], SCHEMA_ID)
    if schema[SCHEMA_ATT] == SCHEMA_ID:
        # print(yaml.dump(schema))
        # jsonschema validation is disabled; kept for debugging
        if False:
            cls = validator_for(schema)
            cls.check_schema(schema)
    return schema
K = TypeVar('K')
V = TypeVar('V')
@dataclass
class FakeValues(Generic[K, V]):
    """Internal wrapper for one (key, value) entry of a dict whose keys are
    not strings, so the entry can be stored under a JSON string property."""
    real_key: K
    value: V
def dict_to_schema(T, globals_, processing) -> JSONSchema:
    """Encode a Dict[K, V] (or CustomDict subclass) as a JSON object schema.

    String keys map directly; non-string keys are wrapped via FakeValues so
    that the JSON object can still use string property names.
    """
    assert is_Dict(T) or (isinstance(T, type) and issubclass(T, CustomDict))
    if is_Dict(T):
        K, V = T.__args__
    elif issubclass(T, CustomDict):
        K, V = T.__dict_type__
    else: # pragma: no cover
        assert False
    res = cast(JSONSchema, {JSC_TYPE: JSC_OBJECT})
    res[JSC_TITLE] = get_Dict_name_K_V(K, V)
    if isinstance(K, type) and issubclass(K, str):
        res[JSC_PROPERTIES] = {"$schema": {}} # XXX
        res[JSC_ADDITIONAL_PROPERTIES] = type_to_schema(V, globals_, processing)
        res[SCHEMA_ATT] = SCHEMA_ID
        return res
    else:
        res[JSC_PROPERTIES] = {"$schema": {}} # XXX
        props = FakeValues[K, V]
        res[JSC_ADDITIONAL_PROPERTIES] = type_to_schema(props, globals_, processing)
        res[SCHEMA_ATT] = SCHEMA_ID
        return res
def set_to_schema(T, globals_, processing) -> JSONSchema:
    """Encode a Set[V] (or set subclass) as a JSON object schema."""
    assert is_Set(T) or (isinstance(T, type) and issubclass(T, set))
    V = get_set_Set_or_CustomSet_Value(T)
    res = cast(JSONSchema, {JSC_TYPE: JSC_OBJECT})
    res[JSC_TITLE] = get_Set_name_V(V)
    # property names constrained by SCHEMA_CID (presumably content ids) — TODO confirm
    res[JSC_PROPERTY_NAMES] = SCHEMA_CID
    res[JSC_ADDITIONAL_PROPERTIES] = type_to_schema(V, globals_, processing)
    res[SCHEMA_ATT] = SCHEMA_ID
    return res
def Tuple_to_schema(T, globals_: GlobalsDict, processing: ProcessingDict) -> JSONSchema:
    """Encode a Tuple type as a JSON array schema.

    Tuple[X, ...] produces a single 'items' schema; a fixed-length tuple
    produces one 'items' entry per position.
    """
    assert is_Tuple(T)
    args = T.__args__
    if args[-1] == Ellipsis:
        # variable-length tuple: a single element schema
        body = {
            SCHEMA_ATT: SCHEMA_ID,
            JSC_TYPE: JSC_ARRAY,
            JSC_ITEMS: type_to_schema(args[0], globals_, processing),
            JSC_TITLE: 'Tuple',
        }
    else:
        # fixed-length tuple: one schema per position
        body = {
            SCHEMA_ATT: SCHEMA_ID,
            JSC_TYPE: JSC_ARRAY,
            JSC_ITEMS: [type_to_schema(a, globals_, processing) for a in args],
            JSC_TITLE: 'Tuple',
        }
    return cast(JSONSchema, body)
def List_to_schema(T, globals_: GlobalsDict, processing: ProcessingDict) -> JSONSchema:
    """Encode a List[X] type as a JSON array schema."""
    assert is_List(T)
    element = get_List_arg(T)
    body = {
        SCHEMA_ATT: SCHEMA_ID,
        JSC_TYPE: JSC_ARRAY,
        JSC_ITEMS: type_to_schema(element, globals_, processing),
        JSC_TITLE: 'List',
    }
    return cast(JSONSchema, body)
def type_callable_to_schema(T: Type, globals_: GlobalsDict, processing: ProcessingDict) -> JSONSchema:
    """Encode a Callable[...] type: parameters and return type go under
    'definitions'; parameter order is recorded under 'ordering'."""
    assert is_Callable(T)
    cinfo = get_Callable_info(T)
    # res: JSONSchema = {JSC_TYPE: X_TYPE_FUNCTION, SCHEMA_ATT: X_SCHEMA_ID}
    res = cast(JSONSchema, {JSC_TYPE: JSC_OBJECT, SCHEMA_ATT: SCHEMA_ID,
                            JSC_TITLE: JSC_TITLE_CALLABLE,
                            'special': 'callable'})
    p = res[JSC_DEFINITIONS] = {}
    for k, v in cinfo.parameters_by_name.items():
        p[k] = type_to_schema(v, globals_, processing)
    # 'return' cannot clash with parameter names (it is a Python keyword)
    p['return'] = type_to_schema(cinfo.returns, globals_, processing)
    res['ordering'] = cinfo.ordering
    # print(res)
    return res
def schema_to_type_callable(schema: JSONSchema, global_symbols: GlobalsDict, encountered: ProcessingDict):
    """Reconstruct a Callable[...] type from the schema produced by
    type_callable_to_schema()."""
    schema = dict(schema)
    definitions = dict(schema[JSC_DEFINITIONS])
    ret = schema_to_type(definitions.pop('return'), global_symbols, encountered)
    others = []
    for k in schema['ordering']:
        d = schema_to_type(definitions[k], global_symbols, encountered)
        # keys starting with '#' are not wrapped as named args (presumably
        # positional-only placeholders) — TODO confirm against get_Callable_info
        if not k.startswith('#'):
            d = NamedArg(d, k)
        others.append(d)
    # noinspection PyTypeHints
    return Callable[others, ret]
def type_to_schema_(T: Type, globals_: GlobalsDict, processing: ProcessingDict) -> JSONSchema:
    """Dispatch on the kind of T and produce its JSON-Schema.

    No error wrapping here — that is done by the public type_to_schema().
    Raises ValueError for unsupported inputs (None, strings, ForwardRefs)
    and AssertionError for a top-level Optional.
    """
    if T is None:
        raise ValueError()
    # ---- rejected inputs ----
    if is_optional(T): # pragma: no cover
        msg = f'Should not be needed to have an Optional here yet: {T}'
        raise AssertionError(msg)
    if is_forward_ref(T): # pragma: no cover
        arg = get_forward_ref_arg(T)
        # if arg == MemoryJSON.__name__:
        # return type_to_schema_(MemoryJSON, globals_, processing)
        msg = f'It is not supported to have an ForwardRef here yet: {T}'
        raise ValueError(msg)
    if isinstance(T, str): # pragma: no cover
        msg = f'It is not supported to have a string here: {T!r}'
        raise ValueError(msg)
    # pprint('type_to_schema_', T=T)
    # ---- primitive types ----
    if T is str:
        res = cast(JSONSchema, {JSC_TYPE: JSC_STRING, SCHEMA_ATT: SCHEMA_ID})
        return res
    if T is bool:
        res = cast(JSONSchema, {JSC_TYPE: JSC_BOOL, SCHEMA_ATT: SCHEMA_ID})
        return res
    if T is Number:
        res = cast(JSONSchema, {JSC_TYPE: JSC_NUMBER, SCHEMA_ATT: SCHEMA_ID})
        return res
    if T is float:
        res = cast(JSONSchema, {JSC_TYPE: JSC_NUMBER, SCHEMA_ATT: SCHEMA_ID, JSC_TITLE: JSC_TITLE_FLOAT})
        return res
    if T is int:
        res = cast(JSONSchema, {JSC_TYPE: JSC_INTEGER, SCHEMA_ATT: SCHEMA_ID})
        return res
    # Decimal and datetime are serialized as tagged strings
    if T is Decimal:
        res = cast(JSONSchema, {JSC_TYPE: JSC_STRING, JSC_TITLE: JSC_TITLE_DECIMAL, SCHEMA_ATT: SCHEMA_ID})
        return res
    if T is datetime.datetime:
        res = cast(JSONSchema, {JSC_TYPE: JSC_STRING, JSC_TITLE: JSC_TITLE_DATETIME, SCHEMA_ATT: SCHEMA_ID})
        return res
    if T is bytes:
        return SCHEMA_BYTES
    # ---- typing constructs ----
    # we cannot use isinstance on typing.Any
    if is_Any(T): # XXX not possible...
        res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID})
        return res
    if is_union(T):
        return schema_Union(T, globals_, processing)
    if is_Dict(T) or (isinstance(T, type) and issubclass(T, CustomDict)):
        return dict_to_schema(T, globals_, processing)
    if is_Set(T) or (isinstance(T, type) and issubclass(T, set)):
        return set_to_schema(T, globals_, processing)
    if is_Intersection(T):
        return schema_Intersection(T, globals_, processing)
    if is_Callable(T):
        return type_callable_to_schema(T, globals_, processing)
    if is_List(T):
        return List_to_schema(T, globals_, processing)
    if is_Tuple(T):
        # noinspection PyTypeChecker
        return Tuple_to_schema(T, globals_, processing)
    # ---- classes ----
    assert isinstance(T, type), T
    if issubclass(T, dict): # pragma: no cover
        msg = f'A regular "dict" slipped through.\n{T}'
        raise TypeError(msg)
    if hasattr(T, GENERIC_ATT2) and is_generic(T):
        return type_generic_to_schema(T, globals_, processing)
    if is_dataclass(T):
        return type_dataclass_to_schema(T, globals_, processing)
    if T is np.ndarray:
        return type_numpy_to_schema(T, globals_, processing)
    msg = f'Cannot interpret this type: {T!r}'
    msg += f'\n globals_: {globals_}'
    msg += f'\n globals_: {processing}'
    raise ValueError(msg)
def is_generic(T):
    """Return True iff T still has unbound TypeVar parameters in GENERIC_ATT2."""
    type_params = getattr(T, GENERIC_ATT2)
    for param in type_params:
        if isinstance(param, TypeVar):
            return True
    return False
def type_numpy_to_schema(T, globals_, processing) -> JSONSchema:
    """Encode numpy.ndarray as an object schema; only 'data' is typed (bytes).

    The T/globals_/processing parameters are unused: only np.ndarray reaches
    here (see type_to_schema_) and its schema is fixed.
    """
    res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID})
    res[JSC_TYPE] = JSC_OBJECT
    res[JSC_TITLE] = JSC_TITLE_NUMPY
    res[JSC_PROPERTIES] = {
        'shape': {}, # TODO
        'dtype': {}, # TODO
        'data': SCHEMA_BYTES
    }
    return res
def schema_Intersection(T, globals_, processing):
    """Encode an Intersection type as JSON-Schema "allOf"."""
    args = get_Intersection_args(T)
    options = [type_to_schema(t, globals_, processing) for t in args]
    res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, "allOf": options})
    return res
@loglevel
def schema_to_type_generic(res: JSONSchema, global_symbols: dict, encountered: dict, rl: RecLogger = None) -> Type:
    """Reconstruct a Generic dataclass from a schema with "definitions".

    Each definition becomes a TypeVar (bounded, unless the bound is Any);
    properties become dataclass fields, with non-required fields made
    Optional with default None.
    """
    rl = rl or RecLogger()
    # rl.pp('schema_to_type_generic', schema=res, global_symbols=global_symbols, encountered=encountered)
    assert res[JSC_TYPE] == JSC_OBJECT
    assert JSC_DEFINITIONS in res
    cls_name = res[JSC_TITLE]
    # copy: the TypeVars registered below must not leak to the caller
    encountered = dict(encountered)
    required = res.get(JSC_REQUIRED, [])
    typevars: List[TypeVar] = []
    for tname, t in res[JSC_DEFINITIONS].items():
        bound = schema_to_type(t, global_symbols, encountered)
        # noinspection PyTypeHints
        if is_Any(bound):
            bound = None
        # noinspection PyTypeHints
        tv = TypeVar(tname, bound=bound)
        typevars.append(tv)
        if ID_ATT in t:
            encountered[t[ID_ATT]] = tv
    typevars: Tuple[TypeVar, ...] = tuple(typevars)
    if PYTHON_36: # pragma: no cover
        # noinspection PyUnresolvedReferences
        base = Generic.__getitem__(typevars)
    else:
        # noinspection PyUnresolvedReferences
        base = Generic.__class_getitem__(typevars)
    fields_required = [] # (name, type, Field)
    fields_not_required = []
    for pname, v in res.get(JSC_PROPERTIES, {}).items():
        ptype = schema_to_type(v, global_symbols, encountered)
        if pname in required:
            _Field = field()
            fields_required.append((pname, ptype, _Field))
        else:
            _Field = field(default=None)
            ptype = Optional[ptype]
            fields_not_required.append((pname, ptype, _Field))
    # required fields first: fields with defaults cannot precede those without
    fields = fields_required + fields_not_required
    T = make_dataclass(cls_name, fields, bases=(base,), namespace=None, init=True,
                       repr=True, eq=True, order=False,
                       unsafe_hash=False, frozen=False)
    fix_annotations_with_self_reference(T, cls_name)
    if JSC_DESCRIPTION in res:
        setattr(T, '__doc__', res[JSC_DESCRIPTION])
    if ATT_PYTHON_NAME in res:
        setattr(T, '__qualname__', res[ATT_PYTHON_NAME])
    if X_PYTHON_MODULE_ATT in res:
        setattr(T, '__module__', res[X_PYTHON_MODULE_ATT])
    return T
def type_generic_to_schema(T: Type, globals_: GlobalsDict, processing_: ProcessingDict) -> JSONSchema:
    """Encode a Generic class: its TypeVars go under "definitions" (with their
    bounds as schemas), its annotated attributes under "properties"."""
    assert hasattr(T, GENERIC_ATT2)
    types2 = getattr(T, GENERIC_ATT2)
    processing2 = dict(processing_)
    globals2 = dict(globals_)
    res = cast(JSONSchema, {})
    res[SCHEMA_ATT] = SCHEMA_ID
    res[JSC_TITLE] = T.__name__
    res[ATT_PYTHON_NAME] = T.__qualname__
    res[X_PYTHON_MODULE_ATT] = T.__module__
    res[ID_ATT] = make_url(T.__name__)
    res[JSC_TYPE] = JSC_OBJECT
    # register a self-reference so recursive fields resolve to a $ref
    processing2[f'{T.__name__}'] = make_ref(res[ID_ATT])
    # print(f'T: {T.__name__} ')
    definitions = {}
    if hasattr(T, '__doc__') and T.__doc__:
        res[JSC_DESCRIPTION] = T.__doc__
    globals_ = dict(globals_)
    for t2 in types2:
        if not isinstance(t2, TypeVar):
            continue
        url = make_url(f'{T.__name__}/{t2.__name__}')
        # processing2[f'~{name}'] = {'$ref': url}
        processing2[f'{t2.__name__}'] = make_ref(url)
        # noinspection PyTypeHints
        globals2[t2.__name__] = t2
        # an unbounded TypeVar is encoded with bound Any
        bound = t2.__bound__ or Any
        schema = type_to_schema(bound, globals2, processing2)
        schema[ID_ATT] = url
        definitions[t2.__name__] = schema
        globals_[t2.__name__] = t2
    if definitions:
        res[JSC_DEFINITIONS] = definitions
    res[JSC_PROPERTIES] = properties = {}
    required = []
    for name, t in T.__annotations__.items():
        t = replace_typevars(t, bindings={}, symbols=globals_, rl=None)
        if is_ClassVar(t):
            continue
        try:
            result = eval_field(t, globals2, processing2)
        except KeyboardInterrupt:
            raise
        except BaseException as e:
            msg = f'Cannot evaluate field "{name}" of class {T} annotated as {t}'
            raise Exception(msg) from e
        assert isinstance(result, Result), result
        properties[name] = result.schema
        if not result.optional:
            required.append(name)
    if required:
        res[JSC_REQUIRED] = required
    return res
def type_dataclass_to_schema(T: Type, globals_: GlobalsDict, processing: ProcessingDict) -> JSONSchema:
    """Encode a (non-generic) dataclass as a JSON object schema.

    Dataclass fields become "properties" (recording defaults where present);
    ClassVar annotations are recorded separately under X_CLASSVARS, and their
    class-attribute values under X_CLASSATTS.
    """
    assert is_dataclass(T), T
    p2 = dict(processing)
    res = cast(JSONSchema, {})
    res[ID_ATT] = make_url(T.__name__)
    if hasattr(T, '__name__') and T.__name__:
        res[JSC_TITLE] = T.__name__
        # register a self-reference so recursive fields resolve to a $ref
        p2[T.__name__] = make_ref(res[ID_ATT])
    res[ATT_PYTHON_NAME] = T.__qualname__
    res[X_PYTHON_MODULE_ATT] = T.__module__
    res[SCHEMA_ATT] = SCHEMA_ID
    res[JSC_TYPE] = JSC_OBJECT
    if hasattr(T, '__doc__') and T.__doc__:
        res[JSC_DESCRIPTION] = T.__doc__
    res[JSC_PROPERTIES] = properties = {}
    classvars = {}
    classatts = {}
    required = []
    fields_ = getattr(T, _FIELDS)
    # noinspection PyUnusedLocal
    afield: Field
    for name, afield in fields_.items():
        t = afield.type
        try:
            # string annotations need to be evaluated first
            if isinstance(t, str):
                t = eval_just_string(t, globals_)
            if is_ClassVar(t):
                tt = get_ClassVar_arg(t)
                result = eval_field(tt, globals_, p2)
                classvars[name] = result.schema
                the_att = getattr(T, name)
                if isinstance(the_att, type):
                    classatts[name] = type_to_schema(the_att, globals_, processing)
                else:
                    classatts[name] = object_to_ipce(the_att, globals_)
            else:
                result = eval_field(t, globals_, p2)
                if not result.optional:
                    required.append(name)
                properties[name] = result.schema
                if not result.optional:
                    if not isinstance(afield.default, dataclasses._MISSING_TYPE):
                        # logger.info(f'default for {name} is {afield.default}')
                        properties[name]['default'] = object_to_ipce(afield.default, globals_)
        except KeyboardInterrupt:
            raise
        except BaseException as e:
            msg = f'Cannot write schema for attribute {name} -> {t}'
            raise TypeError(msg) from e
    if required: # empty is error
        res[JSC_REQUIRED] = required
    if classvars:
        res[X_CLASSVARS] = classvars
    if classatts:
        res[X_CLASSATTS] = classatts
    return res
if typing.TYPE_CHECKING: # pragma: no cover
from .monkey_patching_typing import original_dataclass
else:
from dataclasses import dataclass as original_dataclass
@original_dataclass
class Result:
    """Schema computed for a field, plus whether the field was Optional."""
    schema: JSONSchema
    optional: Optional[bool] = False
# TODO: make url generic
def make_url(x: str) -> str:
    """Return the placeholder canonical $id URL for the type named x."""
    assert isinstance(x, str), x
    return f'http://invalid.json-schema.org/{x}#'
def make_ref(x: str) -> JSONSchema:
    """Wrap a URL into a JSON-Schema "$ref" object."""
    assert len(x) > 1, x
    assert isinstance(x, str), x
    return cast(JSONSchema, {REF_ATT: x})
def eval_field(t, globals_: GlobalsDict, processing: ProcessingDict) -> Result:
    """Compute the schema for one field annotation t, tracking optionality.

    Handles string annotations, ForwardRefs, Optional/Union, typing
    containers, TypeVars, and plain classes (with recursion detection).
    Raises CannotResolveTypeVar / NotImplementedError for unresolvable input.
    """
    debug_info2 = lambda: dict(globals_=globals_, processing=processing)
    if isinstance(t, str):
        te = eval_type_string(t, globals_, processing)
        return te
    if is_Type(t):
        # Type[...] is encoded as a reference to the meta-schema
        res = cast(JSONSchema, make_ref(SCHEMA_ID))
        return Result(res)
    if is_Tuple(t):
        res = Tuple_to_schema(t, globals_, processing)
        return Result(res)
    if is_List(t):
        res = List_to_schema(t, globals_, processing)
        return Result(res)
    if is_forward_ref(t):
        tn = get_forward_ref_arg(t)
        # tt = t._eval_type(globals_, processing)
        # print(f'tn: {tn!r} tt: {tt!r}')
        return eval_type_string(tn, globals_, processing)
    if is_optional(t):
        # unwrap and mark the Result as optional
        tt = get_optional_type(t)
        result = eval_field(tt, globals_, processing)
        return Result(result.schema, optional=True)
    if is_union(t):
        return Result(schema_Union(t, globals_, processing))
    if is_Any(t):
        res = cast(JSONSchema, {})
        return Result(res)
    if is_Dict(t):
        schema = dict_to_schema(t, globals_, processing)
        return Result(schema)
    if is_Set(t):
        schema = set_to_schema(t, globals_, processing)
        return Result(schema)
    if isinstance(t, TypeVar):
        l = t.__name__
        if l in processing:
            return Result(processing[l])
        # I am not sure why this is different in Python 3.6
        if PYTHON_36 and (l in globals_): # pragma: no cover
            T = globals_[l]
            return Result(type_to_schema(T, globals_, processing))
        m = f'Could not resolve the TypeVar {t}'
        msg = pretty_dict(m, debug_info2())
        raise CannotResolveTypeVar(msg)
    if isinstance(t, type):
        # catch recursion here
        if t.__name__ in processing:
            return eval_field(t.__name__, globals_, processing)
        else:
            schema = type_to_schema(t, globals_, processing)
            return Result(schema)
    msg = f'Could not deal with {t}'
    msg += f'\nglobals: {globals_}'
    msg += f'\nprocessing: {processing}'
    raise NotImplementedError(msg)
def schema_Union(t, globals_, processing):
    """Encode a Union type as JSON-Schema "anyOf"."""
    alternatives = []
    for variant in get_union_types(t):
        alternatives.append(type_to_schema(variant, globals_, processing))
    return cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, "anyOf": alternatives})
def eval_type_string(t: str, globals_: GlobalsDict, processing: ProcessingDict) -> Result:
    """Resolve a string annotation to a Result.

    Names under processing become $ref schemas (recursion); names in the
    symbol table are resolved directly; anything else is evaluated as a
    Python expression.
    """
    check_isinstance(t, str)
    globals2 = dict(globals_)
    debug_info = lambda: dict(t=t, globals2=pretty_dict("", globals2), processing=pretty_dict("", processing))
    if t in processing:
        schema: JSONSchema = make_ref(make_url(t))
        return Result(schema)
    elif t in globals2:
        return eval_field(globals2[t], globals2, processing)
    else:
        try:
            res = eval_just_string(t, globals2)
            return eval_field(res, globals2, processing)
        except NotImplementedError as e: # pragma: no cover
            m = 'While evaluating string'
            msg = pretty_dict(m, debug_info())
            raise NotImplementedError(msg) from e
        except KeyboardInterrupt:
            raise
        except BaseException as e: # pragma: no cover
            m = 'Could not evaluate type string'
            msg = pretty_dict(m, debug_info())
            raise ValueError(msg) from e
def eval_just_string(t: str, globals_):
    """Evaluate the annotation string t against the given symbol table.

    SECURITY NOTE: this uses eval(); annotation strings must come from
    trusted source code, never from external input.
    """
    from typing import Optional
    eval_locals = {'Optional': Optional, 'List': List}
    # TODO: put more above?
    # do not pollute environment
    if t in globals_:
        return globals_[t]
    eval_globals = dict(globals_)
    try:
        res = eval(t, eval_globals, eval_locals)
        return res
    except (KeyboardInterrupt, RecursionError):
        raise
    except BaseException as e:
        # re-raise the same exception type with evaluation context attached
        m = f'Error while evaluating the string {t!r} using eval().'
        msg = pretty_dict(m, dict(eval_locals=eval_locals, eval_globals=eval_globals))
        raise type(e)(msg) from e
@loglevel
def schema_to_type_dataclass(res: JSONSchema, global_symbols: dict, encountered: EncounteredDict,
                             schema_id=None, rl: RecLogger = None) -> Type:
    """Reconstruct a dataclass from its object schema.

    Properties become fields (non-required ones Optional with default None),
    X_CLASSVARS become ClassVar fields, X_CLASSATTS become class attributes.
    """
    rl = rl or RecLogger()
    # rl.pp('schema_to_type_dataclass', res=res, global_symbols=global_symbols, encountered=encountered)
    assert res[JSC_TYPE] == JSC_OBJECT
    cls_name = res[JSC_TITLE]
    # It's already done by the calling function
    # if ID_ATT in res:
    # # encountered[res[ID_ATT]] = ForwardRef(cls_name)
    # encountered[res[ID_ATT]] = cls_name
    required = res.get(JSC_REQUIRED, [])
    fields = [] # (name, type, Field)
    for pname, v in res.get(JSC_PROPERTIES, {}).items():
        ptype = schema_to_type(v, global_symbols, encountered)
        # assert isinstance(ptype)
        if pname in required:
            _Field = field()
        else:
            _Field = field(default=None)
            ptype = Optional[ptype]
        # restore a recorded default value, interpreted against the field type
        if JSC_DEFAULT in v:
            default_value = ipce_to_object(v[JSC_DEFAULT], global_symbols, expect_type=ptype)
            _Field.default = default_value
        fields.append((pname, ptype, _Field))
    # pprint('making dataclass with fields', fields=fields, res=res)
    for pname, v in res.get(X_CLASSVARS, {}).items():
        ptype = schema_to_type(v, global_symbols, encountered)
        fields.append((pname, ClassVar[ptype], field()))
    unsafe_hash = True
    try:
        T = make_dataclass(cls_name, fields, bases=(), namespace=None, init=True, repr=True, eq=True, order=False,
                           unsafe_hash=unsafe_hash, frozen=False)
    except TypeError: # pragma: no cover
        from . import logger
        msg = 'Cannot make dataclass with fields:'
        for f in fields:
            msg += f'\n {f}'
        logger.error(msg)
        raise
    fix_annotations_with_self_reference(T, cls_name)
    for pname, v in res.get(X_CLASSATTS, {}).items():
        # a class attribute that is itself a schema becomes a type
        if isinstance(v, dict) and SCHEMA_ATT in v and v[SCHEMA_ATT] == SCHEMA_ID:
            interpreted = schema_to_type(cast(JSONSchema, v), global_symbols, encountered)
        else:
            interpreted = ipce_to_object(v, global_symbols)
        setattr(T, pname, interpreted)
    if JSC_DESCRIPTION in res:
        setattr(T, '__doc__', res[JSC_DESCRIPTION])
    else:
        # the original one did not have it
        setattr(T, '__doc__', None)
    if ATT_PYTHON_NAME in res:
        setattr(T, '__qualname__', res[ATT_PYTHON_NAME])
    if X_PYTHON_MODULE_ATT in res:
        setattr(T, '__module__', res[X_PYTHON_MODULE_ATT])
    return T
from . import logger
def fix_annotations_with_self_reference(T, cls_name):
    """Patch Optional['ClassName'] self-references in a freshly built dataclass.

    make_dataclass cannot resolve a forward reference to the class being
    created, so replace Optional[ForwardRef(cls_name)] with Optional[T] and
    propagate the fixed annotations to the dataclass fields.
    """
    for k, v in T.__annotations__.items():
        if is_optional(v):
            a = get_optional_type(v)
            if is_forward_ref(a):
                arg = get_forward_ref_arg(a)
                if arg == cls_name:
                    T.__annotations__[k] = Optional[T]
                else:
                    logger.warning(f'Cannot fix annotation {a}')
                    continue
                # raise Exception(a)
    for f in dataclasses.fields(T):
        f.type = T.__annotations__[f.name]
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/ipce.py
|
ipce.py
|
from dataclasses import dataclass
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_bool1():
    """bool fields survive object and type round-trips."""
    @dataclass
    class M:
        a: bool
    a = M(True)
    assert_object_roundtrip(a, {})
    assert_type_roundtrip(M, {})
def test_none1():
    """NoneType round-trips both as a type and as a field annotation."""
    assert_type_roundtrip(type(None), {})
    @dataclass
    class M:
        a: type(None)
    a = M(None)
    assert_object_roundtrip(a, {})
    assert_type_roundtrip(M, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_bool.py
|
test_bool.py
|
from dataclasses import dataclass
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_float_1():
    """A float field survives an object round-trip."""
    @dataclass
    class MyClass:
        f: float
    e = MyClass(1.0)
    assert_object_roundtrip(e, {})
def test_float_2():
    """A type round-trip preserves the float annotation exactly."""
    @dataclass
    class MyClass:
        f: float
    T2 = assert_type_roundtrip(MyClass, {})
    print(T2)
    assert T2.__annotations__['f'] is float
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_float.py
|
test_float.py
|
from dataclasses import dataclass
from typing import *
from nose.tools import raises
from .ipce import make_dict, type_to_schema
from .pretty import pprint
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
@raises(ValueError)
def test_dict_check_key():
    """A Dict[int, int] instance rejects a str key with ValueError."""
    D = Dict[int, int]
    d = D()
    d['a'] = 2
@raises(ValueError)
def test_dict_check_value():
    """A Dict[int, int] instance rejects a str value with ValueError."""
    D = Dict[int, int]
    d = D()
    d[2] = 'a'
def test_dict_int_int0():
    """make_dict(int, int) round-trips as a type."""
    D = make_dict(int, int)
    assert_type_roundtrip(D, {})
def test_dict_int_int1():
    """typing.Dict[int, int] gets a schema and round-trips as a type."""
    D = Dict[int, int]
    pprint(schema=type_to_schema(D, {}))
    assert_type_roundtrip(D, {})
    # @dataclass
    # class MyClass:
    # f: Dict[int, int]
    #
    # e = MyClass({1: 2})
    # assert_object_roundtrip(e, {})
def test_dict_int_int():
    """A Dict[int, int] field (non-string keys) survives an object round-trip."""
    @dataclass
    class MyClass:
        f: Dict[int, int]
    e = MyClass({1: 2})
    assert_object_roundtrip(e, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_dict.py
|
test_dict.py
|
from dataclasses import dataclass
from typing import *
import yaml
from nose.tools import raises
from zuper_json.ipce import ipce_to_object, object_to_ipce, type_to_schema
from zuper_json.subcheck import can_be_used_as
def test_corner_cases01():
    """JSON null deserializes to None for Optional[int]."""
    assert None is ipce_to_object(None, {}, {}, expect_type=Optional[int])
def test_corner_cases02():
    """A plain int deserializes unchanged for Optional[int]."""
    assert 2 == ipce_to_object(2, {}, {}, expect_type=Optional[int])
def test_corner_cases03():
    """JSON null deserializes to None when no expected type is given."""
    assert None is ipce_to_object(None, {}, {}, expect_type=None)
def test_corner_cases04():
    """A dict serializes without a suggested type."""
    object_to_ipce({1: 2}, {}, suggest_type=None)
def test_corner_cases05():
    """An int serializes with Optional[int] as the suggested type."""
    object_to_ipce(12, {}, suggest_type=Optional[int])
def test_corner_cases06():
    """int is usable where Optional[int] is expected."""
    assert can_be_used_as(int, Optional[int])[0]
@raises(ValueError)
def test_corner_cases07():
    """An int cannot deserialize as Union[bool, str]."""
    ipce_to_object(12, {}, expect_type=Union[bool, str])
@raises(ValueError)
def test_corner_cases08():
    """An int cannot deserialize as Optional[bool]."""
    ipce_to_object(12, {}, expect_type=Optional[bool])
@raises(ValueError)
def test_corner_cases09():
    """type_to_schema rejects None."""
    type_to_schema(None, {})
@raises(ValueError)
def test_property_error():
    """Serializing an object whose field value violates its annotation raises."""
    @dataclass
    class MyClass32:
        a: int
    ok, _ = can_be_used_as(str, int)
    assert not ok
    # noinspection PyTypeChecker
    ob = MyClass32('not an int')
    # ipce_to_object(ob, {}, {}, expect_type=MyClass32)
    res = object_to_ipce(ob, {}, {})
    print(yaml.dump(res))
@raises(NotImplementedError)
def test_not_know():
    """Serializing an instance of an unknown plain class is not implemented."""
    class C:
        pass
    object_to_ipce(C(), {}, {})
# allow running this test module directly as a script
if __name__ == '__main__':
    test_property_error()
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_corner_cases.py
|
test_corner_cases.py
|
from dataclasses import dataclass, field
from typing import *
from zuper_json.monkey_patching_typing import my_dataclass
try:
from typing import ForwardRef
except ImportError: # pragma: no cover
from typing import _ForwardRef as ForwardRef
from .annotations_tricks import is_Any
from .constants import SCHEMA_ATT, SCHEMA_ID
from .ipce import make_dict, ipce_to_object, object_to_ipce, type_to_schema, schema_to_type, \
CannotFindSchemaReference, JSONSchema, CannotResolveTypeVar, eval_field
from .test_utils import assert_object_roundtrip
@dataclass
class Empty:
    """A dataclass with no fields."""
    ...
@dataclass
class Contents:
    """Wraps a raw bytes payload."""
    data: bytes
@dataclass
class Address:
    """ An address with street and number """
    street: str  # street name
    number: int  # house number
@dataclass
class Person:
    """ Describes a Person """
    first: str  # first name
    last: str  # last name
    address: Address  # nested dataclass field
@dataclass
class Office:
    """ An Office contains people. """
    # keyed by a short name; default is an empty custom dict
    people: Dict[str, Person] = field(default_factory=make_dict(str, Person))
def test_ser1():
    """Nested dataclasses round-trip with all symbols provided."""
    x1 = Office()
    x1.people['andrea'] = Person('Andrea', 'Censi', Address('Sonnegstrasse', 3))
    assert_object_roundtrip(x1, get_symbols())
def test_ser2():
    """Same round-trip works without symbols, with strict equality."""
    x1 = Office()
    x1.people['andrea'] = Person('Andrea', 'Censi', Address('Sonnegstrasse', 3))
    assert_object_roundtrip(x1, {}, expect_equality=True)
@dataclass
class Name:
    """ Describes a Name with optional middle name"""
    first: str
    last: str
    middle: Optional[str] = None
@dataclass
class Chain:
    """ A recursive chain node: a value plus an optional next link. """
    value: str
    down: Optional['Chain'] = None
def get_symbols():
    """Build the symbol table used by the round-trip tests, including two
    mutually-referencing local dataclasses (FA forward-references FB)."""
    @dataclass
    class FA:
        """ A node holding a value and a forward-referenced FB. """
        value: str
        down: 'FB'
    @dataclass
    class FB:
        mine: int
    symbols = {'Office': Office,
               'Person': Person,
               'Address': Address,
               'Name': Name,
               'Contents': Contents,
               'Empty': Empty,
               'FA': FA,
               'FB': FB,
               'Chain': Chain}
    return symbols
def test_optional_1():
    """An Optional field that is set round-trips."""
    n1 = Name(first='H', middle='J', last='Wells')
    assert_object_roundtrip(n1, get_symbols())
def test_optional_2():
    """An Optional field left as None round-trips."""
    n1 = Name(first='H', last='Wells')
    assert_object_roundtrip(n1, get_symbols())
def test_optional_3():
    """Same as test_optional_2 but without symbols and with strict equality."""
    n1 = Name(first='H', last='Wells')
    assert_object_roundtrip(n1, {}, expect_equality=True)
def test_recursive():
    """A self-referential dataclass (Chain) round-trips."""
    n1 = Chain(value='12')
    assert_object_roundtrip(n1, {'Chain': Chain})
def test_ser_forward1():
    """Classes linked through a forward reference (FA -> 'FB') round-trip."""
    symbols = get_symbols()
    FA = symbols['FA']
    FB = symbols['FB']
    n1 = FA(value='a', down=FB(12))
    # with private_register('test_forward'):
    assert_object_roundtrip(n1, get_symbols())
def test_ser_forward2():
    """A dataclass with no fields round-trips."""
    n1 = Empty()
    assert_object_roundtrip(n1, get_symbols())
def test_ser_dict_object():
    """A dict keyed by hashable frozen dataclass instances round-trips.

    NOTE(review): the first M and the P/N/O classes are defined but unused
    (the second M shadows the first); only L (frozen, hashable) and the
    final M are exercised.
    """
    @dataclass
    class M:
        x: int
        y: int
    @dataclass()
    class P:
        x: int
        y: int
    @dataclass(unsafe_hash=True)
    class N:
        x: int
        y: int
    @dataclass(frozen=True)
    class O:
        x: int
        y: int
    @dataclass(frozen=True, unsafe_hash=True)
    class L:
        x: int
        y: int
    @dataclass
    class M:
        a: Dict[L, str]
    d = {L(0, 0): 'one',
         L(1, 1): 'two'}
    m = M(d)
    symbols2 = {L.__qualname__: L}
    assert_object_roundtrip(m, symbols2)
from nose.tools import raises, assert_equal
def test_bytes1():
    """A bytes field round-trips."""
    n1 = Contents(b'1234')
    assert_object_roundtrip(n1, get_symbols())
@raises(ValueError)
def test_abnormal_no_schema():
    """An empty dict without a $schema cannot be deserialized."""
    ipce_to_object({}, {})
def test_lists():
    """An empty list deserializes without error."""
    ipce_to_object([], {})
def test_nulls():
    """None serializes without error."""
    object_to_ipce(None, {})
def test_lists_2():
    """A non-empty list serializes without error."""
    object_to_ipce([1], {})
# @raises(ValueError)
# def test_the_tester_no_links2_in_snd_not():
# h = 'myhash'
# x = {LINKS: {h: {}}, "a": {"one": {"/": h}}}
# assert_good_canonical(x)
@raises(ValueError)
def test_the_tester_no_links2_in_snd_not2():
    """A plain (non-dataclass) class cannot get a schema."""
    class NotDataClass:
        ...
    T = NotDataClass
    type_to_schema(T, get_symbols())
@raises(AssertionError)
def test_not_optional():
    """A top-level Optional is rejected (only fields may be Optional)."""
    T = Optional[int]
    type_to_schema(T, get_symbols())
def test_not_union0():
    """A top-level Union is supported."""
    T = Union[int, str]
    type_to_schema(T, {})
@raises(ValueError)
def test_not_str1():
    """A bare string is not accepted as a type."""
    # noinspection PyTypeChecker
    type_to_schema('T', {})
@raises(ValueError)
def test_not_fref2():
    """A top-level ForwardRef is not accepted."""
    # noinspection PyTypeChecker
    type_to_schema(ForwardRef('one'), {})
def test_any():
    """Any maps to the empty schema (just $schema)."""
    # noinspection PyTypeChecker
    s = type_to_schema(Any, {})
    assert_equal(s, {SCHEMA_ATT: SCHEMA_ID})
# @raises(NotImplementedError)
def test_any_instantiate():
    """A schema dict itself can be fed back to ipce_to_object without error."""
    # noinspection PyTypeChecker
    schema = type_to_schema(Name, {})
    ipce_to_object(schema, {})
@raises(TypeError)
def test_not_dict_naked():
    """A plain dict subclass is rejected (must use the Dict machinery)."""
    class A(dict):
        ...
    type_to_schema(A, {})
def test_any1b():
    """The empty schema deserializes back to Any."""
    res = cast(JSONSchema, {})
    t = schema_to_type(res, {}, encountered={})
    assert is_Any(t), t
def test_any2():
    """An Any-typed field round-trips."""
    @dataclass
    class C:
        a: Any
    e = C(12)
    assert_object_roundtrip(e, {})
@raises(CannotFindSchemaReference)
def test_invalid_schema():
    """A dangling $ref raises CannotFindSchemaReference."""
    schema = cast(JSONSchema, {"$ref": "not-existing"})
    schema_to_type(schema, {}, {})
# @raises(CannotFindSchemaReference)
def test_dict_only():
    """A string-keyed Dict gets a schema without error."""
    T = Dict[str, str]
    _ = type_to_schema(T, {})
@raises(ValueError)
def test_str1():
    """A string argument is not accepted as a type."""
    type_to_schema('string-arg', {})
@raises(ValueError)
def test_forward_ref1():
    """An unresolvable top-level ForwardRef raises ValueError."""
    type_to_schema(ForwardRef('AA'), {})
@raises(TypeError)
def test_forward_ref2():
    """A field annotated with an unknown ForwardRef fails schema generation."""
    @dataclass
    class MyClass:
        # noinspection PyUnresolvedReferences
        f: ForwardRef('unknown')
    type_to_schema(MyClass, {})
@raises(TypeError)
def test_forward_ref3():
    """An Optional of an unknown name fails schema generation."""
    @dataclass
    class MyClass:
        # noinspection PyUnresolvedReferences
        f: Optional['unknown']
    # do not put MyClass
    type_to_schema(MyClass, {})
@raises(TypeError)
def test_forward_ref4():
    """Optional['Other'] fails even with Other in symbols (Other is not a dataclass)."""
    class Other:
        pass
    @dataclass
    class MyClass:
        f: Optional['Other']
    # do not put MyClass
    type_to_schema(MyClass, {'Other': Other})
# @raises(NotImplementedError)
def test_error1():
    """An annotation string that raises while being evaluated surfaces as an error.

    Accepts several exception types because the wrapping differs between
    Python versions / code paths.
    """
    try:
        def f():
            raise NotImplementedError()
        @dataclass
        class MyClass:
            f: Optional['f()']
        # do not put MyClass
        type_to_schema(MyClass, {'f': f})
    except (TypeError, NotImplementedError, NameError):
        pass
    else:
        raise AssertionError()
def test_2_ok():
    """A string annotation referring to a generic resolves when M is in symbols."""
    X = TypeVar('X')
    @my_dataclass
    class M(Generic[X]):
        x: X
    @my_dataclass
    class MyClass:
        f: "Optional[M[int]]"
    # do not put M
    type_to_schema(MyClass, {'M': M}) # <---- note
@raises(TypeError)
def test_2_error():
    """Same as test_2_ok but M is missing from symbols, so it fails."""
    X = TypeVar('X')
    @my_dataclass
    class M(Generic[X]):
        x: X
    @my_dataclass
    class MyClass:
        f: "Optional[M[int]]"
    # do not put M
    type_to_schema(MyClass, {}) # <---- note
# for completeness
@raises(CannotResolveTypeVar)
def test_cannot_resolve():
    """An unbound TypeVar cannot be resolved into a schema."""
    X = TypeVar('X')
    eval_field(X, {}, {})
@raises(AssertionError)
def test_random_json():
    """ Invalid because of $schema """
    data = {"$schema": {"title": "LogEntry"}, "topic": "next_episode", "data": None}
    ipce_to_object(data, {})
if __name__ == '__main__':
    # BUG FIX: this guard called test_error2(), which is not defined anywhere
    # in this module (only test_error1 exists), so running the file as a
    # script raised NameError. Call the existing test instead.
    test_error1()
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_serialization1.py
|
test_serialization1.py
|
import typing
from typing import *
from .annotations_tricks import is_optional, get_optional_type, is_forward_ref, get_forward_ref_arg, is_Any, is_Tuple, \
is_ClassVar, get_ClassVar_arg, is_Type, get_Type_arg
from .constants import PYTHON_36, PYTHON_37
def test_union():
    """On Python 3.7, Union[...] is a _GenericAlias with __origin__ == Union."""
    a = Union[int, str]
    # print(a)
    # print(type(a))
    if PYTHON_37:
        assert isinstance(a, typing._GenericAlias)
        # print(a.__dict__)
        assert a.__origin__ == Union
def test_optional():
    """is_optional detects Optional[int]; get_optional_type unwraps it."""
    a = Optional[int]
    assert is_optional(a)
    assert get_optional_type(a) is int
class Tree:
    """Node with an optional self-referential child; used by test_forward."""
    n: Optional['Tree']
symbols = {'Tree': Tree}
def test_forward():
    """The 'Tree' self-reference is a ForwardRef and can be evaluated in place."""
    x = Tree.__annotations__['n']
    assert is_optional(x)
    t = get_optional_type(x)
    # print(t)
    # print(type(t))
    # print(t.__dict__)
    assert is_forward_ref(t)
    # print(f'__forward_arg__: {t.__forward_arg__!r}')
    # print(f'__forward_code__: {t.__forward_code__!r}')
    # print(f'__forward_evaluated__: {t.__forward_evaluated__!r}')
    # print(f'__forward_value__: {t.__forward_value__!r}')
    # print(f'__forward_is_argument__: {t.__forward_is_argument__!r}')
    assert get_forward_ref_arg(t) == 'Tree'
    # the private evaluation API differs between 3.6 and 3.7
    if PYTHON_36: # pragma: no cover
        t._eval_type(localns=locals(), globalns=globals())
    else:
        t._evaluate(localns=locals(), globalns=globals())
    # print(f'__forward_arg__: {t.__forward_arg__!r}')
    # print(f'__forward_code__: {t.__forward_code__!r}')
    # print(f'__forward_evaluated__: {t.__forward_evaluated__!r}')
    # print(f'__forward_value__: {t.__forward_value__!r}')
    # print(f'__forward_is_argument__: {t.__forward_is_argument__!r}')
def test_any():
    """is_Any recognizes typing.Any."""
    a = Any
    assert is_Any(a)
def test_Tuple1():
    """is_Tuple recognizes a fixed-length Tuple."""
    a = Tuple[int, str]
    assert is_Tuple(a)
def test_Tuple2():
    """is_Tuple recognizes a variable-length Tuple."""
    a = Tuple[int, ...]
    assert is_Tuple(a)
def test_Typevar():
    """A TypeVar instance is recognized by isinstance."""
    tv = TypeVar('X')
    assert isinstance(tv, TypeVar)
def test_ClassVar():
    """is_ClassVar detects ClassVar[int]; get_ClassVar_arg unwraps it."""
    a = ClassVar[int]
    assert is_ClassVar(a)
    assert get_ClassVar_arg(a) is int
def test_Type():
X = TypeVar('X')
a = Type[X]
assert is_Type(a)
assert get_Type_arg(a) == X
# assert get_ClassVar_arg(a) is int
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_annotations_tricks.py
|
test_annotations_tricks.py
|
from dataclasses import dataclass
from typing import *
from zuper_json.annotations_tricks import is_Set
from zuper_json.my_dict import make_set
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_not_implemented_set():
@dataclass
class MyClass:
f: Set[int]
e = MyClass({1, 2, 3})
assert_object_roundtrip(e, {}) # pragma: no cover
def test_is_set01():
assert not is_Set(set)
def test_is_set02():
T = Set
print(f'the set is {T}')
assert is_Set(T)
def test_is_set03():
assert is_Set(Set[int])
def test_rt():
T = Set[int]
assert_type_roundtrip(T, {}, expect_type_equal=False)
def test_rt_yes():
T = make_set(int)
assert_type_roundtrip(T, {}, expect_type_equal=True)
def test_rt2():
T = make_set(int)
assert_type_roundtrip(T, {})
def test_not_implemented_set_2():
@dataclass
class A:
a: int
@dataclass
class MyClass:
f: Set[A]
e = MyClass({A(1), A(2)})
assert_object_roundtrip(e, {}) # pragma: no cover
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_sets.py
|
test_sets.py
|
from dataclasses import dataclass
from typing import Callable
from mypy_extensions import NamedArg
from nose.tools import assert_equal
from .annotations_tricks import is_Callable, get_Callable_info
from .test_utils import assert_type_roundtrip
def test_detection_1():
T = Callable[[], int]
print(T.__dict__)
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.parameters_by_name, {})
assert_equal(res.parameters_by_position, ())
assert_equal(res.returns, int)
def test_detection_2():
T = Callable[[NamedArg(str, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.returns, int)
assert_equal(res.parameters_by_position, (str,))
assert_equal(res.parameters_by_name, {"A": str})
def test_detection_3():
T = Callable[[NamedArg(str, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.returns, int)
assert_equal(res.parameters_by_position, (str,))
assert_equal(res.parameters_by_name, {"A": str})
def test_detection_4():
@dataclass
class MyClass:
pass
T = Callable[[NamedArg(MyClass, "A")], int]
assert is_Callable(T)
res = get_Callable_info(T)
assert_equal(res.returns, int)
assert_equal(res.parameters_by_position, (MyClass,))
assert_equal(res.parameters_by_name, {"A": MyClass})
def test_NamedArg_eq():
a = NamedArg(int, 'A')
b = NamedArg(int, 'A')
assert_equal(a, b)
A = Callable[[NamedArg(int, 'A')], int]
B = Callable[[NamedArg(int, 'A')], int]
assert_equal(A, B)
# @raises(TypeError)
def test_callable_1():
T = Callable[[], int]
assert_type_roundtrip(T, {})
def test_callable_2():
T = Callable[[NamedArg(int, "A")], int]
assert_type_roundtrip(T, {})
def test_callable_3():
T = Callable[[int], int]
assert_type_roundtrip(T, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_callable.py
|
test_callable.py
|
import json
import typing
# noinspection PyUnresolvedReferences
from contextlib import contextmanager
from dataclasses import is_dataclass, fields
try:
# noinspection PyUnresolvedReferences
from typing import ForwardRef
except ImportError: # pragma: no cover
# noinspection PyUnresolvedReferences
from typing import _ForwardRef as ForwardRef
from unittest import SkipTest
import cbor2 as cbor
import yaml
from nose.tools import assert_equal
from zuper_json.zeneric2 import loglevel, RecLogger
from . import logger
from .annotations_tricks import is_Dict
from .constants import PYTHON_36
from .ipce import object_to_ipce, ipce_to_object, type_to_schema, schema_to_type
from .json_utils import encode_bytes_before_json_serialization, decode_bytes_before_json_deserialization
from .pretty import pretty_dict
def assert_type_roundtrip(T, use_globals: dict, expect_type_equal: bool = True):
    """Check that T survives type -> schema -> type.

    Verifies that schema generation is deterministic (two calls agree), that
    the reconstructed type T2 yields the same schema again, and — when
    expect_type_equal — that T2 is structurally equivalent to T.
    Returns the reconstructed type T2.
    """
    assert T is not None
    rl = RecLogger()
    # generate twice: the two results must be identical (checked below)
    schema0 = type_to_schema(T, use_globals)
    schema = type_to_schema(T, use_globals)
    rl.pp('\n\nschema', schema=json.dumps(schema, indent=2))
    T2 = schema_to_type(schema, {}, {})
    rl.pp(f"\n\nT ({T}) the original one", **getattr(T, '__dict__', {}))
    print()
    rl.pp(f"\n\nT2 ({T2}) - reconstructed from schema ", **getattr(T2, '__dict__', {}))
    print()
    assert_equal(schema, schema0)
    if expect_type_equal:
        assert_equivalent_types(T, T2, assume_yes=set())
    # the reconstructed type must produce the same schema as the original
    schema2 = type_to_schema(T2, use_globals)
    if schema != schema2:
        msg = 'Different schemas'
        msg = pretty_dict(msg, dict(T=T, schema=schema0, T2=T2, schema2=schema2))
        # dump both schemas to files for manual diffing
        with open('tmp1.json', 'w') as f:
            f.write(json.dumps(schema, indent=2))
        with open('tmp2.json', 'w') as f:
            f.write(json.dumps(schema2, indent=2))
        # assert_equal raises with a detailed diff; the AssertionError below is
        # a safety net in case it unexpectedly passes
        assert_equal(schema, schema2)
        raise AssertionError(msg)
    return T2
@loglevel
def assert_equivalent_types(T1: type, T2: type, assume_yes: set, rl=None):
    """Recursively check that T1 and T2 are structurally equivalent types.

    assume_yes accumulates (id(T1), id(T2)) pairs already under comparison,
    which breaks infinite recursion on self-referencing (recursive) types.
    Raises Exception (chained) on the first difference found.
    """
    key = (id(T1), id(T2))
    if key in assume_yes:
        # already being compared higher up the stack: assume equivalent
        return
    assume_yes = set(assume_yes)
    assume_yes.add(key)
    rl = rl or RecLogger()
    try:
        if T1 is T2:
            rl.p('same by equality')
            return
        if hasattr(T1, '__dict__'):
            rl.pp('comparing',
                  T1=f'{T1!r}',
                  T2=f'{T2!r}',
                  T1_dict=T1.__dict__, T2_dict=T2.__dict__)
        # for these builtin we cannot set/get the attrs
        if not isinstance(T1, typing.TypeVar) and (not isinstance(T1, ForwardRef)) and not is_Dict(T1):
            for k in ['__name__', '__module__', '__doc__']:
                msg = f'Difference for {k} of {T1} ({type(T1)} and {T2} ({type(T2)}'
                assert_equal(getattr(T1, k, ()), getattr(T2, k, ()), msg=msg)
        if is_dataclass(T1):
            # dataclasses: same field names, and each field's type equivalent
            assert is_dataclass(T2)
            fields1 = fields(T1)
            fields2 = fields(T2)
            fields1 = {_.name: _ for _ in fields1}
            fields2 = {_.name: _ for _ in fields2}
            if sorted(fields1) != sorted(fields2):
                msg = f'Different fields: {sorted(fields1)} != {sorted(fields2)}'
                raise Exception(msg)
            for k in fields1:
                t1 = fields1[k].type
                t2 = fields2[k].type
                rl.pp(f'checking the fields {k}',
                      t1=f'{t1!r}',
                      t2=f'{t2!r}',
                      t1_ann=f'{T1.__annotations__[k]!r}',
                      t2_ann=f'{T2.__annotations__[k]!r}')
                try:
                    assert_equivalent_types(t1, t2, assume_yes=assume_yes)
                except BaseException as e:
                    msg = f'Could not establish the field {k!r} to be equivalent'
                    msg += f'\n t1 = {t1!r}'
                    msg += f'\n t2 = {t2!r}'
                    msg += f'\n t1_ann = {T1.__annotations__[k]!r}'
                    msg += f'\n t2_ann = {T2.__annotations__[k]!r}'
                    raise Exception(msg) from e
        if False:  # disabled: MRO comparison
            if hasattr(T1, 'mro'):
                if len(T1.mro()) != len(T2.mro()):
                    msg = pretty_dict('Different mros', dict(T1=T1.mro(), T2=T2.mro()))
                    raise AssertionError(msg)
                for m1, m2 in zip(T1.mro(), T2.mro()):
                    if m1 is T1 or m2 is T2: continue
                    assert_equivalent_types(m1, m2)
        if PYTHON_36:  # pragma: no cover
            pass  # XX
        else:
            # generic aliases (except Dict): compare the type arguments pairwise
            if isinstance(T1, typing._GenericAlias):
                # noinspection PyUnresolvedReferences
                if not is_Dict(T1):
                    # noinspection PyUnresolvedReferences
                    for z1, z2 in zip(T1.__args__, T2.__args__):
                        assert_equivalent_types(z1, z2, assume_yes=assume_yes)
    except BaseException as e:
        msg = f'Could not establish the two types to be equivalent.'
        msg += f'\n T1 = {id(T1)} {T1!r}'
        msg += f'\n T2 = {id(T2)} {T2!r}'
        raise Exception(msg) from e
def assert_object_roundtrip(x1, use_globals, expect_equality=True, works_without_schema=True):
    """
    expect_equality: if __eq__ is preserved

    Will not be preserved if use_globals = {}
    because a new Dataclass will be created
    and different Dataclasses with the same fields do not compare equal.
    """
    # serialize, then force a pass through CBOR and through JSON
    # (with bytes encoded/decoded) to exercise both transports
    y1 = object_to_ipce(x1, use_globals)
    y1_cbor = cbor.dumps(y1)
    y1 = cbor.loads(y1_cbor)
    y1e = encode_bytes_before_json_serialization(y1)
    y1es = json.dumps(y1e, indent=2)
    logger.info(f'y1es: {y1es}')
    y1esl = decode_bytes_before_json_deserialization(json.loads(y1es))
    y1eslo = ipce_to_object(y1esl, use_globals)
    # deserialize and re-serialize: must reproduce the same IPCE
    x1b = ipce_to_object(y1, use_globals)
    x1bj = object_to_ipce(x1b, use_globals)
    check_equality(x1, x1b, expect_equality)
    if y1 != x1bj:  # pragma: no cover
        msg = pretty_dict('Round trip not obtained', dict(x1bj=yaml.dump(x1bj),
                                                          y1=yaml.dump(y1)))
        # narrow the failure to the schema's propertyNames when possible
        if 'propertyNames' in y1['$schema']:
            assert_equal(y1['$schema']['propertyNames'], x1bj['$schema']['propertyNames'], msg=msg)
        raise AssertionError(msg)
    # once again, without schema
    if works_without_schema:
        z1 = object_to_ipce(x1, use_globals, with_schema=False)
        z2 = cbor.loads(cbor.dumps(z1))
        u1 = ipce_to_object(z2, use_globals, expect_type=type(x1))
        check_equality(x1, u1, expect_equality)
    # locals() returned so callers can inspect intermediate values
    return locals()
def check_equality(x1, x1b, expect_equality):
    """Verify equality (or deliberate non-equality) between the original object
    and its round-tripped counterpart, raising with a detailed report on failure."""
    if isinstance(x1b, type) and isinstance(x1, type):
        # two classes: equality comparison is not meaningful here
        logger.warning('Skipping type equality check for %s and %s' % (x1b, x1))
        return

    # check both directions, since __eq__ may be asymmetric
    forward = (x1b == x1)
    backward = (x1 == x1b)

    if not expect_equality:
        if forward and backward:  # pragma: no cover
            raise Exception('You did not expect equality but they actually are')
        return

    if not forward:  # pragma: no cover
        report = dict(x1b=x1b,
                      x1b_=type(x1b),
                      x1=x1,
                      x1_=type(x1), x1b_eq=x1b.__eq__)
        raise AssertionError(pretty_dict('Object equality (next == orig) not preserved', report))
    if not backward:  # pragma: no cover
        report = dict(x1b=x1b,
                      x1b_=type(x1b),
                      x1=x1,
                      x1_=type(x1),
                      x1_eq=x1.__eq__)
        raise AssertionError(pretty_dict('Object equality (orig == next) not preserved', report))
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
def fail(message):  # pragma: no cover
    """Unconditionally raise AssertionError with the given message."""
    raise AssertionError(message)
def known_failure(f):  # pragma: no cover
    """Decorator for tests expected to fail: a failure becomes a SkipTest,
    while an unexpected pass raises via fail()."""
    @wraps(f)
    def run_test(*args, **kwargs):
        try:
            f(*args, **kwargs)
        except BaseException as e:
            raise SkipTest("Known failure test failed: " + str(e))
        fail("test passed but marked as work in progress")

    # tag for nose attribute-based selection
    return attr('known_failure')(run_test)
def relies_on_missing_features(f):
    """Decorator for tests exercising not-yet-implemented features: a failure
    becomes a SkipTest, while an unexpected pass raises via fail()."""
    msg = "Test relying on not implemented feature."

    @wraps(f)
    def run_test(*args, **kwargs):  # pragma: no cover
        try:
            f(*args, **kwargs)
        except BaseException as e:
            raise SkipTest(msg) from e
        fail("test passed but marked as work in progress")

    # tag for nose attribute-based selection
    return attr('relies_on_missing_features')(run_test)
#
# def with_private_register(f):
# return f
# from zuper_ipce.test_utils import with_private_register as other
# return other(f)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_utils.py
|
test_utils.py
|
__version__ = '3.0.3'
from .logging import logger
logger.info(f'zj {__version__}')
from . import monkey_patching_typing
from .json2cbor import *
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/__init__.py
|
__init__.py
|
import json
from dataclasses import dataclass
from typing import *
from .ipce import ipce_to_object, type_to_schema, schema_to_type
from .test_utils import relies_on_missing_features, assert_type_roundtrip, assert_object_roundtrip, known_failure
symbols = {}
@relies_on_missing_features
def test_type1():
T = Type
assert_type_roundtrip(T, symbols)
def test_type2():
T = type
assert_type_roundtrip(T, symbols)
@relies_on_missing_features
def test_newtype():
T = NewType('T', str)
assert_type_roundtrip(T, symbols)
def test_dict1():
c = {}
assert_object_roundtrip(c, symbols)
def test_dict2():
T = Dict[str, Any]
# <class 'zuper_json.my_dict.Dict[str,Any]'>
assert_type_roundtrip(T, symbols, expect_type_equal=False)
@known_failure
def test_dict4():
# T = Dict[str, Any]
# <class 'zuper_json.my_dict.Dict[str,Any]'>
ob = {}
ipce_to_object(ob, {}, expect_type=Any)
def test_type__any():
T = Any
assert_type_roundtrip(T, symbols)
@known_failure
def test_type_any2():
@dataclass
class C:
a: Any
c = C(a={})
assert_object_roundtrip(c, symbols)
def test_type__any3():
@dataclass
class C:
a: Any
c = C(a=1)
assert_object_roundtrip(c, symbols)
def test_type__any4():
assert_object_roundtrip(Any, symbols)
def test_defaults1():
@dataclass
class DummyImageSourceConfig:
shape: Tuple[int, int] = (480, 640)
images_per_episode: int = 120
num_episodes: int = 10
mj = type_to_schema(DummyImageSourceConfig, {})
print(json.dumps(mj, indent=2))
T2 = schema_to_type(mj, {}, {})
print(dataclasses.fields(T2))
import dataclasses
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_type.py
|
test_type.py
|
from dataclasses import dataclass
from typing import Optional
from zuper_json.test_utils import assert_type_roundtrip
def test_recursive01():
@dataclass
class Rec1:
a: int
parent: 'Rec1'
assert_type_roundtrip(Rec1, {})
def test_recursive02():
@dataclass
class Rec2:
a: int
parent: 'Optional[Rec2]'
assert_type_roundtrip(Rec2, {})
def test_recursive03():
@dataclass
class Rec3:
a: int
parent: Optional['Rec3']
assert_type_roundtrip(Rec3, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_recurisve.py
|
test_recurisve.py
|
from datetime import datetime
from nose.tools import raises
from zuper_json.test_utils import assert_type_roundtrip, assert_object_roundtrip
def test_datetime01():
assert_type_roundtrip(datetime, {})
@raises(ValueError)
def test_datetime02():
d = datetime.now()
assert_object_roundtrip(d, {})
import pytz
def test_datetime03():
d = datetime.now()
timezone = pytz.timezone("America/Los_Angeles")
d_aware = timezone.localize(d)
assert_object_roundtrip(d_aware, {})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_datetime.py
|
test_datetime.py
|
import sys
import traceback
import typing
import warnings
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass, fields
# noinspection PyUnresolvedReferences
from typing import Dict, Type, TypeVar, Any, ClassVar, Sequence, _eval_type, Tuple
from zuper_commons.text import indent, pretty_dict
from .constants import PYTHON_36, GENERIC_ATT2, BINDINGS_ATT
from .logging import logger
try:
from typing import ForwardRef
except ImportError: # pragma: no cover
from typing import _ForwardRef as ForwardRef
from .annotations_tricks import is_ClassVar, get_ClassVar_arg, is_Type, get_Type_arg, name_for_type_like, \
is_forward_ref, get_forward_ref_arg, is_optional, get_optional_type, is_List, get_List_arg, is_union, \
get_union_types
def loglevel(f):
    """Decorator: track call depth in RecLogger.levels so that nested calls
    can print indented log output.

    The counter is always restored in a finally block, even when *f* raises.
    """
    from functools import wraps

    @wraps(f)  # preserve f's __name__/__doc__ (previously lost, which broke
    # e.g. test-name reporting for decorated test functions)
    def f2(*args, **kwargs):
        RecLogger.levels += 1
        try:
            return f(*args, **kwargs)
        finally:
            RecLogger.levels -= 1

    return f2
class RecLogger:
    """Hierarchical debug logger: prints messages indented by the current
    call depth (maintained by the @loglevel decorator)."""
    # global call-depth counter, incremented/decremented by @loglevel
    levels = 0
    # path of names identifying this logger instance
    prefix: Tuple[str, ...]
    # counter used to give each root logger a unique default prefix
    count = 0

    def __init__(self, prefix=None):
        if prefix is None:
            prefix = (str(RecLogger.count),)
            RecLogger.count += 1
        self.prefix = prefix

    def p(self, s):
        # print s indented proportionally to the current nesting level
        p = ' ' * RecLogger.levels + ':'
        # p = '/'.join(('root',) + self.prefix) + ':'
        print(indent(s, p))

    def pp(self, msg, **kwargs):
        # pretty-print a message together with a dict of named values
        self.p(pretty_dict(msg, kwargs))

    def child(self, name=None):
        # derive a logger whose prefix path is extended by `name`
        name = name or '-'
        prefix = self.prefix + (name,)
        return RecLogger(prefix)
def get_name_without_brackets(name: str) -> str:
    """Strip a trailing '[...]' parametrization: 'MyClass[int]' -> 'MyClass'."""
    base, _sep, _params = name.partition('[')
    return base
def as_tuple(x) -> Tuple:
    """Return x unchanged if it is a tuple; otherwise wrap it in a 1-tuple."""
    if isinstance(x, tuple):
        return x
    return (x,)
def get_type_spec(types) -> Dict[str, Type]:
    """Map each TypeVar's name to its bound (or Any when unbound).

    Raises ValueError if any element is not a TypeVar — a symptom of
    typing.Generic not having been monkey-patched by zuper_json.
    """
    spec: Dict[str, Type] = {}
    for tv in types:
        if not isinstance(tv, TypeVar):  # pragma: no cover
            raise ValueError(f'Not sure what happened - but did you import zuper_json? {(tv, types)}')
        spec[tv.__name__] = tv.__bound__ or Any
    return spec
class ZenericFix:
    """Replacement for typing.Generic: ZenericFix[X, ...] builds an abstract
    base class whose later subscription with concrete types produces a bound
    class via make_type()."""

    class CannotInstantiate(TypeError):
        ...

    @classmethod
    def __class_getitem__(cls, params):
        types = as_tuple(params)

        if PYTHON_36:  # pragma: no cover
            # Python 3.6 has no __class_getitem__ on plain classes;
            # emulate subscription through a metaclass __getitem__.
            class FakeGenericMeta(MyABC):
                def __getitem__(self, params2):
                    types2 = as_tuple(params2)
                    if types == types2:
                        return self
                    bindings = {}
                    for T, U in zip(types, types2):
                        bindings[T] = U
                        # enforce TypeVar bounds when the bound is a class
                        if T.__bound__ is not None and isinstance(T.__bound__, type):
                            if not issubclass(U, T.__bound__):
                                # NOTE(review): message reads "...expected asubclass..."
                                # — missing space between the two f-string pieces
                                msg = (f'For type parameter "{T.__name__}", expected a'
                                       f'subclass of "{T.__bound__.__name__}", found {U}.')
                                raise TypeError(msg)
                    return make_type(self, bindings)
        else:
            FakeGenericMeta = MyABC

        class GenericProxy(metaclass=FakeGenericMeta):
            # abstract marker method: keeps the proxy non-instantiable until
            # make_type() overrides it on the concrete class
            @abstractmethod
            def need(self):
                """"""

            @classmethod
            def __class_getitem__(cls, params2):
                types2 = as_tuple(params2)
                bindings = {}
                if types == types2:
                    return cls
                for T, U in zip(types, types2):
                    bindings[T] = U
                    # enforce TypeVar bounds when the bound is a class
                    if T.__bound__ is not None and isinstance(T.__bound__, type):
                        if not issubclass(U, T.__bound__):
                            # NOTE(review): same missing-space message as above
                            msg = (f'For type parameter "{T.__name__}", expected a'
                                   f'subclass of "{T.__bound__.__name__}", found {U}.')
                            raise TypeError(msg)
                return make_type(cls, bindings)

        name = 'Generic[%s]' % ",".join(_.__name__ for _ in types)
        gp = type(name, (GenericProxy,), {GENERIC_ATT2: types})
        setattr(gp, GENERIC_ATT2, types)
        return gp
class MyABC(ABCMeta):
    """Metaclass that renames classes carrying GENERIC_ATT2 type parameters
    to the bracketed form 'Name[T,...]'."""

    def __new__(mcls, name, bases, namespace, **kwargs):
        cls = super().__new__(mcls, name, bases, namespace, **kwargs)

        # locate the tuple of type parameters: declared on the class itself,
        # or inherited from the first base
        if GENERIC_ATT2 in namespace:
            spec = namespace[GENERIC_ATT2]
        elif bases and GENERIC_ATT2 in bases[0].__dict__:
            spec = bases[0].__dict__[GENERIC_ATT2]
        else:
            spec = {}

        if spec:
            # rebuild the display name from the base name and the parameters
            name0 = get_name_without_brackets(name)
            name = f'{name0}[%s]' % (",".join(name_for_type_like(_) for _ in spec))
            setattr(cls, '__name__', name)
        else:
            pass
        setattr(cls, '__module__', mcls.__module__)
        return cls
class NoConstructorImplemented(TypeError):
    """Raised by the placeholder __init__ of generated non-dataclass types
    when they are instantiated with arguments."""
    pass
from typing import Optional, Union, List, Set
def get_default_attrs():
    """Typing names made available when eval-ing string annotations."""
    return {
        'Any': Any,
        'Optional': Optional,
        'Union': Union,
        'Tuple': Tuple,
        'List': List,
        'Set': Set,
        'Dict': Dict,
    }
class Fake:
    """Stand-in registered in the symbol table under a generic class's base
    name, so that evaluating a string annotation like 'Name[T]' resolves
    through __getitem__."""

    def __init__(self, myt, symbols):
        self.myt = myt
        self.name_without = get_name_without_brackets(myt.__name__)
        self.symbols = symbols

    def __getitem__(self, item):
        n = name_for_type_like(item)
        complete = f'{self.name_without}[{n}]'
        if complete in self.symbols:
            # reuse the already-registered specialization
            return self.symbols[complete]
        # noinspection PyUnresolvedReferences
        return self.myt[item]
@loglevel
def resolve_types(T, locals_=None, refs=()):
    """Resolve the string annotations of dataclass T in place.

    Builds a symbol table from locals_, T itself, the extra refs, and T's
    type parameters, then replaces each entry of T.__annotations__ (and the
    corresponding dataclass field .type) with the evaluated type.
    Unresolvable names are logged and skipped; TypeErrors are re-raised.
    """
    assert is_dataclass(T)
    rl = RecLogger()

    symbols = dict(locals_ or {})

    for t in (T,) + refs:
        symbols[t.__name__] = t
        name_without = get_name_without_brackets(t.__name__)

        # also register the base name (e.g. 'MyClass' for 'MyClass[X]') so
        # parametrized forward references can resolve through Fake.__getitem__
        if name_without not in symbols:
            symbols[name_without] = Fake(t, symbols)
        else:
            pass

    # make T's type parameters resolvable by name
    for x in getattr(T, GENERIC_ATT2, ()):
        if hasattr(x, '__name__'):
            symbols[x.__name__] = x

    annotations = getattr(T, '__annotations__', {})

    for k, v in annotations.items():
        if not isinstance(v, str) and is_ClassVar(v):
            continue  # XXX
        try:
            r = replace_typevars(v, bindings={}, symbols=symbols, rl=None)
            annotations[k] = r
        except NameError as e:
            msg = f'resolve_type({T.__name__}): Cannot resolve names for attribute "{k}".'
            msg += f'\n symbols: {symbols}'
            msg += '\n\n' + indent(traceback.format_exc(), '', '> ')
            # best-effort: leave the annotation as-is and continue
            logger.warning(msg)
            continue
        except TypeError as e:
            msg = f'Cannot resolve type for attribute "{k}".'
            raise TypeError(msg) from e
    # propagate the resolved annotations to the dataclass Field objects
    for f in fields(T):
        if not f.name in annotations:
            continue
        f.type = annotations[f.name]
from dataclasses import is_dataclass
@loglevel
def replace_typevars(cls, *, bindings, symbols, rl: Optional[RecLogger], already=None):
    """Recursively substitute TypeVars, forward refs, and string annotations.

    bindings maps TypeVar -> concrete type; symbols is the namespace used to
    eval string annotations; already caches results keyed by id() to bound
    the recursion.
    """
    rl = rl or RecLogger()
    already = already or {}

    if id(cls) in already:
        return already[id(cls)]
    elif cls in bindings:
        # a TypeVar with a known concrete substitution
        return bindings[cls]
    elif isinstance(cls, str):
        # string annotation: look up directly, else eval in the combined
        # namespace of default typing names plus the caller's symbols
        if cls in symbols:
            return symbols[cls]
        g = dict(get_default_attrs())
        g.update(symbols)
        g0 = dict(g)
        try:
            return eval(cls, g)
        except NameError as e:
            msg = f'Cannot resolve {cls!r}\ng: {list(g0)}'
            raise NameError(msg) from e
    elif hasattr(cls, '__annotations__'):
        # a class with fields: specialize it with the bindings
        return make_type(cls, bindings)
    elif is_Type(cls):
        x = get_Type_arg(cls)
        r = replace_typevars(x, bindings=bindings, already=already, symbols=symbols, rl=rl.child('classvar arg'))
        return Type[r]
    elif is_ClassVar(cls):
        x = get_ClassVar_arg(cls)
        r = replace_typevars(x, bindings=bindings, already=already, symbols=symbols, rl=rl.child('classvar arg'))
        return typing.ClassVar[r]
    elif is_List(cls):
        arg = get_List_arg(cls)
        return typing.List[
            replace_typevars(arg, bindings=bindings, already=already, symbols=symbols, rl=rl.child('list arg'))]
    elif is_optional(cls):
        x = get_optional_type(cls)
        return typing.Optional[
            replace_typevars(x, bindings=bindings, already=already, symbols=symbols, rl=rl.child('optional arg'))]
    elif is_union(cls):
        xs = get_union_types(cls)
        ys = tuple(replace_typevars(_, bindings=bindings, already=already, symbols=symbols, rl=rl.child())
                   for _ in xs)
        return typing.Union[ys]
    elif is_forward_ref(cls):
        # recurse on the referred name as a string annotation
        T = get_forward_ref_arg(cls)
        return replace_typevars(T, bindings=bindings, already=already, symbols=symbols, rl=rl.child('forward '))
    else:
        # nothing to substitute (concrete type, Any, ...)
        return cls
# Memoization of generated types: make_type() caches its results here,
# keyed on (str(cls), str(bindings)).
cache_enabled = True
cache = {}

if PYTHON_36:
    B = Dict[Any, Any]  # bug in Python 3.6
else:
    # annotation alias for make_type's bindings parameter
    B = Dict[TypeVar, Any]
@loglevel
def make_type(cls: type, bindings: B, rl: RecLogger = None) -> type:
    """Create a subclass of generic cls with its TypeVars substituted per bindings.

    Returns cls unchanged when bindings is empty; results are memoized in
    the module-level `cache` keyed on (str(cls), str(bindings)).
    """
    if not bindings:
        return cls
    cache_key = (str(cls), str(bindings))
    if cache_enabled:
        if cache_key in cache:
            return cache[cache_key]

    rl = rl or RecLogger()
    generic_att2 = getattr(cls, GENERIC_ATT2, ())
    assert isinstance(generic_att2, tuple)

    symbols = {}

    annotations = getattr(cls, '__annotations__', {})
    name_without = get_name_without_brackets(cls.__name__)

    def param_name(x):
        # display name of x after substituting the bindings, e.g. X -> 'int'
        x2 = replace_typevars(x, bindings=bindings, symbols=symbols, rl=rl.child('param_name'))
        return name_for_type_like(x2)

    if generic_att2:
        name2 = '%s[%s]' % (name_without, ",".join(param_name(_) for _ in generic_att2))
    else:
        name2 = name_without
    try:
        # 'need' overrides the abstract marker so the class is instantiable
        cls2 = type(name2, (cls,), {'need': lambda: None})
    except TypeError as e:
        msg = f'Cannot instantiate from {cls!r}'
        raise TypeError(msg) from e

    symbols[name2] = cls2
    symbols[cls.__name__] = cls2  # also MyClass[X] should resolve to the same
    # cache early so recursive annotations referring back to this type terminate
    cache[cache_key] = cls2

    class Fake:
        # resolves 'Name[T]' subscriptions during annotation evaluation
        def __getitem__(self, item):
            n = name_for_type_like(item)
            complete = f'{name_without}[{n}]'
            if complete in symbols:
                return symbols[complete]
            # noinspection PyUnresolvedReferences
            return cls[item]

    if name_without not in symbols:
        symbols[name_without] = Fake()
    else:
        pass

    for T, U in bindings.items():
        symbols[T.__name__] = U
        if hasattr(U, '__name__'):
            # dict does not have name
            symbols[U.__name__] = U

    # first of all, replace the bindings in the generic_att
    generic_att2_new = tuple(
        replace_typevars(_, bindings=bindings, symbols=symbols, rl=rl.child('attribute')) for _ in generic_att2)

    new_annotations = {}

    for k, v0 in annotations.items():
        v = replace_typevars(v0, bindings=bindings, symbols=symbols, rl=rl.child(f'ann {k}'))

        if is_ClassVar(v):
            s = get_ClassVar_arg(v)
            if is_Type(s):
                # ClassVar[Type[X]]: also set the concrete class as attribute
                st = get_Type_arg(s)
                concrete = st
                new_annotations[k] = ClassVar[Type[st]]
                setattr(cls2, k, concrete)
            else:
                new_annotations[k] = ClassVar[s]
        else:
            new_annotations[k] = v

    original__post_init__ = getattr(cls, '__post_init__', None)

    def __post_init__(self):
        # best-effort runtime check that field values match the substituted
        # annotations; mismatches only warn
        for k, v in new_annotations.items():
            if is_ClassVar(v): continue
            if isinstance(v, type):
                val = getattr(self, k)
                try:
                    if type(val).__name__ != v.__name__ and not isinstance(val, v):
                        msg = f'Expected field "{k}" to be a "{v.__name__}" but found {type(val).__name__}'
                        warnings.warn(msg, stacklevel=3)
                        # raise ValueError(msg)
                except TypeError as e:
                    msg = f'Cannot judge annotation of {k} (supposedly {v}.'
                    if sys.version_info[:2] == (3, 6):
                        # FIXME: warn
                        continue
                    logger.error(msg)
                    raise TypeError(msg) from e
        if original__post_init__ is not None:
            original__post_init__(self)

    setattr(cls2, '__post_init__', __post_init__)
    # important: do it before dataclass
    cls2.__annotations__ = new_annotations

    if is_dataclass(cls):
        # note: need to have set new annotations
        cls2 = dataclass(cls2)
    else:
        # not a dataclass: install a placeholder __init__ that rejects arguments
        # noinspection PyUnusedLocal
        def init_placeholder(self, *args, **kwargs):
            if args or kwargs:
                msg = f'Default constructor of {cls2.__name__} does not know what to do with arguments.'
                msg += f'\nargs: {args!r}\nkwargs: {kwargs!r}'
                msg += f'\nself: {self}'
                msg += f'\nself: {dir(type(self))}'
                msg += f'\nself: {type(self)}'
                raise NoConstructorImplemented(msg)

        setattr(cls2, '__init__', init_placeholder)

    cls2.__module__ = cls.__module__
    setattr(cls2, '__name__', name2)
    setattr(cls2, BINDINGS_ATT, bindings)
    setattr(cls2, GENERIC_ATT2, generic_att2_new)
    setattr(cls2, '__post_init__', __post_init__)
    return cls2
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/zeneric2.py
|
zeneric2.py
|
import typing
from typing import Union, Any, Dict
from .constants import NAME_ARG
from .constants import PYTHON_36
# noinspection PyProtectedMember
def is_optional(x):
    """True iff x is exactly Optional[X], i.e. a Union of one type and NoneType."""
    if PYTHON_36:  # pragma: no cover
        return isinstance(x, typing._Union) and len(x.__args__) == 2 and x.__args__[-1] is type(None)
    else:
        return isinstance(x, typing._GenericAlias) and (x.__origin__ is Union) and len(x.__args__) == 2 and x.__args__[
            -1] is type(None)
def get_optional_type(x):
    """Return X for an Optional[X] annotation."""
    assert is_optional(x)
    inner, _none = x.__args__
    return inner
def is_union(x):
    """True iff x is a Union alias, excluding the Optional case:
    Union[X, None] is not considered a Union."""
    if PYTHON_36:  # pragma: no cover
        return not is_optional(x) and isinstance(x, typing._Union)
    else:
        return not is_optional(x) and isinstance(x, typing._GenericAlias) and (x.__origin__ is Union)
def get_union_types(x):
    """Return the member types of a Union as a tuple."""
    assert is_union(x)
    members = x.__args__
    return tuple(members)
def _check_valid_arg(x):
if isinstance(x, str): # pragma: no cover
msg = f'The annotations must be resolved: {x!r}'
raise ValueError(msg)
def is_forward_ref(x):
    """True iff x is a typing ForwardRef (an unresolved 'Name' annotation)."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        return isinstance(x, typing._ForwardRef)
    else:
        return isinstance(x, typing.ForwardRef)
def get_forward_ref_arg(x) -> str:
    """Return the name string carried by a ForwardRef."""
    assert is_forward_ref(x)
    referred = x.__forward_arg__
    return referred
def is_Any(x):
    """True iff x is typing.Any."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        return str(x) == 'typing.Any'
    else:
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._SpecialForm) and x._name == 'Any'
def is_ClassVar(x):
    """True iff x is a ClassVar[...] annotation."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing._ClassVar)
    else:
        return isinstance(x, typing._GenericAlias) and (x.__origin__ is typing.ClassVar)
def get_ClassVar_arg(x):
    """Return X for ClassVar[X]."""
    assert is_ClassVar(x)
    if PYTHON_36:  # pragma: no cover
        return x.__type__
    else:
        return x.__args__[0]
def is_Type(x):
    """True iff x is typing.Type or a parametrized Type[...]."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return (x is typing.Type) or (isinstance(x, typing.GenericMeta) and (x.__origin__ is typing.Type))
    else:
        return (x is typing.Type) or (isinstance(x, typing._GenericAlias) and (x.__origin__ is type))
def is_Tuple(x):
    """True iff x is a Tuple[...] alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.TupleMeta)
    else:
        return isinstance(x, typing._GenericAlias) and (x._name == 'Tuple')
def is_List(x):
    """True iff x is a List[...] alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.GenericMeta) and x.__origin__ is typing.List
    else:
        return isinstance(x, typing._GenericAlias) and (x._name == 'List')
def get_List_arg(x):
    """Return X for List[X]."""
    assert is_List(x)
    inner = x.__args__[0]
    return inner
def get_Type_arg(x):
    """Return X for Type[X]."""
    assert is_Type(x)
    inner = x.__args__[0]
    return inner
def is_Callable(x):
    """True iff x is a Callable[...] alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.CallableMeta)
    else:
        return getattr(x, '_name', None) == 'Callable'
    # return hasattr(x, '__origin__') and x.__origin__ is typing.Callable
    # return isinstance(x, typing._GenericAlias) and x.__origin__.__name__ == "Callable"
def is_MyNamedArg(x):
    """True iff x carries the NAME_ARG attribute set by the NamedArg shim."""
    try:
        getattr(x, NAME_ARG)
    except AttributeError:
        return False
    return True
def get_MyNamedArg_name(x):
    """Return the declared argument name stored on a NamedArg marker."""
    assert is_MyNamedArg(x)
    name = getattr(x, NAME_ARG)
    return name
def is_Dict(x: Any):
    """True iff x is a Dict[...] alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        return isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Dict
    else:
        return isinstance(x, typing._GenericAlias) and x._name == 'Dict'
def is_Set(x: Any):
    """True iff x is typing.Set (3.6) or a Set[...] alias."""
    _check_valid_arg(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x is typing.Set:
            return True
        return isinstance(x, typing.GenericMeta) and x.__origin__ is typing.Set
    else:
        return isinstance(x, typing._GenericAlias) and x._name == 'Set'
def get_Set_arg(x):
    """Return V for Set[V]; the bare typing.Set (3.6) yields Any."""
    assert is_Set(x)
    if PYTHON_36:  # pragma: no cover
        # noinspection PyUnresolvedReferences
        if x is typing.Set:
            return Any
    return x.__args__[0]
def get_Dict_name(T):
    """Canonical 'Dict[K,V]' display name for a Dict alias."""
    assert is_Dict(T)
    args = T.__args__
    return get_Dict_name_K_V(args[0], args[1])
def get_Dict_name_K_V(K, V):
    """Format a 'Dict[K,V]' display name from the two type arguments."""
    return f'Dict[{name_for_type_like(K)},{name_for_type_like(V)}]'
def get_Set_name_V(V):
    """Format a 'Set[V]' display name."""
    return f'Set[{name_for_type_like(V)}]'
def name_for_type_like(x):
    """Human-readable display name for a type-like object: plain types,
    TypeVars, Dict/Callable aliases, or anything with a __name__."""
    if is_Any(x):
        return 'Any'
    elif isinstance(x, type):
        return x.__name__
    elif isinstance(x, typing.TypeVar):
        return x.__name__
    elif is_Dict(x):
        return get_Dict_name(x)
    elif is_Callable(x):
        info = get_Callable_info(x)
        params = ','.join(name_for_type_like(p) for p in info.parameters_by_position)
        ret = name_for_type_like(info.returns)
        # BUG FIX: the closing ']' was missing, producing 'Callable[[...],X'
        return f'Callable[[{params}],{ret}]'
    elif hasattr(x, '__name__'):
        return x.__name__
    else:
        # fall back to str() for anything without a name (e.g. aliases)
        return str(x)
from typing import Tuple
# do not make a dataclass
class CallableInfo:
    """Record of a Callable[...] signature: parameters by name and by
    position, their ordering, and the return annotation."""
    parameters_by_name: Dict[str, Any]
    parameters_by_position: Tuple
    ordering: Tuple[str, ...]
    returns: Any

    def __init__(self, parameters_by_name, parameters_by_position, ordering, returns):
        # plain attribute assignment; deliberately not a dataclass
        self.returns = returns
        self.ordering = ordering
        self.parameters_by_position = parameters_by_position
        self.parameters_by_name = parameters_by_name
def get_Callable_info(x) -> CallableInfo:
    """Extract parameter and return information from a Callable[...] alias.

    Positional-only parameters get synthetic names '#0', '#1', ...;
    NamedArg markers contribute their declared name and original type.
    """
    assert is_Callable(x)
    parameters_by_name = {}
    parameters_by_position = []
    ordering = []
    args = x.__args__
    if args:
        # the last entry of __args__ is the return type
        returns = args[-1]
        rest = args[:-1]
    else:
        returns = Any
        rest = ()
    for i, a in enumerate(rest):
        if is_MyNamedArg(a):
            name = get_MyNamedArg_name(a)
            t = a.original
        else:
            name = f'#{i}'
            t = a
        parameters_by_name[name] = t
        ordering.append(name)
        parameters_by_position.append(t)
    return CallableInfo(parameters_by_name=parameters_by_name,
                        parameters_by_position=tuple(parameters_by_position),
                        ordering=tuple(ordering),
                        returns=returns)
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/annotations_tricks.py
|
annotations_tricks.py
|
from .constants import INTERSECTION_ATT
from .constants import PYTHON_36
from dataclasses import dataclass, is_dataclass
def Intersection_item(cls, params):
    """Construct the intersection type of ``params`` (a type or tuple of types).

    The result is a new class inheriting from all operands, whose
    ``__annotations__`` merge those of the operands (later operands win on
    clashing field names); it is promoted to a dataclass if any operand is one.
    """
    from .zeneric2 import as_tuple
    types = as_tuple(params)
    name = f'Intersection[{",".join(_.__name__ for _ in types)}]'
    annotations = {}
    any_dataclass = any(is_dataclass(_) for _ in types)
    for t in types:
        a = getattr(t, '__annotations__', {})
        annotations.update(a)
    res = {
        '__annotations__': annotations,
        INTERSECTION_ATT: types
    }
    # BUG FIX: use the normalized tuple as the bases; passing the raw
    # ``params`` made type() fail when subscripted with a single type
    # (Intersection[SomeClass]), since bases must be a tuple.
    C = type(name, types, res)
    if any_dataclass:
        C = dataclass(C)
    return C
if PYTHON_36:  # pragma: no cover
    # Python 3.6 has no __class_getitem__, so subscription support
    # (Intersection[A, B]) must be supplied by a metaclass instead.
    class IntersectionMeta(type):
        def __getitem__(self, params):
            return Intersection_item(self, params)

    class Intersection(metaclass=IntersectionMeta):
        pass
else:
    class Intersection:
        # On 3.7+ __class_getitem__ makes the class directly subscriptable.
        @classmethod
        def __class_getitem__(cls, params):
            return Intersection_item(cls, params)
def is_Intersection(T):
    """True iff ``T`` was produced by the Intersection[...] constructor."""
    try:
        getattr(T, INTERSECTION_ATT)
    except AttributeError:
        return False
    return True
def get_Intersection_args(T):
    """Return the tuple of operand types of an Intersection type."""
    assert is_Intersection(T)
    operands = getattr(T, INTERSECTION_ATT)
    return operands
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/my_intersection.py
|
my_intersection.py
|
from dataclasses import dataclass
from typing import *
try:
# noinspection PyUnresolvedReferences
from typing import ForwardRef
except ImportError: # pragma: no cover
# noinspection PyUnresolvedReferences
from typing import _ForwardRef as ForwardRef
from .test_utils import assert_object_roundtrip
def test_forward1_ok_no_locals_if_using_name():
    # Historical regression: resolving the 'C' forward reference used to
    # fail without the defining frame's locals(); it must work with an
    # empty symbol table.
    @dataclass
    class C:
        a: int
        b: Optional["C"] = None

    obj = C(12, C(1))
    assert_object_roundtrip(obj, {})
def test_forward1():
    # As above, but the symbol table explicitly provides 'C'.
    @dataclass
    class C:
        a: int
        b: Optional["C"] = None

    obj = C(12, C(1))
    assert_object_roundtrip(obj, {"C": C})
def test_forward2():
    # Here the whole annotation, not just C, is a forward-reference string.
    @dataclass
    class C:
        a: int
        b: 'Optional[C]' = None

    # noinspection PyTypeChecker
    obj = C(12, C(1))
    assert_object_roundtrip(obj, {"C": C})
def test_forward3():
    # As test_forward2 but without a default: b must be passed explicitly.
    @dataclass
    class C:
        a: int
        b: 'Optional[C]'

    obj = C(12, C(1, None))
    assert_object_roundtrip(obj, {"C": C})
|
zuper-utils
|
/zuper-utils-3.0.3.tar.gz/zuper-utils-3.0.3/src/zuper_json/test_forward.py
|
test_forward.py
|
elastic-apm -- ZUQA agent for Python
===========================================
.. image:: https://apm-ci.elastic.co/buildStatus/icon?job=apm-agent-python%2Fapm-agent-python-mbp%2Fmaster
:target: https://apm-ci.elastic.co/job/apm-agent-python/job/apm-agent-python-mbp/
:alt: Build Status
.. image:: https://img.shields.io/pypi/v/elastic-apm.svg?style=flat
:target: https://pypi.python.org/pypi/zuqa/
:alt: Latest Version
.. image:: https://img.shields.io/pypi/pyversions/elastic-apm.svg?style=flat
:target: https://pypi.python.org/pypi/elastic-apm/
:alt: Supported Python versions
This is the official Python module for ZUQA.
It provides full out-of-the-box support for many of the popular frameworks,
including Django and Flask. ZUQA is also easy to adapt for most
WSGI-compatible web applications via `custom integrations`_.
Your application doesn't live on the web? No problem! ZUQA is easy to use in
any Python application.
Read the documentation_.
.. _documentation: https://www.elastic.co/guide/en/apm/agent/python/current/index.html
.. _`custom integrations`: https://www.elastic.co/blog/creating-custom-framework-integrations-with-the-elastic-apm-python-agent
License
-------
BSD-3-Clause
Made with ♥️ and ☕️ by Elastic and our community.
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/README.rst
|
README.rst
|
#!/usr/bin/env python
"""
zuqa
======
zuqa is a Python client for `zuQA Kit <https://zuqa.io>`_. It provides
full out-of-the-box support for many of the popular frameworks, including
`Django <djangoproject.com>`_, `Flask <http://flask.pocoo.org/>`_, and `Pylons
<http://www.pylonsproject.org/>`_. zuqa also includes drop-in support for any
`WSGI <http://wsgi.readthedocs.org/>`_-compatible web application.
"""
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Hack to prevent stupid "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when running `python
# setup.py test` (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
# Import these modules eagerly so their atexit handlers register before
# setuptools'; modules that are absent (e.g. billiard without Celery) are
# simply skipped.
for m in ("multiprocessing", "billiard"):
    try:
        __import__(m)
    except ImportError:
        pass
import ast
import os
import sys
from codecs import open
from distutils.command.build_ext import build_ext
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError
from setuptools import Extension, find_packages, setup
from setuptools.command.test import test as TestCommand
if sys.platform == "win32":
build_ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError)
else:
build_ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
class BuildExtFailed(Exception):
    """Raised when compiling the optional C extension fails for any reason."""
    pass
class optional_build_ext(build_ext):
    """A build_ext that converts compiler failures into BuildExtFailed.

    This lets the caller retry setup() as a pure-Python installation
    instead of aborting when no working C toolchain is available.
    """

    def run(self):
        try:
            build_ext.run(self)
        except DistutilsPlatformError:
            raise BuildExtFailed()

    def build_extension(self, ext):
        try:
            build_ext.build_extension(self, ext)
        except build_ext_errors:
            raise BuildExtFailed()
def get_version():
    """
    Get version without importing from zuqa. This avoids any side effects
    from importing while installing and/or building the module
    :return: a string, indicating the version
    """
    # BUG FIX: use a context manager so the file handle is always closed
    # (the original leaked it, especially on the early return).
    with open(os.path.join("zuqa", "version.py"), encoding="utf-8") as version_file:
        for line in version_file:
            if line.startswith("VERSION"):
                # VERSION is assigned a literal, so literal_eval parses it
                # without importing the package.
                return ast.literal_eval(line.split(" = ")[1])
    return "unknown"
# Test-only dependencies; also exposed via the "tests" extra below.
tests_require = [
    "py>=1.4.26",
    "pytest>=2.6.4",
    "pytest-django==2.8.0",
    "pytest-capturelog>=0.7",
    "blinker>=1.1",
    "celery",
    "django-celery",
    "Flask>=0.8",
    "starlette",
    "logbook",
    "mock",
    "pep8",
    "webob",
    "pytz",
    "redis",
    "requests",
    "jinja2",
    "pytest-benchmark",
    "urllib3-mock",
    "Twisted",
    # isort
    "apipkg",
    "execnet",
    "isort",
    "pytest-cache",
    "pytest-isort",
]
# Interpreter-specific test dependencies.
if sys.version_info[0] == 2:
    tests_require += ["unittest2", "python-memcached"]
else:
    tests_require += ["python3-memcached"]
try:
    import __pypy__
except ImportError:
    # psycopg2 is CPython-only; under PyPy it is skipped.
    tests_require += ["psycopg2"]
if sys.version_info >= (3, 5):
    tests_require += ["aiohttp", "tornado", "starlette", "pytest-asyncio", "pytest-mock"]
# Runtime dependencies of the agent itself.
install_requires = ["urllib3", "certifi", "cachetools;python_version=='2.7'"]
class PyTest(TestCommand):
    """``python setup.py test`` entry point that delegates to pytest."""

    user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = []

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import pytest

        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
# Base keyword arguments for setup(); run_setup() copies and extends these
# depending on whether the C extension should be built.
setup_kwargs = dict(
    name="zuqa-agent-python",
    version=get_version(),
    author="Zuka Technologies, Inc",
    license="BSD",
    url="https://github.com/zukatechnologies/zuqa-agent-python",
    description="The official Python module for zuQA",
    long_description=open(os.path.join(os.path.dirname(__file__), "README.rst"), encoding="utf-8").read(),
    packages=find_packages(exclude=("tests",)),
    zip_safe=False,
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require={
        "tests": tests_require,
        "flask": ["blinker"],
        "aiohttp": ["aiohttp"],
        "tornado": ["tornado"],
        "starlette": ["starlette", "flask", "requests"],
        "opentracing": ["opentracing>=2.0.0"],
    },
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4",
    cmdclass={"test": PyTest},
    test_suite="tests",
    include_package_data=True,
    entry_points={"paste.filter_app_factory": ["zuqa = zuqa.contrib.paste:filter_factory"]},
    classifiers=[
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Topic :: Software Development",
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "License :: OSI Approved :: BSD License",
    ],
)
def run_setup(with_extensions):
    """Invoke setup(), optionally registering the wrapt C extension.

    :param with_extensions: if truthy, build the C speedup for zuqa.utils.wrapt
    """
    setup_kwargs_tmp = dict(setup_kwargs)
    if with_extensions:
        setup_kwargs_tmp["ext_modules"] = [
            Extension("zuqa.utils.wrapt._wrappers", ["zuqa/utils/wrapt/_wrappers.c"])
        ]
        # BUG FIX: dict(setup_kwargs) is a shallow copy, so assigning into the
        # nested "cmdclass" dict mutated the shared setup_kwargs and leaked
        # state into the no-extensions retry. Build a fresh cmdclass instead.
        setup_kwargs_tmp["cmdclass"] = dict(setup_kwargs_tmp["cmdclass"], build_ext=optional_build_ext)
    setup(**setup_kwargs_tmp)
# Figure out if we should build the wrapt C extensions
with_extensions = os.environ.get("ZUQA_AGENT_WRAPT_EXTENSIONS", None)
if with_extensions:
    # Only the exact (case-insensitive) strings "true"/"false" count as an
    # explicit override; anything else falls through to auto-detection.
    if with_extensions.lower() == "true":
        with_extensions = True
    elif with_extensions.lower() == "false":
        with_extensions = False
    else:
        with_extensions = None
if hasattr(sys, "pypy_version_info"):
    # Never build C extensions under PyPy (this overrides the env var too).
    with_extensions = False
if with_extensions is None:
    with_extensions = True
try:
    run_setup(with_extensions=with_extensions)
except BuildExtFailed:
    # No usable compiler: retry as a pure-Python installation.
    run_setup(with_extensions=False)
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/setup.py
|
setup.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
import sys
from zuqa.utils import get_url_dict
from zuqa.utils.wsgi import get_current_url, get_environ, get_headers
class ZUQA(object):
    """WSGI middleware that reports uncaught exceptions to ZUQA.

    Wraps a WSGI application; any exception escaping it is captured (with a
    snapshot of the HTTP request) and then re-raised.

    >>> from zuqa.base import Client
    >>> application = ZUQA(application, Client())
    """

    def __init__(self, application, client):
        self.application = application
        self.client = client

    def __call__(self, environ, start_response):
        try:
            for chunk in self.application(environ, start_response):
                yield chunk
        except Exception:
            exc_info = sys.exc_info()
            self.handle_exception(exc_info, environ)
            exc_info = None  # break the traceback reference cycle
            raise

    def handle_exception(self, exc_info, environ):
        # Capture the error together with the request details derived from
        # the WSGI environ; returns the id of the captured event.
        request_context = {
            "method": environ.get("REQUEST_METHOD"),
            "url": get_url_dict(get_current_url(environ)),
            "headers": dict(get_headers(environ)),
            "env": dict(get_environ(environ)),
        }
        event_id = self.client.capture(
            "Exception",
            exc_info=exc_info,
            context={"request": request_context},
            handled=False,
        )
        return event_id
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/middleware.py
|
middleware.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
import re
import warnings
from collections import defaultdict
from zuqa.conf.constants import ERROR, MASK, SPAN, TRANSACTION
from zuqa.utils import compat, varmap
from zuqa.utils.encoding import force_text
from zuqa.utils.stacks import get_lines_from_file
SANITIZE_FIELD_NAMES = frozenset(
["authorization", "password", "secret", "passwd", "token", "api_key", "access_token", "sessionid"]
)
SANITIZE_VALUE_PATTERNS = [re.compile(r"^[- \d]{16,19}$")] # credit card numbers, with or without spacers
def for_events(*events):
    """Decorator that tags a processor with the event types it applies to.

    The agent only invokes the wrapped processor for events whose type is in
    the given set (exposed on the function as ``event_types``).

    :param events: event type names, e.g. ERROR, TRANSACTION, SPAN
    """
    event_set = set(events)

    def decorate(func):
        func.event_types = event_set
        return func

    return decorate
@for_events(ERROR, TRANSACTION)
def remove_http_request_body(client, event):
    """
    Removes request.body from context
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    context = event.get("context", {})
    if "request" in context:
        context["request"].pop("body", None)
    return event
@for_events(ERROR, SPAN)
def remove_stacktrace_locals(client, event):
    """
    Removes local variables from any frames.
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    # Use a def instead of a lambda assigned to a name (PEP 8, E731).
    def drop_vars(frame):
        frame.pop("vars", None)

    return _process_stack_frames(event, drop_vars)
@for_events(ERROR, SPAN)
def sanitize_stacktrace_locals(client, event):
    """
    Sanitizes local variables in all frames
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    def scrub_frame(frame):
        if "vars" in frame:
            frame["vars"] = varmap(_sanitize, frame["vars"])

    return _process_stack_frames(event, scrub_frame)
@for_events(ERROR, TRANSACTION)
def sanitize_http_request_cookies(client, event):
    """
    Sanitizes http request cookies
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    # Both the parsed cookie dict and the raw Cookie header are scrubbed;
    # each is best-effort and independent of the other.
    try:
        request = event["context"]["request"]
    except (KeyError, TypeError):
        return event
    try:
        request["cookies"] = varmap(_sanitize, request["cookies"])
    except (KeyError, TypeError):
        pass
    try:
        request["headers"]["cookie"] = _sanitize_string(request["headers"]["cookie"], "; ", "=")
    except (KeyError, TypeError):
        pass
    return event
@for_events(ERROR, TRANSACTION)
def sanitize_http_response_cookies(client, event):
    """
    Sanitizes the set-cookie header of the response
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    try:
        headers = event["context"]["response"]["headers"]
        headers["set-cookie"] = _sanitize_string(headers["set-cookie"], ";", "=")
    except (KeyError, TypeError):
        pass
    return event
@for_events(ERROR, TRANSACTION)
def sanitize_http_headers(client, event):
    """
    Sanitizes http request/response headers
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    # Request and response headers are scrubbed independently: a missing
    # section must not prevent the other from being sanitized.
    for section in ("request", "response"):
        try:
            headers = event["context"][section]["headers"]
            event["context"][section]["headers"] = varmap(_sanitize, headers)
        except (KeyError, TypeError):
            pass
    return event
@for_events(ERROR, TRANSACTION)
def sanitize_http_wsgi_env(client, event):
    """
    Sanitizes WSGI environment variables
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    try:
        request = event["context"]["request"]
        request["env"] = varmap(_sanitize, request["env"])
    except (KeyError, TypeError):
        pass
    return event
@for_events(ERROR, TRANSACTION)
def sanitize_http_request_querystring(client, event):
    """
    Sanitizes http request query string
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    try:
        query_string = force_text(event["context"]["request"]["url"]["search"], errors="replace")
    except (KeyError, TypeError):
        return event
    if "=" in query_string:
        sanitized_query_string = _sanitize_string(query_string, "&", "=")
        full_url = event["context"]["request"]["url"]["full"]
        event["context"]["request"]["url"]["search"] = sanitized_query_string
        # NOTE(review): str.replace substitutes *every* occurrence of the
        # query string within the full URL, not only the query component —
        # confirm this is acceptable when the URL repeats that substring.
        event["context"]["request"]["url"]["full"] = full_url.replace(query_string, sanitized_query_string)
    return event
@for_events(ERROR, TRANSACTION)
def sanitize_http_request_body(client, event):
    """
    Sanitizes http request body. This only works if the request body
    is a query-encoded string. Other types (e.g. JSON) are not handled by
    this sanitizer.
    :param client: an ZUQA client
    :param event: a transaction or error event
    :return: The modified event
    """
    try:
        body = force_text(event["context"]["request"]["body"], errors="replace")
    except (KeyError, TypeError):
        return event
    if "=" not in body:
        return event
    event["context"]["request"]["body"] = _sanitize_string(body, "&", "=")
    return event
@for_events(ERROR, SPAN)
def add_context_lines_to_frames(client, event):
    """Attach pre/post source-context lines to stack frames.

    Only frames carrying a "context_metadata" tuple are processed; the tuple
    is consumed (popped) in the process.
    :param client: an ZUQA client
    :param event: an error or span event
    :return: The modified event
    """
    # divide frames up into source files before reading from disk. This should help
    # with utilizing the disk cache better
    #
    # TODO: further optimize by only opening each file once and reading all needed source
    # TODO: blocks at once.
    per_file = defaultdict(list)
    _process_stack_frames(
        event,
        lambda frame: per_file[frame["context_metadata"][0]].append(frame) if "context_metadata" in frame else None,
    )
    for filename, frames in compat.iteritems(per_file):
        for frame in frames:
            # context_metadata key has been set in zuqa.utils.stacks.get_frame_info for
            # all frames for which we should gather source code context lines
            fname, lineno, context_lines, loader, module_name = frame.pop("context_metadata")
            pre_context, context_line, post_context = get_lines_from_file(
                fname, lineno, context_lines, loader, module_name
            )
            # Only attach context if the target line could actually be read.
            if context_line:
                frame["pre_context"] = pre_context
                frame["context_line"] = context_line
                frame["post_context"] = post_context
    return event
@for_events(ERROR, SPAN)
def mark_in_app_frames(client, event):
    """Deprecated no-op kept so existing PROCESSORS settings do not break."""
    warnings.warn(
        "The mark_in_app_frames processor is deprecated and can be removed from your PROCESSORS setting",
        DeprecationWarning,
    )
    return event
def _sanitize(key, value):
    """Return MASK if *key* or the shape of *value* marks it as sensitive,
    otherwise return *value* unchanged."""
    if value is None:
        return
    if isinstance(value, compat.string_types) and any(
        pattern.match(value) for pattern in SANITIZE_VALUE_PATTERNS
    ):
        return MASK
    if isinstance(value, dict):
        # varmap will call _sanitize on each k:v pair of the dict, so we don't
        # have to do anything with dicts here
        return value
    if not key:  # key can be a NoneType
        return value
    lowered = key.lower()
    if any(field in lowered for field in SANITIZE_FIELD_NAMES):
        # store mask as a fixed length for security
        return MASK
    return value
def _sanitize_string(unsanitized, itemsep, kvsep):
    """
    sanitizes a string that contains multiple key/value items
    :param unsanitized: the unsanitized string
    :param itemsep: string that separates items
    :param kvsep: string that separates key from value
    :return: a sanitized string
    """
    pieces = []
    for item in unsanitized.split(itemsep):
        parts = item.split(kvsep)
        if len(parts) == 2:
            key, value = parts
            pieces.append((key, _sanitize(key, value)))
        else:
            # Not a simple key/value pair: keep it verbatim.
            pieces.append(parts)
    return itemsep.join(kvsep.join(piece) for piece in pieces)
def _process_stack_frames(event, func):
if "stacktrace" in event:
for frame in event["stacktrace"]:
func(frame)
# an error can have two stacktraces, one in "exception", one in "log"
if "exception" in event and "stacktrace" in event["exception"]:
for frame in event["exception"]["stacktrace"]:
func(frame)
# check for chained exceptions
cause = event["exception"].get("cause", None)
while cause:
if "stacktrace" in cause[0]:
for frame in cause[0]["stacktrace"]:
func(frame)
cause = cause[0].get("cause", None)
if "log" in event and "stacktrace" in event["log"]:
for frame in event["log"]["stacktrace"]:
func(frame)
return event
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/processors.py
|
processors.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import functools
import random
import re
import threading
import time
import timeit
import warnings
from collections import defaultdict
from zuqa.conf import constants
from zuqa.conf.constants import LABEL_RE, SPAN, TRANSACTION
from zuqa.context import init_execution_context
from zuqa.metrics.base_metrics import Timer
from zuqa.utils import compat, encoding, get_name_from_func
from zuqa.utils.deprecation import deprecated
from zuqa.utils.disttracing import TraceParent, TracingOptions
from zuqa.utils.logging import get_logger
__all__ = ("capture_span", "tag", "label", "set_transaction_name", "set_custom_context", "set_user_context")
error_logger = get_logger("zuqa.errors")
logger = get_logger("zuqa.traces")
_time_func = timeit.default_timer
execution_context = init_execution_context()
class ChildDuration(object):
    """Accumulates the total wall-clock time during which *obj* had at least
    one running child span. Overlapping child intervals are merged: the clock
    starts when the first child starts and stops when the last one ends.
    """

    __slots__ = ("obj", "_nesting_level", "_start", "_duration", "_lock")

    def __init__(self, obj):
        self.obj = obj
        self._nesting_level = 0
        self._start = None
        self._duration = 0
        self._lock = threading.Lock()

    def start(self, timestamp):
        with self._lock:
            self._nesting_level += 1
            # Only the outermost child (depth 1) starts the clock.
            if self._nesting_level == 1:
                self._start = timestamp

    def stop(self, timestamp):
        with self._lock:
            self._nesting_level -= 1
            # The clock stops when the last running child finishes.
            if self._nesting_level == 0:
                self._duration += timestamp - self._start

    @property
    def duration(self):
        # Total accumulated child time so far.
        return self._duration
class BaseSpan(object):
    """Shared behavior (labels and child-duration tracking) for Transaction
    and Span objects."""

    def __init__(self, labels=None):
        self._child_durations = ChildDuration(self)
        self.labels = {}
        if labels:
            self.label(**labels)

    def child_started(self, timestamp):
        self._child_durations.start(timestamp)

    def child_ended(self, timestamp):
        self._child_durations.stop(timestamp)

    def end(self, skip_frames=0, duration=None):
        # Subclasses must implement how a span/transaction is finished.
        raise NotImplementedError()

    def label(self, **labels):
        """
        Label this span with one or multiple key/value labels. Keys should be strings, values can be strings, booleans,
        or numerical values (int, float, Decimal)
        span_obj.label(key1="value1", key2=True, key3=42)
        Note that keys will be dedotted, replacing dot (.), star (*) and double quote (") with an underscore (_)
        :param labels: key/value pairs of labels
        :return: None
        """
        self.labels.update(encoding.enforce_label_format(labels))

    @deprecated("transaction/span.label()")
    def tag(self, **tags):
        """
        This method is deprecated, please use "label()" instead.
        Tag this span with one or multiple key/value tags. Both the values should be strings
        span_obj.tag(key1="value1", key2="value2")
        Note that keys will be dedotted, replacing dot (.), star (*) and double quote (") with an underscore (_)
        :param tags: key/value pairs of tags
        :return: None
        """
        for key, value in tags.items():
            self.labels[LABEL_RE.sub("_", compat.text_type(key))] = encoding.keyword_field(compat.text_type(value))
class Transaction(BaseSpan):
def __init__(self, tracer, transaction_type="custom", trace_parent=None, is_sampled=True, start=None):
self.id = "%016x" % random.getrandbits(64)
self.trace_parent = trace_parent
if start:
self.timestamp = self.start_time = start
else:
self.timestamp, self.start_time = time.time(), _time_func()
self.name = None
self.duration = None
self.result = None
self.transaction_type = transaction_type
self.tracer = tracer
self.dropped_spans = 0
self.context = {}
self.is_sampled = is_sampled
self._span_counter = 0
self._span_timers = defaultdict(Timer)
self._span_timers_lock = threading.Lock()
try:
self._breakdown = self.tracer._agent._metrics.get_metricset(
"zuqa.metrics.sets.breakdown.BreakdownMetricSet"
)
except (LookupError, AttributeError):
self._breakdown = None
try:
self._transaction_metrics = self.tracer._agent._metrics.get_metricset(
"zuqa.metrics.sets.transactions.TransactionsMetricSet"
)
except (LookupError, AttributeError):
self._transaction_metrics = None
super(Transaction, self).__init__()
def end(self, skip_frames=0, duration=None):
self.duration = duration if duration is not None else (_time_func() - self.start_time)
if self._transaction_metrics:
self._transaction_metrics.timer(
"transaction.duration",
reset_on_collect=True,
**{"transaction.name": self.name, "transaction.type": self.transaction_type}
).update(self.duration)
if self._breakdown:
for (span_type, span_subtype), timer in compat.iteritems(self._span_timers):
labels = {
"span.type": span_type,
"transaction.name": self.name,
"transaction.type": self.transaction_type,
}
if span_subtype:
labels["span.subtype"] = span_subtype
self._breakdown.timer("span.self_time", reset_on_collect=True, **labels).update(*timer.val)
labels = {"transaction.name": self.name, "transaction.type": self.transaction_type}
if self.is_sampled:
self._breakdown.counter("transaction.breakdown.count", reset_on_collect=True, **labels).inc()
self._breakdown.timer(
"span.self_time",
reset_on_collect=True,
**{"span.type": "app", "transaction.name": self.name, "transaction.type": self.transaction_type}
).update(self.duration - self._child_durations.duration)
def _begin_span(
self,
name,
span_type,
context=None,
leaf=False,
labels=None,
parent_span_id=None,
span_subtype=None,
span_action=None,
sync=True,
start=None,
):
parent_span = execution_context.get_span()
tracer = self.tracer
if parent_span and parent_span.leaf:
span = DroppedSpan(parent_span, leaf=True)
elif tracer.config.transaction_max_spans and self._span_counter > tracer.config.transaction_max_spans - 1:
self.dropped_spans += 1
span = DroppedSpan(parent_span)
self._span_counter += 1
else:
span = Span(
transaction=self,
name=name,
span_type=span_type or "code.custom",
context=context,
leaf=leaf,
labels=labels,
parent=parent_span,
parent_span_id=parent_span_id,
span_subtype=span_subtype,
span_action=span_action,
sync=sync,
start=start,
)
span.frames = tracer.frames_collector_func()
self._span_counter += 1
execution_context.set_span(span)
return span
def begin_span(
self,
name,
span_type,
context=None,
leaf=False,
labels=None,
span_subtype=None,
span_action=None,
sync=True,
start=None,
):
"""
Begin a new span
:param name: name of the span
:param span_type: type of the span
:param context: a context dict
:param leaf: True if this is a leaf span
:param labels: a flat string/string dict of labels
:param span_subtype: sub type of the span, e.g. "postgresql"
:param span_action: action of the span , e.g. "query"
:param start: timestamp, mostly useful for testing
:return: the Span object
"""
return self._begin_span(
name,
span_type,
context=context,
leaf=leaf,
labels=labels,
parent_span_id=None,
span_subtype=span_subtype,
span_action=span_action,
sync=sync,
start=start,
)
def end_span(self, skip_frames=0, duration=None):
"""
End the currently active span
:param skip_frames: numbers of frames to skip in the stack trace
:param duration: override duration, mostly useful for testing
:return: the ended span
"""
span = execution_context.get_span()
if span is None:
raise LookupError()
span.end(skip_frames=skip_frames, duration=duration)
return span
def ensure_parent_id(self):
"""If current trace_parent has no span_id, generate one, then return it
This is used to generate a span ID which the RUM agent will use to correlate
the RUM transaction with the backend transaction.
"""
if self.trace_parent.span_id == self.id:
self.trace_parent.span_id = "%016x" % random.getrandbits(64)
logger.debug("Set parent id to generated %s", self.trace_parent.span_id)
return self.trace_parent.span_id
def to_dict(self):
self.context["tags"] = self.labels
result = {
"id": self.id,
"trace_id": self.trace_parent.trace_id,
"name": encoding.keyword_field(self.name or ""),
"type": encoding.keyword_field(self.transaction_type),
"duration": self.duration * 1000, # milliseconds
"result": encoding.keyword_field(str(self.result)),
"timestamp": int(self.timestamp * 1000000), # microseconds
"sampled": self.is_sampled,
"span_count": {"started": self._span_counter - self.dropped_spans, "dropped": self.dropped_spans},
}
if self.trace_parent:
result["trace_id"] = self.trace_parent.trace_id
# only set parent_id if this transaction isn't the root
if self.trace_parent.span_id and self.trace_parent.span_id != self.id:
result["parent_id"] = self.trace_parent.span_id
if self.is_sampled:
result["context"] = self.context
return result
    def track_span_duration(self, span_type, span_subtype, self_duration):
        """Accumulate ``self_duration`` into the breakdown timer keyed by (span_type, span_subtype)."""
        # TODO: once asynchronous spans are supported, we should check if the transaction is already finished
        # TODO: and, if it has, exit without tracking.
        with self._span_timers_lock:
            self._span_timers[(span_type, span_subtype)].update(self_duration)
class Span(BaseSpan):
    """A single operation within a transaction (e.g. a DB query or an HTTP call)."""

    __slots__ = (
        "id",
        "transaction",
        "name",
        "type",
        "subtype",
        "action",
        "context",
        "leaf",
        "timestamp",
        "start_time",
        "duration",
        "parent",
        "parent_span_id",
        "frames",
        "labels",
        "sync",
        "_child_durations",
    )

    def __init__(
        self,
        transaction,
        name,
        span_type,
        context=None,
        leaf=False,
        labels=None,
        parent=None,
        parent_span_id=None,
        span_subtype=None,
        span_action=None,
        sync=True,
        start=None,
    ):
        """
        Create a new Span

        :param transaction: transaction object that this span relates to
        :param name: Generic name of the span
        :param span_type: type of the span, e.g. db
        :param context: context dictionary
        :param leaf: is this span a leaf span?
        :param labels: a dict of labels
        :param parent: parent span object, if any
        :param parent_span_id: override of the span ID
        :param span_subtype: sub type of the span, e.g. mysql
        :param span_action: sub type of the span, e.g. query
        :param sync: indicate if the span was executed synchronously or asynchronously
        :param start: timestamp, mostly useful for testing
        """
        self.start_time = start or _time_func()
        self.id = "%016x" % random.getrandbits(64)
        self.transaction = transaction
        self.name = name
        self.context = context if context is not None else {}
        self.leaf = leaf
        # timestamp is bit of a mix of monotonic and non-monotonic time sources.
        # we take the (non-monotonic) transaction timestamp, and add the (monotonic) difference of span
        # start time and transaction start time. In this respect, the span timestamp is guaranteed to grow
        # monotonically with respect to the transaction timestamp
        self.timestamp = transaction.timestamp + (self.start_time - transaction.start_time)
        self.duration = None
        self.parent = parent
        self.parent_span_id = parent_span_id
        self.frames = None
        self.sync = sync
        if span_subtype is None and "." in span_type:
            # old style dotted type, let's split it up into type/subtype[/action]
            type_bits = span_type.split(".")
            if len(type_bits) == 2:
                span_type, span_subtype = type_bits[:2]
            else:
                span_type, span_subtype, span_action = type_bits[:3]
        self.type = span_type
        self.subtype = span_subtype
        self.action = span_action
        if self.transaction._breakdown:
            # breakdown metrics: tell the parent (span, or the transaction for
            # top-level spans) that a child started, so it can track self-time
            p = self.parent if self.parent else self.transaction
            p.child_started(self.start_time)
        super(Span, self).__init__(labels=labels)

    def to_dict(self):
        """Serialize this span into the dict format expected by the events API."""
        result = {
            "id": self.id,
            "transaction_id": self.transaction.id,
            "trace_id": self.transaction.trace_parent.trace_id,
            # use either the explicitly set parent_span_id, or the id of the parent, or finally the transaction id
            "parent_id": self.parent_span_id or (self.parent.id if self.parent else self.transaction.id),
            "name": encoding.keyword_field(self.name),
            "type": encoding.keyword_field(self.type),
            "subtype": encoding.keyword_field(self.subtype),
            "action": encoding.keyword_field(self.action),
            "sync": self.sync,
            "timestamp": int(self.timestamp * 1000000),  # microseconds
            "duration": self.duration * 1000,  # milliseconds
        }
        if self.labels:
            if self.context is None:
                self.context = {}
            self.context["tags"] = self.labels
        if self.context:
            result["context"] = self.context
        if self.frames:
            result["stacktrace"] = self.frames
        return result

    def end(self, skip_frames=0, duration=None):
        """
        End this span and queue it for sending.

        :param skip_frames: amount of frames to skip from the beginning of the stack trace
        :param duration: override duration, mostly useful for testing
        :return: None
        """
        tracer = self.transaction.tracer
        timestamp = _time_func()
        self.duration = duration if duration is not None else (timestamp - self.start_time)
        # only keep a stack trace for spans long enough to be worth the cost
        if not tracer.span_frames_min_duration or self.duration >= tracer.span_frames_min_duration:
            self.frames = tracer.frames_processing_func(self.frames)[skip_frames:]
        else:
            self.frames = None
        # restore the parent as the currently active span
        execution_context.set_span(self.parent)
        tracer.queue_func(SPAN, self.to_dict())
        if self.transaction._breakdown:
            p = self.parent if self.parent else self.transaction
            p.child_ended(self.start_time + self.duration)
            # track self-time: own duration minus time spent in child spans
            self.transaction.track_span_duration(
                self.type, self.subtype, self.duration - self._child_durations.duration
            )

    def update_context(self, key, data):
        """
        Update the context data for given key

        :param key: the key, e.g. "db"
        :param data: a dictionary
        :return: None
        """
        current = self.context.get(key, {})
        current.update(data)
        self.context[key] = current

    def __str__(self):
        return u"{}/{}/{}".format(self.name, self.type, self.subtype)
class DroppedSpan(BaseSpan):
    """Inert stand-in used when a span is dropped (``transaction_max_spans`` exceeded).

    It mimics the ``Span`` interface (``end``, ``update_context``, the
    type/subtype/action/context properties) but records nothing and is never sent.
    """

    __slots__ = ("leaf", "parent", "id")

    def __init__(self, parent, leaf=False):
        self.parent = parent
        self.leaf = leaf
        # dropped spans have no id
        self.id = None
        super(DroppedSpan, self).__init__()

    def end(self, skip_frames=0, duration=None):
        # nothing to record; just restore the parent as the active span
        execution_context.set_span(self.parent)

    def child_started(self, timestamp):
        pass

    def child_ended(self, timestamp):
        pass

    def update_context(self, key, data):
        pass

    @property
    def type(self):
        return None

    @property
    def subtype(self):
        return None

    @property
    def action(self):
        return None

    @property
    def context(self):
        return None
class Tracer(object):
    """Creates and ends transactions/spans and hands finished events to ``queue_func``."""

    def __init__(self, frames_collector_func, frames_processing_func, queue_func, config, agent):
        """
        :param frames_collector_func: zero-argument callable that captures the current stack frames
        :param frames_processing_func: callable turning raw frames into serializable stack info
        :param queue_func: callable(event_type, data) used to enqueue finished events
        :param config: the agent configuration object
        :param agent: the owning client instance
        """
        self.config = config
        self.queue_func = queue_func
        self.frames_processing_func = frames_processing_func
        self.frames_collector_func = frames_collector_func
        self._agent = agent
        self._ignore_patterns = [re.compile(p) for p in config.transactions_ignore_patterns or []]

    @property
    def span_frames_min_duration(self):
        # -1/None disables the threshold (always collect frames); otherwise the
        # configured value is divided by 1000 — presumably milliseconds to
        # seconds, matching span durations (TODO confirm config unit)
        if self.config.span_frames_min_duration in (-1, None):
            return None
        else:
            return self.config.span_frames_min_duration / 1000.0

    def begin_transaction(self, transaction_type, trace_parent=None, start=None):
        """
        Start a new transactions and bind it in a thread-local variable

        :param transaction_type: type of the transaction, e.g. "request"
        :param trace_parent: an optional TraceParent object
        :param start: override the start timestamp, mostly useful for testing
        :returns the Transaction object
        """
        if trace_parent:
            # continuing a distributed trace: honor the upstream sampling decision
            is_sampled = bool(trace_parent.trace_options.recorded)
        else:
            is_sampled = (
                self.config.transaction_sample_rate == 1.0 or self.config.transaction_sample_rate > random.random()
            )
        transaction = Transaction(self, transaction_type, trace_parent=trace_parent, is_sampled=is_sampled, start=start)
        if trace_parent is None:
            # root of a new trace: generate a fresh 128-bit trace id
            transaction.trace_parent = TraceParent(
                constants.TRACE_CONTEXT_VERSION,
                "%032x" % random.getrandbits(128),
                transaction.id,
                TracingOptions(recorded=is_sampled),
            )
        execution_context.set_transaction(transaction)
        return transaction

    def _should_ignore(self, transaction_name):
        # True if the name matches any transactions_ignore_patterns regex
        for pattern in self._ignore_patterns:
            if pattern.search(transaction_name):
                return True
        return False

    def end_transaction(self, result=None, transaction_name=None, duration=None):
        """
        End the current transaction and queue it for sending

        :param result: result of the transaction, e.g. "OK" or 200
        :param transaction_name: name of the transaction
        :param duration: override duration, mostly useful for testing
        :return:
        """
        transaction = execution_context.get_transaction(clear=True)
        if transaction:
            if transaction.name is None:
                transaction.name = transaction_name if transaction_name is not None else ""
            transaction.end(duration=duration)
            if self._should_ignore(transaction.name):
                return
            if transaction.result is None:
                transaction.result = result
            self.queue_func(TRANSACTION, transaction.to_dict())
        return transaction
class capture_span(object):
    """Context manager / decorator that wraps a code block in a span."""

    __slots__ = ("name", "type", "subtype", "action", "extra", "skip_frames", "leaf", "labels", "duration", "start")

    def __init__(
        self,
        name=None,
        span_type="code.custom",
        extra=None,
        skip_frames=0,
        leaf=False,
        tags=None,
        labels=None,
        span_subtype=None,
        span_action=None,
        start=None,
        duration=None,
    ):
        self.name = name
        self.type = span_type
        self.subtype = span_subtype
        self.action = span_action
        self.extra = extra
        self.skip_frames = skip_frames
        self.leaf = leaf
        if tags and not labels:
            # backwards compatibility: "tags" was renamed to "labels"
            warnings.warn(
                'The tags argument to capture_span is deprecated, use "labels" instead',
                category=DeprecationWarning,
                stacklevel=2,
            )
            labels = tags
        self.labels = labels
        self.start = start
        self.duration = duration

    def __call__(self, func):
        # decorator usage: derive the span name from the function if none given
        self.name = self.name or get_name_from_func(func)

        @functools.wraps(func)
        def decorated(*args, **kwds):
            with self:
                return func(*args, **kwds)

        return decorated

    def __enter__(self):
        # only begin a span when a sampled transaction is active; otherwise
        # the "with" target is None
        transaction = execution_context.get_transaction()
        if transaction and transaction.is_sampled:
            return transaction.begin_span(
                self.name,
                self.type,
                context=self.extra,
                leaf=self.leaf,
                labels=self.labels,
                span_subtype=self.subtype,
                span_action=self.action,
                start=self.start,
            )

    def __exit__(self, exc_type, exc_val, exc_tb):
        transaction = execution_context.get_transaction()
        if transaction and transaction.is_sampled:
            try:
                span = transaction.end_span(self.skip_frames, duration=self.duration)
                if exc_val and not isinstance(span, DroppedSpan):
                    try:
                        # mark the exception with the span id so a later error
                        # capture can be correlated with this span
                        exc_val._zuqa_span_id = span.id
                    except AttributeError:
                        # could happen if the exception has __slots__
                        pass
            except LookupError:
                logger.info("ended non-existing span %s of type %s", self.name, self.type)
def label(**labels):
    """
    Attach labels to the currently active transaction.

    Keys should be strings; values can be strings, booleans, or numerical
    values (int, float, Decimal).

    :param labels: key/value map of labels
    """
    transaction = execution_context.get_transaction()
    if transaction:
        transaction.label(**labels)
    else:
        error_logger.warning("Ignored labels %s. No transaction currently active.", ", ".join(labels.keys()))
@deprecated("zuqa.label")
def tag(**tags):
"""
Tags current transaction. Both key and value of the label should be strings.
"""
transaction = execution_context.get_transaction()
if not transaction:
error_logger.warning("Ignored tags %s. No transaction currently active.", ", ".join(tags.keys()))
else:
transaction.tag(**tags)
def set_transaction_name(name, override=True):
    """Set the name of the current transaction.

    With ``override=False``, the name is only set when none was set before.
    No-op when no transaction is active.
    """
    transaction = execution_context.get_transaction()
    if transaction and (override or transaction.name is None):
        transaction.name = name
def set_transaction_result(result, override=True):
    """Set the result of the current transaction.

    With ``override=False``, the result is only set when none was set before.
    No-op when no transaction is active.
    """
    transaction = execution_context.get_transaction()
    if transaction and (override or transaction.result is None):
        transaction.result = result
def get_transaction_id():
    """
    Returns the current transaction ID, or None when no transaction is active.
    """
    transaction = execution_context.get_transaction()
    return transaction.id if transaction else None
def get_trace_id():
    """
    Returns the current trace ID, or None when no transaction (or no trace
    context) is active.
    """
    transaction = execution_context.get_transaction()
    if transaction and transaction.trace_parent:
        return transaction.trace_parent.trace_id
    return None
def get_span_id():
    """
    Returns the current span ID, or None when no span is active.
    """
    span = execution_context.get_span()
    return span.id if span else None
def set_context(data, key="custom"):
"""
Attach contextual data to the current transaction and errors that happen during the current transaction.
If the transaction is not sampled, this function becomes a no-op.
:param data: a dictionary, or a callable that returns a dictionary
:param key: the namespace for this data
"""
transaction = execution_context.get_transaction()
if not (transaction and transaction.is_sampled):
return
if callable(data):
data = data()
# remove invalid characters from key names
for k in list(data.keys()):
if LABEL_RE.search(k):
data[LABEL_RE.sub("_", k)] = data.pop(k)
if key in transaction.context:
transaction.context[key].update(data)
else:
transaction.context[key] = data
# Convenience variant of set_context() with the namespace fixed to "custom".
set_custom_context = functools.partial(set_context, key="custom")
def set_user_context(username=None, email=None, user_id=None):
    """Attach user information to the current transaction ("user" context).

    Only the arguments that are not None are included; each value is run
    through ``encoding.keyword_field``.
    """
    fields = (("username", username), ("email", email), ("id", user_id))
    data = {field: encoding.keyword_field(value) for field, value in fields if value is not None}
    set_context(data, "user")
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/traces.py
|
traces.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
from __future__ import absolute_import
import inspect
import itertools
import logging
import os
import platform
import sys
import threading
import time
import warnings
from copy import deepcopy
import zuqa
from zuqa.conf import Config, VersionedConfig, constants
from zuqa.conf.constants import ERROR
from zuqa.metrics.base_metrics import MetricsRegistry
from zuqa.traces import Tracer, execution_context
from zuqa.utils import cgroup, compat, is_master_process, stacks, varmap
from zuqa.utils.encoding import enforce_label_format, keyword_field, shorten, transform
from zuqa.utils.logging import get_logger
from zuqa.utils.module_import import import_string
__all__ = ("Client",)
class Client(object):
    """
    The base ZUQA client, which handles communication over the
    HTTP API to the APM Server.

    Will read default configuration from the environment variable
    ``ZUQA_APP_NAME`` and ``ZUQA_SECRET_TOKEN``
    if available. ::

    >>> from zuqa import Client

    >>> # Read configuration from environment
    >>> client = Client()

    >>> # Configure the client manually
    >>> client = Client(
    >>>     include_paths=['my.package'],
    >>>     service_name='myapp',
    >>>     secret_token='secret_token',
    >>> )

    >>> # Record an exception
    >>> try:
    >>>     1/0
    >>> except ZeroDivisionError:
    >>>     ident = client.capture_exception()
    >>>     print ("Exception caught; reference is %s" % ident)
    """

    logger = get_logger("zuqa")

    def __init__(self, config=None, **inline):
        """Build a fully configured client: config, transport, tracer, metrics."""
        # configure loggers first
        cls = self.__class__
        self.logger = get_logger("%s.%s" % (cls.__module__, cls.__name__))
        self.error_logger = get_logger("zuqa.errors")
        self._pid = None
        self._thread_starter_lock = threading.Lock()
        self._thread_managers = {}
        self.tracer = None
        self.processors = []
        self.filter_exception_types_dict = {}
        self._service_info = None
        self.check_python_version()
        config = Config(config, inline_dict=inline)
        if config.errors:
            # invalid configuration: report every error and stop sending data
            for msg in config.errors.values():
                self.error_logger.error(msg)
            config.disable_send = True
        self.config = VersionedConfig(config, version=None)
        # Insert the log_record_factory into the logging library
        # The LogRecordFactory functionality is only available on python 3.2+
        if compat.PY3 and not self.config.disable_log_record_factory:
            record_factory = logging.getLogRecordFactory()
            # Only way to know if it's wrapped is to create a log record
            throwaway_record = record_factory(__name__, logging.DEBUG, __file__, 252, "dummy_msg", [], None)
            if not hasattr(throwaway_record, "zuqa_labels"):
                self.logger.debug("Inserting zuqa log_record_factory into logging")
                # Late import due to circular imports
                import zuqa.handlers.logging as elastic_logging

                new_factory = elastic_logging.log_record_factory(record_factory)
                logging.setLogRecordFactory(new_factory)
        headers = {
            "Content-Type": "application/x-ndjson",
            "Content-Encoding": "gzip",
            "User-Agent": "zuqa-python/%s" % zuqa.VERSION,
        }
        transport_kwargs = {
            "metadata": self._build_metadata(),
            "headers": headers,
            "verify_server_cert": self.config.verify_server_cert,
            "server_cert": self.config.server_cert,
            "timeout": self.config.server_timeout,
            "processors": self.load_processors(),
        }
        # ensure a trailing slash so urljoin appends instead of replacing
        self._api_endpoint_url = compat.urlparse.urljoin(
            self.config.server_url if self.config.server_url.endswith("/") else self.config.server_url + "/",
            constants.EVENTS_API_PATH,
        )
        transport_class = import_string(self.config.transport_class)
        self._transport = transport_class(self._api_endpoint_url, self, **transport_kwargs)
        self.config.transport = self._transport
        self._thread_managers["transport"] = self._transport
        # split "module.path.ExcName" entries into {type: module} for fast lookup
        for exc_to_filter in self.config.filter_exception_types or []:
            exc_to_filter_type = exc_to_filter.split(".")[-1]
            exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
            self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module
        if platform.python_implementation() == "PyPy":
            # PyPy introduces a `_functools.partial.__call__` frame due to our use
            # of `partial` in AbstractInstrumentedModule
            skip_modules = ("zuqa.", "_functools")
        else:
            skip_modules = ("zuqa.",)
        self.tracer = Tracer(
            frames_collector_func=lambda: list(
                stacks.iter_stack_frames(
                    start_frame=inspect.currentframe(), skip_top_modules=skip_modules, config=self.config
                )
            ),
            frames_processing_func=lambda frames: self._get_stack_info_for_trace(
                frames,
                library_frame_context_lines=self.config.source_lines_span_library_frames,
                in_app_frame_context_lines=self.config.source_lines_span_app_frames,
                with_locals=self.config.collect_local_variables in ("all", "transactions"),
                locals_processor_func=lambda local_var: varmap(
                    lambda k, v: shorten(
                        v,
                        list_length=self.config.local_var_list_max_length,
                        string_length=self.config.local_var_max_length,
                        dict_length=self.config.local_var_dict_max_length,
                    ),
                    local_var,
                ),
            ),
            queue_func=self.queue,
            config=self.config,
            agent=self,
        )
        self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
        self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None
        self._metrics = MetricsRegistry(self)
        for path in self.config.metrics_sets:
            self._metrics.register(path)
        if self.config.breakdown_metrics:
            self._metrics.register("zuqa.metrics.sets.breakdown.BreakdownMetricSet")
        self._thread_managers["metrics"] = self._metrics
        compat.atexit_register(self.close)
        if self.config.central_config:
            self._thread_managers["config"] = self.config
        else:
            self._config_updater = None
        if config.enabled:
            self.start_threads()

    def start_threads(self):
        """(Re)start background threads of all registered thread managers.

        Safe to call repeatedly: threads are only started when the PID changed
        since the last call (e.g. after a fork).
        """
        with self._thread_starter_lock:
            current_pid = os.getpid()
            if self._pid != current_pid:
                self.logger.debug("Detected PID change from %r to %r, starting threads", self._pid, current_pid)
                for manager_type, manager in self._thread_managers.items():
                    self.logger.debug("Starting %s thread", manager_type)
                    manager.start_thread(pid=current_pid)
                self._pid = current_pid

    def get_handler(self, name):
        """Resolve the dotted-path event handler referenced by ``name``."""
        return import_string(name)

    def capture(self, event_type, date=None, context=None, custom=None, stack=None, handled=True, **kwargs):
        """
        Captures and processes an event and pipes it off to Client.send.

        Returns the id of the captured event, or None if nothing was recorded.
        """
        if not self.config.is_recording:
            return
        if event_type == "Exception":
            # never gather log stack for exceptions
            stack = False
        data = self._build_msg_for_logging(
            event_type, date=date, context=context, custom=custom, stack=stack, handled=handled, **kwargs
        )
        if data:
            # queue data, and flush the queue if this is an unhandled exception
            self.queue(ERROR, data, flush=not handled)
            return data["id"]

    def capture_message(self, message=None, param_message=None, **kwargs):
        """
        Creates an event from ``message``.

        >>> client.capture_message('My event just happened!')
        """
        return self.capture("Message", message=message, param_message=param_message, **kwargs)

    def capture_exception(self, exc_info=None, handled=True, **kwargs):
        """
        Creates an event from an exception.

        >>> try:
        >>>     exc_info = sys.exc_info()
        >>>     client.capture_exception(exc_info)
        >>> finally:
        >>>     del exc_info

        If exc_info is not provided, or is set to True, then this method will
        perform the ``exc_info = sys.exc_info()`` and the requisite clean-up
        for you.
        """
        return self.capture("Exception", exc_info=exc_info, handled=handled, **kwargs)

    def queue(self, event_type, data, flush=False):
        """Enqueue an event on the transport, optionally requesting a flush."""
        if self.config.disable_send:
            return
        self.start_threads()
        if flush and is_master_process():
            # don't flush in uWSGI master process to avoid ending up in an unpredictable threading state
            flush = False
        self._transport.queue(event_type, data, flush)

    def begin_transaction(self, transaction_type, trace_parent=None, start=None):
        """
        Register the start of a transaction on the client

        :param transaction_type: type of the transaction, e.g. "request"
        :param trace_parent: an optional TraceParent object for distributed tracing
        :param start: override the start timestamp, mostly useful for testing
        :return: the started transaction object
        """
        self._metrics.collect_actively = True
        if self.config.is_recording:
            return self.tracer.begin_transaction(transaction_type, trace_parent=trace_parent, start=start)

    def end_transaction(self, name=None, result="", duration=None):
        """
        End the current transaction.

        :param name: optional name of the transaction
        :param result: result of the transaction, e.g. "OK" or "HTTP 2xx"
        :param duration: override duration, mostly useful for testing
        :return: the ended transaction object
        """
        transaction = self.tracer.end_transaction(result, name, duration=duration)
        self._metrics.last_transaction_name = transaction.name
        self._metrics.collect_actively = False
        return transaction

    def close(self):
        """Stop all background threads. Registered with atexit in __init__."""
        if self.config.enabled:
            with self._thread_starter_lock:
                for _, manager in self._thread_managers.items():
                    manager.stop_thread()

    def get_service_info(self):
        """Build (and cache) the "service" section of the metadata document."""
        if self._service_info:
            return self._service_info
        language_version = platform.python_version()
        if hasattr(sys, "pypy_version_info"):
            runtime_version = ".".join(map(str, sys.pypy_version_info[:3]))
        else:
            runtime_version = language_version
        result = {
            "name": keyword_field(self.config.service_name),
            "environment": keyword_field(self.config.environment),
            "version": keyword_field(self.config.service_version),
            "agent": {"name": "python", "version": zuqa.VERSION},
            "language": {"name": "python", "version": keyword_field(platform.python_version())},
            "runtime": {
                "name": keyword_field(platform.python_implementation()),
                "version": keyword_field(runtime_version),
            },
        }
        if self.config.framework_name:
            result["framework"] = {
                "name": keyword_field(self.config.framework_name),
                "version": keyword_field(self.config.framework_version),
            }
        if self.config.service_node_name:
            result["node"] = {"configured_name": keyword_field(self.config.service_node_name)}
        self._service_info = result
        return result

    def get_process_info(self):
        """Build the "process" section of the metadata document."""
        return {
            "pid": os.getpid(),
            "ppid": os.getppid() if hasattr(os, "getppid") else None,
            "argv": sys.argv,
            "title": None,  # Note: if we implement this, the value needs to be wrapped with keyword_field
        }

    def get_system_info(self):
        """Build the "system" section of the metadata document, merging cgroup and k8s info."""
        system_data = {
            "hostname": keyword_field(self.config.hostname),
            "architecture": platform.machine(),
            "platform": platform.system().lower(),
        }
        system_data.update(cgroup.get_cgroup_container_metadata())
        pod_name = os.environ.get("KUBERNETES_POD_NAME") or system_data["hostname"]
        changed = False
        if "kubernetes" in system_data:
            k8s = system_data["kubernetes"]
            k8s["pod"]["name"] = pod_name
        else:
            k8s = {"pod": {"name": pod_name}}
        # get kubernetes metadata from environment
        if "KUBERNETES_NODE_NAME" in os.environ:
            k8s["node"] = {"name": os.environ["KUBERNETES_NODE_NAME"]}
            changed = True
        if "KUBERNETES_NAMESPACE" in os.environ:
            k8s["namespace"] = os.environ["KUBERNETES_NAMESPACE"]
            changed = True
        if "KUBERNETES_POD_UID" in os.environ:
            # this takes precedence over any value from /proc/self/cgroup
            k8s["pod"]["uid"] = os.environ["KUBERNETES_POD_UID"]
            changed = True
        if changed:
            system_data["kubernetes"] = k8s
        return system_data

    def _build_metadata(self):
        """Assemble the metadata document sent along with event batches."""
        data = {
            "service": self.get_service_info(),
            "process": self.get_process_info(),
            "system": self.get_system_info(),
        }
        if self.config.global_labels:
            data["labels"] = enforce_label_format(self.config.global_labels)
        return data

    def _build_msg_for_logging(
        self, event_type, date=None, context=None, custom=None, stack=None, handled=True, **kwargs
    ):
        """
        Captures, processes and serializes an event into a dict object
        """
        transaction = execution_context.get_transaction()
        span = execution_context.get_span()
        if transaction:
            transaction_context = deepcopy(transaction.context)
        else:
            transaction_context = {}
        event_data = {}
        if custom is None:
            custom = {}
        if date is not None:
            warnings.warn(
                "The date argument is no longer evaluated and will be removed in a future release", DeprecationWarning
            )
        date = time.time()
        if stack is None:
            stack = self.config.auto_log_stacks
        if context:
            transaction_context.update(context)
            context = transaction_context
        else:
            context = transaction_context
        event_data["context"] = context
        if transaction and transaction.labels:
            context["tags"] = deepcopy(transaction.labels)
        # resolve the (builtin) event type within the zuqa.events module
        event_type = "zuqa.events.%s" % event_type
        handler = self.get_handler(event_type)
        result = handler.capture(self, **kwargs)
        if self._filter_exception_type(result):
            return
        # data (explicit) culprit takes over auto event detection
        culprit = result.pop("culprit", None)
        if custom.get("culprit"):
            culprit = custom.pop("culprit")
        for k, v in compat.iteritems(result):
            if k not in event_data:
                event_data[k] = v
        log = event_data.get("log", {})
        if stack and "stacktrace" not in log:
            if stack is True:
                frames = stacks.iter_stack_frames(skip=3, config=self.config)
            else:
                frames = stack
            frames = stacks.get_stack_info(
                frames,
                with_locals=self.config.collect_local_variables in ("errors", "all"),
                library_frame_context_lines=self.config.source_lines_error_library_frames,
                in_app_frame_context_lines=self.config.source_lines_error_app_frames,
                include_paths_re=self.include_paths_re,
                exclude_paths_re=self.exclude_paths_re,
                locals_processor_func=lambda local_var: varmap(
                    lambda k, v: shorten(
                        v,
                        list_length=self.config.local_var_list_max_length,
                        string_length=self.config.local_var_max_length,
                        dict_length=self.config.local_var_dict_max_length,
                    ),
                    local_var,
                ),
            )
            log["stacktrace"] = frames
        if "stacktrace" in log and not culprit:
            culprit = stacks.get_culprit(log["stacktrace"], self.config.include_paths, self.config.exclude_paths)
        if "level" in log and isinstance(log["level"], compat.integer_types):
            log["level"] = logging.getLevelName(log["level"]).lower()
        if log:
            event_data["log"] = log
        if culprit:
            event_data["culprit"] = culprit
        if "custom" in context:
            context["custom"].update(custom)
        else:
            context["custom"] = custom
        # Make sure all data is coerced
        event_data = transform(event_data)
        if "exception" in event_data:
            event_data["exception"]["handled"] = bool(handled)
        event_data["timestamp"] = int(date * 1000000)
        if transaction:
            if transaction.trace_parent:
                event_data["trace_id"] = transaction.trace_parent.trace_id
            # parent id might already be set in the handler
            event_data.setdefault("parent_id", span.id if span else transaction.id)
            event_data["transaction_id"] = transaction.id
            event_data["transaction"] = {"sampled": transaction.is_sampled, "type": transaction.transaction_type}
        return event_data

    def _filter_exception_type(self, data):
        """Return True if the captured exception matches ``filter_exception_types``."""
        exception = data.get("exception")
        if not exception:
            return False
        exc_type = exception.get("type")
        exc_module = exception.get("module")
        if exc_module == "None":
            exc_module = None
        if exc_type in self.filter_exception_types_dict:
            exc_to_filter_module = self.filter_exception_types_dict[exc_type]
            if not exc_to_filter_module or exc_to_filter_module == exc_module:
                if exc_module:
                    exc_name = "%s.%s" % (exc_module, exc_type)
                else:
                    exc_name = exc_type
                self.logger.info("Ignored %s exception due to exception type filter", exc_name)
                return True
        return False

    def _get_stack_info_for_trace(
        self,
        frames,
        library_frame_context_lines=None,
        in_app_frame_context_lines=None,
        with_locals=True,
        locals_processor_func=None,
    ):
        """Overrideable in derived clients to add frames/info, e.g. templates"""
        return stacks.get_stack_info(
            frames,
            library_frame_context_lines=library_frame_context_lines,
            in_app_frame_context_lines=in_app_frame_context_lines,
            with_locals=with_locals,
            include_paths_re=self.include_paths_re,
            exclude_paths_re=self.exclude_paths_re,
            locals_processor_func=locals_processor_func,
        )

    def load_processors(self):
        """
        Loads processors from self.config.processors, as well as constants.HARDCODED_PROCESSORS.
        Duplicate processors (based on the path) will be discarded.

        :return: a list of callables
        """
        processors = itertools.chain(self.config.processors, constants.HARDCODED_PROCESSORS)
        seen = {}
        # setdefault has the nice property that it returns the value that it just set on the dict
        return [seen.setdefault(path, import_string(path)) for path in processors if path not in seen]

    def check_python_version(self):
        """Emit a warning when running on a Python version that is (or will be) unsupported."""
        v = tuple(map(int, platform.python_version_tuple()[:2]))
        if v == (2, 7):
            warnings.warn(
                (
                    "The ZUQA agent will stop supporting Python 2.7 starting in 6.0.0 -- "
                    "Please upgrade to Python 3.5+ to continue to use the latest features."
                ),
                PendingDeprecationWarning,
            )
        elif v < (3, 5):
            warnings.warn("The ZUQA agent only supports Python 3.5+", DeprecationWarning)
class DummyClient(Client):
    """Sends messages into an empty void"""

    def send(self, url, **kwargs):
        # intentionally discard everything
        return None
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/base.py
|
base.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
import random
import sys
from zuqa.conf.constants import EXCEPTION_CHAIN_MAX_DEPTH
from zuqa.utils import compat, varmap
from zuqa.utils.encoding import keyword_field, shorten, to_unicode
from zuqa.utils.logging import get_logger
from zuqa.utils.stacks import get_culprit, get_stack_info, iter_traceback_frames
# Public event classes exported by this module.
# NOTE(review): "Exception" here is this module's event class and shadows the
# builtin of the same name for the rest of this module — confirm intended.
__all__ = ("BaseEvent", "Exception", "Message")

logger = get_logger("zuqa.events")
class BaseEvent(object):
    """Common interface for capturable event types (errors, log messages)."""

    @staticmethod
    def to_string(client, data):
        """Render a one-line human-readable summary of *data*; subclasses must override."""
        raise NotImplementedError

    @staticmethod
    def capture(client, **kwargs):
        """Build and return the event payload dict; the base implementation is empty."""
        return {}
class Exception(BaseEvent):
    """
    Exceptions store the following metadata:

    - value: 'My exception value'
    - type: 'ClassName'
    - module '__builtin__' (i.e. __builtin__.TypeError)
    - frames: a list of serialized frames (see _get_traceback_frames)

    NOTE(review): this class shadows the builtin ``Exception`` at module
    level; the ``except Exception`` clause inside :meth:`capture` therefore
    refers to this class, not the builtin — confirm this is intended.
    """

    @staticmethod
    def to_string(client, data):
        # Human-readable "Type: value" summary, or just the type when the
        # exception carried no value.
        exc = data["exception"]
        if exc["value"]:
            return "%s: %s" % (exc["type"], exc["value"])
        return exc["type"]

    @staticmethod
    def get_hash(data):
        # Grouping key: the exception type followed by the module and
        # function of every captured stack frame.
        exc = data["exception"]
        output = [exc["type"]]
        for frame in data["stacktrace"]["frames"]:
            output.append(frame["module"])
            output.append(frame["function"])
        return output

    @staticmethod
    def capture(client, exc_info=None, **kwargs):
        """Build an error-event dict from ``exc_info``.

        If ``exc_info`` is falsy or ``True``, the currently handled
        exception (``sys.exc_info()``) is used.  On Python 3 this recurses
        into ``__cause__``/``__context__`` to attach chained exceptions,
        bounded by ``EXCEPTION_CHAIN_MAX_DEPTH``.

        Raises ``ValueError`` when no exception is active and none was given.
        """
        culprit = exc_value = exc_type = exc_module = frames = exc_traceback = None
        new_exc_info = False
        if not exc_info or exc_info is True:
            # No explicit exc_info: grab the exception currently being handled.
            new_exc_info = True
            exc_info = sys.exc_info()
            if exc_info == (None, None, None):
                raise ValueError("No exception found: capture_exception requires an active exception.")
        try:
            exc_type, exc_value, exc_traceback = exc_info
            # Serialize the traceback frames, optionally including (shortened)
            # local variables depending on client configuration.
            frames = get_stack_info(
                iter_traceback_frames(exc_traceback, config=client.config),
                with_locals=client.config.collect_local_variables in ("errors", "all"),
                library_frame_context_lines=client.config.source_lines_error_library_frames,
                in_app_frame_context_lines=client.config.source_lines_error_app_frames,
                include_paths_re=client.include_paths_re,
                exclude_paths_re=client.exclude_paths_re,
                locals_processor_func=lambda local_var: varmap(
                    lambda k, val: shorten(
                        val,
                        list_length=client.config.local_var_list_max_length,
                        string_length=client.config.local_var_max_length,
                        dict_length=client.config.local_var_dict_max_length,
                    ),
                    local_var,
                ),
            )
            # An explicit culprit wins; otherwise derive it from the frames.
            culprit = kwargs.get("culprit", None) or get_culprit(
                frames, client.config.include_paths, client.config.exclude_paths
            )
            # Reduce the exception class to its module and name.
            # NOTE(review): both branches assign ``exc_type.__name__`` — the
            # duplication looks unintentional but is harmless.
            if hasattr(exc_type, "__module__"):
                exc_module = exc_type.__module__
                exc_type = exc_type.__name__
            else:
                exc_module = None
                exc_type = exc_type.__name__
        finally:
            if new_exc_info:
                # Break the reference cycle created by holding on to the
                # traceback of the currently handled exception.
                try:
                    del exc_info
                    del exc_traceback
                except Exception as e:
                    logger.exception(e)
        if "message" in kwargs:
            message = kwargs["message"]
        else:
            message = "%s: %s" % (exc_type, to_unicode(exc_value)) if exc_value else str(exc_type)
        data = {
            "id": "%032x" % random.getrandbits(128),
            "culprit": keyword_field(culprit),
            "exception": {
                "message": message,
                "type": keyword_field(str(exc_type)),
                "module": keyword_field(str(exc_module)),
                "stacktrace": frames,
            },
        }
        # Link the error to the span in which it was raised, if the tracer
        # attached one to the exception object.
        if hasattr(exc_value, "_zuqa_span_id"):
            data["parent_id"] = exc_value._zuqa_span_id
            del exc_value._zuqa_span_id
        if compat.PY3:
            depth = kwargs.get("_exc_chain_depth", 0)
            if depth > EXCEPTION_CHAIN_MAX_DEPTH:
                return
            cause = exc_value.__cause__
            chained_context = exc_value.__context__
            # we follow the pattern of Python itself here and only capture the chained exception
            # if cause is not None and __suppress_context__ is False
            if chained_context and not (exc_value.__suppress_context__ and cause is None):
                if cause:
                    chained_exc_type = type(cause)
                    chained_exc_value = cause
                else:
                    chained_exc_type = type(chained_context)
                    chained_exc_value = chained_context
                chained_exc_info = chained_exc_type, chained_exc_value, chained_context.__traceback__
                # NOTE(review): culprit is passed as the string "None", not the
                # value None — confirm this is intended.
                chained_cause = Exception.capture(
                    client, exc_info=chained_exc_info, culprit="None", _exc_chain_depth=depth + 1
                )
                if chained_cause:
                    data["exception"]["cause"] = [chained_cause["exception"]]
        return data
class Message(BaseEvent):
    """
    Messages store the following metadata:

    - message: 'My message from %s about %s'
    - params: ('foo', 'bar')
    """

    @staticmethod
    def to_string(client, data):
        # The already-formatted log message is the display string.
        return data["log"]["message"]

    @staticmethod
    def get_hash(data):
        # Group on the unformatted template so varying params collapse
        # into a single group.
        msg = data["param_message"]
        return [msg["message"]]

    @staticmethod
    def capture(client, param_message=None, message=None, level=None, logger_name=None, **kwargs):
        """Build a log-message event dict, formatting the template if params are given."""
        if message:
            param_message = {"message": message}
        params = param_message.get("params")
        formatted = param_message["message"] % params if params else param_message["message"]
        extra = kwargs.get("data", {})
        event = {
            "id": "%032x" % random.getrandbits(128),
            "log": {
                "level": keyword_field(level or "error"),
                "logger_name": keyword_field(logger_name or "__root__"),
                "message": formatted,
                "param_message": keyword_field(param_message["message"]),
            },
        }
        if isinstance(extra.get("stacktrace"), dict):
            event["log"]["stacktrace"] = extra["stacktrace"]["frames"]
        if kwargs.get("exception"):
            event["culprit"] = kwargs["exception"]["culprit"]
            event["exception"] = kwargs["exception"]["exception"]
        return event
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/events.py
|
events.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Human-readable package version string (pre-release of 1.0.0).
VERSION = "1.0.0-alpha.1"
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/version.py
|
version.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
import sys
from zuqa.base import Client
from zuqa.conf import setup_logging # noqa: F401
from zuqa.instrumentation.control import instrument, uninstrument # noqa: F401
from zuqa.traces import ( # noqa: F401
capture_span,
get_span_id,
get_trace_id,
get_transaction_id,
label,
set_context,
set_custom_context,
set_transaction_name,
set_transaction_result,
set_user_context,
tag,
)
from zuqa.utils.disttracing import trace_parent_from_headers, trace_parent_from_string # noqa: F401
# Names re-exported from the package root.
__all__ = ("VERSION", "Client")

try:
    # Resolve the installed distribution's version at import time.
    VERSION = __import__("pkg_resources").get_distribution("zuqa").version
except Exception:
    # pkg_resources unavailable or package not installed (e.g. source checkout).
    VERSION = "unknown"

if sys.version_info >= (3, 5):
    # asyncio support needs async/await syntax, available from Python 3.5.
    from zuqa.contrib.asyncio.traces import async_capture_span  # noqa: F401
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from zuqa.base import Client
from zuqa.middleware import ZUQA
def filter_factory(app, global_conf, **kwargs):
    """Paste Deployment filter factory: wrap *app* in the ZUQA WSGI middleware.

    All local keyword settings are forwarded to the Client constructor;
    *global_conf* is accepted per the Paste contract but unused.
    """
    return ZUQA(app, Client(**kwargs))
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/paste.py
|
paste.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
from zuqa.base import Client
from zuqa.middleware import ZUQA as Middleware
from zuqa.utils import compat
def list_from_setting(config, setting):
    """Return the whitespace-separated tokens of *setting* from *config*.

    Returns None when the setting is absent or empty.
    """
    raw = config.get(setting)
    return raw.split() if raw else None
class ZUQA(Middleware):
    """Pylons entry point: build a Client from "zuqa."-prefixed settings.

    Config keys such as ``zuqa.server_url`` are stripped of their prefix
    and passed as keyword arguments to *client_cls*.
    """

    def __init__(self, app, config, client_cls=Client):
        # BUG FIX: the prefix "zuqa." is 5 characters, but the original code
        # sliced key[11:] (a leftover from an 11-char prefix), mangling every
        # setting name (e.g. "zuqa.server_url" -> "_url").  Slice by the
        # actual prefix length instead.
        prefix = "zuqa."
        client_config = {
            key[len(prefix):]: val
            for key, val in compat.iteritems(config)
            if key.startswith(prefix)
        }
        client = client_cls(**client_config)
        super(ZUQA, self).__init__(app, client)
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/pylons/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import aiohttp
from aiohttp.web import HTTPException, Response, middleware
import zuqa
from zuqa.conf import constants
from zuqa.contrib.aiohttp.utils import get_data_from_request, get_data_from_response
from zuqa.utils.disttracing import TraceParent
class AioHttpTraceParent(TraceParent):
    """TraceParent variant that understands aiohttp's multidict headers."""

    @classmethod
    def merge_duplicate_headers(cls, headers, key):
        # aiohttp headers are multidicts: a key may appear several times.
        # Join all values with commas; an empty result becomes None.
        merged = ",".join(headers.getall(key, []))
        return merged if merged else None
def tracing_middleware(app):
    """Create the aiohttp middleware that wraps each request in a transaction."""
    from zuqa.contrib.aiohttp import CLIENT_KEY  # noqa

    async def handle_request(request, handler):
        zuqa_client = app.get(CLIENT_KEY)
        if zuqa_client:
            request[CLIENT_KEY] = zuqa_client
            # Continue a distributed trace if the caller sent trace headers.
            trace_parent = AioHttpTraceParent.from_headers(request.headers)
            zuqa_client.begin_transaction("request", trace_parent=trace_parent)
            # Name the transaction "<METHOD> <route pattern>" where possible.
            resource = request.match_info.route.resource
            name = request.method
            if resource:
                # canonical has been added in 3.3, and returns one of path, formatter, prefix
                for attr in ("canonical", "_path", "_formatter", "_prefix"):
                    if hasattr(resource, attr):
                        name += " " + getattr(resource, attr)
                        break
                else:
                    name += " unknown route"
            else:
                name += " unknown route"
            zuqa.set_transaction_name(name, override=False)
            zuqa.set_context(
                lambda: get_data_from_request(request, zuqa_client.config, constants.TRANSACTION), "request"
            )
        try:
            response = await handler(request)
            # Record the result class (e.g. "HTTP 2xx") rather than the exact code.
            zuqa.set_transaction_result("HTTP {}xx".format(response.status // 100), override=False)
            zuqa.set_context(
                lambda: get_data_from_response(response, zuqa_client.config, constants.TRANSACTION), "response"
            )
            return response
        except Exception as exc:
            if zuqa_client:
                zuqa_client.capture_exception(
                    context={"request": get_data_from_request(request, zuqa_client.config, constants.ERROR)}
                )
                zuqa.set_transaction_result("HTTP 5xx", override=False)
                zuqa.set_context({"status_code": 500}, "response")
                # some exceptions are response-like, e.g. have headers and status code. Let's try and capture them
                if isinstance(exc, (Response, HTTPException)):
                    zuqa.set_context(
                        lambda: get_data_from_response(exc, zuqa_client.config, constants.ERROR),  # noqa: F821
                        "response",
                    )
            raise
        finally:
            # NOTE(review): this dereferences zuqa_client unconditionally; when
            # no client is configured (zuqa_client is None) this would raise
            # AttributeError — confirm a client is always present on the app.
            zuqa_client.end_transaction()

    # decorating with @middleware is only required in aiohttp < 4.0, and we only support 3+
    if aiohttp.__version__.startswith("3"):
        return middleware(handle_request)
    return handle_request
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/aiohttp/middleware.py
|
middleware.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from typing import Union
from aiohttp.web import HTTPException, Request, Response
from zuqa.conf import Config
from zuqa.utils import compat, get_url_dict
def get_data_from_request(request: Request, config: Config, event_type: str):
    """Collect the "request" context for an event from an aiohttp request.

    Captures method, peer socket info, cookies, optionally the headers
    (when ``config.capture_headers`` is set), and the full URL.
    """
    context = {
        "method": request.method,
        "socket": {"remote_address": request.remote, "encrypted": request.secure},
        "cookies": dict(request.cookies),
    }
    if config.capture_headers:
        context["headers"] = dict(request.headers)
    # TODO: capture body
    context["url"] = get_url_dict(str(request.url))
    return context
def get_data_from_response(response: Union[HTTPException, Response], config: Config, event_type: str):
    """Collect the "response" context (status code, headers) from an aiohttp response.

    Accepts real Responses as well as response-like HTTPExceptions.
    """
    context = {}
    # Responses expose ``status``; HTTPExceptions expose ``status_code``.
    status = getattr(response, "status", getattr(response, "status_code", None))
    if isinstance(status, compat.integer_types):
        context["status_code"] = status
    if config.capture_headers and getattr(response, "headers", None):
        hdrs = response.headers
        context["headers"] = {name: ";".join(hdrs.getall(name)) for name in compat.iterkeys(hdrs)}
    return context
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/aiohttp/utils.py
|
utils.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import aiohttp
import zuqa
from zuqa import Client
# Key under which the Client instance is stored on the aiohttp app (and request).
CLIENT_KEY = "_zuqa_client_instance"
class ZUQA:
    """Aiohttp plugin: stores a Client on the app and installs tracing middleware."""

    def __init__(self, app, client=None):
        if not client:
            # Build a client from the app's "ZUQA" config dict, defaulting
            # framework metadata to aiohttp.
            settings = app.get("ZUQA", {})
            settings.setdefault("framework_name", "aiohttp")
            settings.setdefault("framework_version", aiohttp.__version__)
            client = Client(config=settings)
        app[CLIENT_KEY] = client
        self.app = app
        self.client = client
        self.install_tracing(app, client)

    def install_tracing(self, app, client):
        from zuqa.contrib.aiohttp.middleware import tracing_middleware

        # Prepend so the tracing middleware wraps all other middlewares.
        app.middlewares.insert(0, tracing_middleware(app))
        if client.config.instrument and client.config.enabled:
            zuqa.instrument()
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/aiohttp/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
from celery import signals
from zuqa.utils import get_name_from_func
class CeleryFilter(object):
    """Logging filter that drops records emitted from Celery's ``_log_error``.

    Returning 0 suppresses the record; 1 lets it through (logging protocol).
    """

    def filter(self, record):
        # Suppress Celery's internal error-log records to avoid duplicate
        # error reporting alongside the signal-based capture.
        return 0 if record.funcName in ("_log_error",) else 1
def register_exception_tracking(client):
    """Report Celery task failures as ZUQA errors via the task_failure signal."""
    dispatch_uid = "zuqa-exc-tracking"

    def on_task_failure(sender, task_id, exception, args, kwargs, traceback, einfo, **kw):
        client.capture_exception(
            extra={"task_id": task_id, "task": sender, "args": args, "kwargs": kwargs}, handled=False
        )

    # Drop any handler registered under this uid before (re)connecting ours.
    signals.task_failure.disconnect(on_task_failure, dispatch_uid=dispatch_uid)
    signals.task_failure.connect(on_task_failure, weak=False, dispatch_uid=dispatch_uid)
    _register_worker_signals(client)
def register_instrumentation(client):
    """Wrap every Celery task execution in a ZUQA "celery" transaction."""

    def on_task_prerun(*args, **kwargs):
        client.begin_transaction("celery")

    def on_task_postrun(task_id, task, *args, **kwargs):
        client.end_transaction(get_name_from_func(task), kwargs.get("state", "None"))

    dispatch_uid = "zuqa-tracing-%s"
    # unregister any existing clients
    signals.task_prerun.disconnect(on_task_prerun, dispatch_uid=dispatch_uid % "prerun")
    signals.task_postrun.disconnect(on_task_postrun, dispatch_uid=dispatch_uid % "postrun")
    # register for this client
    signals.task_prerun.connect(on_task_prerun, dispatch_uid=dispatch_uid % "prerun", weak=False)
    signals.task_postrun.connect(on_task_postrun, weak=False, dispatch_uid=dispatch_uid % "postrun")
    _register_worker_signals(client)
def _register_worker_signals(client):
    """Close the client when a Celery worker process shuts down."""

    def on_worker_shutdown(*args, **kwargs):
        client.close()

    def on_worker_init(*args, **kwargs):
        # Connect the shutdown hook from within each worker process.
        signals.worker_process_shutdown.connect(
            on_worker_shutdown, dispatch_uid="zuqa-shutdown-worker", weak=False
        )

    signals.worker_init.connect(on_worker_init, dispatch_uid="zuqa-connect-start-threads", weak=False)
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/celery/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from werkzeug.exceptions import ClientDisconnected
from zuqa.conf import constants
from zuqa.utils import compat, get_url_dict
from zuqa.utils.wsgi import get_environ, get_headers
def get_data_from_request(request, config, event_type):
    """Collect the "request" context from a Flask/Werkzeug request.

    Captures environ, method, socket info, cookies, optionally headers, the
    body (subject to the ``capture_body`` setting and *event_type*) and URL.
    """
    context = {
        "env": dict(get_environ(request.environ)),
        "method": request.method,
        "socket": {"remote_address": request.environ.get("REMOTE_ADDR"), "encrypted": request.is_secure},
        "cookies": request.cookies,
    }
    if config.capture_headers:
        context["headers"] = dict(get_headers(request.environ))
    if request.method in constants.HTTP_WITH_BODY:
        if config.capture_body not in ("all", event_type):
            # Body capture disabled for this event type: redact it.
            context["body"] = "[REDACTED]"
        else:
            body = None
            content_type = request.content_type
            if content_type == "application/x-www-form-urlencoded":
                body = compat.multidict_to_dict(request.form)
            elif content_type and content_type.startswith("multipart/form-data"):
                body = compat.multidict_to_dict(request.form)
                if request.files:
                    # Record uploaded file names (a list when a field repeats).
                    body["_files"] = {
                        field: vals[0].filename if len(vals) == 1 else [f.filename for f in vals]
                        for field, vals in compat.iterlists(request.files)
                    }
            else:
                try:
                    body = request.get_data(as_text=True)
                except ClientDisconnected:
                    # Client went away mid-read: capture without a body.
                    pass
            if body is not None:
                context["body"] = body
    context["url"] = get_url_dict(request.url)
    return context
def get_data_from_response(response, config, event_type):
    """Collect the "response" context (status code, headers) from a Flask response."""
    context = {}
    if isinstance(getattr(response, "status_code", None), compat.integer_types):
        context["status_code"] = response.status_code
    if config.capture_headers and getattr(response, "headers", None):
        hdrs = response.headers
        context["headers"] = {name: ";".join(hdrs.getlist(name)) for name in compat.iterkeys(hdrs)}
    return context
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/flask/utils.py
|
utils.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
from __future__ import absolute_import
import logging
import flask
from flask import request, signals
import zuqa
import zuqa.instrumentation.control
from zuqa.base import Client
from zuqa.conf import constants, setup_logging
from zuqa.contrib.flask.utils import get_data_from_request, get_data_from_response
from zuqa.handlers.logging import LoggingHandler
from zuqa.traces import execution_context
from zuqa.utils import build_name_with_http_method_prefix
from zuqa.utils.disttracing import TraceParent
from zuqa.utils.logging import get_logger
logger = get_logger("zuqa.errors.client")
def make_client(client_cls, app, **defaults):
    """Instantiate a ZUQA client from a Flask app's ``ZUQA`` config dict.

    When the caller has not supplied ``framework_name``, the framework
    name/version pair is filled in from the installed Flask package.
    """
    config = app.config.get("ZUQA", {})
    if "framework_name" not in defaults:
        defaults.update(
            framework_name="flask",
            framework_version=getattr(flask, "__version__", "<0.7"),
        )
    return client_cls(config, **defaults)
class ZUQA(object):
    """
    Flask application for ZUQA.
    Look up configuration from ``os.environ.get('ZUQA_APP_NAME')`` and
    ``os.environ.get('ZUQA_SECRET_TOKEN')``::
    >>> zuqa = ZUQA(app)
    Pass an arbitrary APP_NAME and SECRET_TOKEN::
    >>> zuqa = ZUQA(app, service_name='myapp', secret_token='asdasdasd')
    Pass an explicit client::
    >>> zuqa = ZUQA(app, client=client)
    Automatically configure logging::
    >>> zuqa = ZUQA(app, logging=True)
    Capture an exception::
    >>> try:
    >>>     1 / 0
    >>> except ZeroDivisionError:
    >>>     zuqa.capture_exception()
    Capture a message::
    >>> zuqa.capture_message('hello, world!')
    """
    def __init__(self, app=None, client=None, client_cls=Client, logging=False, **defaults):
        # ``logging`` may be True (install a handler at default level), False
        # (leave logging untouched), or an int log level -- including
        # logging.NOTSET (0); see init_app() for the NOTSET special case.
        self.app = app
        self.logging = logging
        self.client_cls = client_cls
        self.client = client
        if app:
            self.init_app(app, **defaults)
    def handle_exception(self, *args, **kwargs):
        """Capture an unhandled exception signalled by Flask, then end the transaction."""
        if not self.client:
            return
        if self.app.debug and not self.client.config.debug:
            # In Flask debug mode, stay out of the way unless explicitly enabled.
            return
        self.client.capture_exception(
            exc_info=kwargs.get("exc_info"),
            context={"request": get_data_from_request(request, self.client.config, constants.ERROR)},
            custom={"app": self.app},
            handled=False,
        )
        # End the transaction here, as `request_finished` won't be called when an
        # unhandled exception occurs.
        #
        # Unfortunately, that also means that we can't capture any response data,
        # as the response isn't ready at this point in time.
        self.client.end_transaction(result="HTTP 5xx")
    def init_app(self, app, **defaults):
        """Create the client (if needed) and connect Flask signals and instrumentation."""
        self.app = app
        if not self.client:
            self.client = make_client(self.client_cls, app, **defaults)
        # 0 is a valid log level (NOTSET), so we need to check explicitly for it
        if self.logging or self.logging is logging.NOTSET:
            if self.logging is not True:
                kwargs = {"level": self.logging}
            else:
                kwargs = {}
            setup_logging(LoggingHandler(self.client, **kwargs))
        signals.got_request_exception.connect(self.handle_exception, sender=app, weak=False)
        try:
            from zuqa.contrib.celery import register_exception_tracking
            register_exception_tracking(self.client)
        except ImportError:
            # Celery is optional; skip its error tracking when not installed.
            pass
        # Instrument to get spans
        if self.client.config.instrument and self.client.config.enabled:
            zuqa.instrumentation.control.instrument()
            signals.request_started.connect(self.request_started, sender=app)
            signals.request_finished.connect(self.request_finished, sender=app)
            try:
                from zuqa.contrib.celery import register_instrumentation
                register_instrumentation(self.client)
            except ImportError:
                pass
        else:
            logger.debug("Skipping instrumentation. INSTRUMENT is set to False.")
        @app.context_processor
        def rum_tracing():
            """
            Adds APM related IDs to the context used for correlating the backend transaction with the RUM transaction
            """
            transaction = execution_context.get_transaction()
            if transaction and transaction.trace_parent:
                return {
                    "apm": {
                        "trace_id": transaction.trace_parent.trace_id,
                        # callable so a parent id is only generated if actually rendered
                        "span_id": lambda: transaction.ensure_parent_id(),
                        "is_sampled": transaction.is_sampled,
                        "is_sampled_js": "true" if transaction.is_sampled else "false",
                    }
                }
            return {}
    def request_started(self, app):
        """Begin an APM transaction when Flask starts handling a request."""
        if not self.app.debug or self.client.config.debug:
            # Pick up distributed-tracing context from the incoming headers.
            trace_parent = TraceParent.from_headers(request.headers)
            self.client.begin_transaction("request", trace_parent=trace_parent)
            zuqa.set_context(
                lambda: get_data_from_request(request, self.client.config, constants.TRANSACTION), "request"
            )
            rule = request.url_rule.rule if request.url_rule is not None else ""
            rule = build_name_with_http_method_prefix(rule, request)
            zuqa.set_transaction_name(rule, override=False)
    def request_finished(self, app, response):
        """Record response data and defer ending the transaction until the response closes."""
        if not self.app.debug or self.client.config.debug:
            zuqa.set_context(
                lambda: get_data_from_response(response, self.client.config, constants.TRANSACTION), "response"
            )
            if response.status_code:
                result = "HTTP {}xx".format(response.status_code // 100)
            else:
                result = response.status
            zuqa.set_transaction_result(result, override=False)
            # Instead of calling end_transaction here, we defer the call until the response is closed.
            # This ensures that we capture things that happen until the WSGI server closes the response.
            response.call_on_close(self.client.end_transaction)
    def capture_exception(self, *args, **kwargs):
        """Proxy to the client's capture_exception; requires init_app() to have run."""
        assert self.client, "capture_exception called before application configured"
        return self.client.capture_exception(*args, **kwargs)
    def capture_message(self, *args, **kwargs):
        """Proxy to the client's capture_message; requires init_app() to have run."""
        assert self.client, "capture_message called before application configured"
        return self.client.capture_message(*args, **kwargs)
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/flask/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import inspect
from zuqa.base import Client
class Middleware(object):
    """ZUQA middleware for ZeroRPC.
    >>> zuqa = Middleware(service_name='..', secret_token='...')
    >>> zerorpc.Context.get_instance().register_middleware(zuqa)
    Exceptions detected server-side in ZeroRPC will be submitted to the apm server (and
    propagated to the client as well).
    """
    def __init__(self, hide_zerorpc_frames=True, client=None, **kwargs):
        """Create a middleware object that can be injected in a ZeroRPC server.
        - hide_zerorpc_frames: modify the exception stacktrace to remove the
          internal zerorpc frames (True by default to make
          the stacktrace as readable as possible);
        - client: use an existing raven.Client object, otherwise one will be
          instantiated from the keyword arguments.
        """
        self._zuqa_client = client or Client(**kwargs)
        self._hide_zerorpc_frames = hide_zerorpc_frames
    def server_inspect_exception(self, req_event, rep_event, task_ctx, exc_info):
        """Called when an exception has been raised in the code run by ZeroRPC"""
        # Hide the zerorpc internal frames for readability, for a REQ/REP or
        # REQ/STREAM server the frames to hide are:
        # - core.ServerBase._async_task
        # - core.Pattern*.process_call
        # - core.DecoratorBase.__call__
        #
        # For a PUSH/PULL or PUB/SUB server the frame to hide is:
        # - core.Puller._receiver
        if self._hide_zerorpc_frames:
            traceback = exc_info[2]
            while traceback:
                zerorpc_frame = traceback.tb_frame
                # Mark the frame so stack processors skip it when rendering.
                zerorpc_frame.f_locals["__traceback_hide__"] = True
                frame_info = inspect.getframeinfo(zerorpc_frame)
                # Is there a better way than this (or looking up the filenames
                # or hardcoding the number of frames to skip) to know when we
                # are out of zerorpc?
                if frame_info.function == "__call__" or frame_info.function == "_receiver":
                    break
                traceback = traceback.tb_next
        self._zuqa_client.capture_exception(exc_info, extra=task_ctx, handled=False)
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/zerorpc/__init__.py
|
__init__.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from functools import partial
from django.apps import AppConfig
from django.conf import settings as django_settings
from zuqa.conf import constants
from zuqa.contrib.django.client import get_client
from zuqa.utils.disttracing import TraceParent
from zuqa.utils.logging import get_logger
logger = get_logger("zuqa.traces")
ERROR_DISPATCH_UID = "zuqa-exceptions"
REQUEST_START_DISPATCH_UID = "zuqa-request-start"
REQUEST_FINISH_DISPATCH_UID = "zuqa-request-stop"
MIDDLEWARE_NAME = "zuqa.contrib.django.middleware.TracingMiddleware"
TRACEPARENT_HEADER_NAME_WSGI = "HTTP_" + constants.TRACEPARENT_HEADER_NAME.upper().replace("-", "_")
TRACEPARENT_LEGACY_HEADER_NAME_WSGI = "HTTP_" + constants.TRACEPARENT_LEGACY_HEADER_NAME.upper().replace("-", "_")
TRACESTATE_HEADER_NAME_WSGI = "HTTP_" + constants.TRACESTATE_HEADER_NAME.upper().replace("-", "_")
class ZuqaConfig(AppConfig):
    """Django AppConfig that bootstraps the ZUQA agent when apps are loaded."""
    name = "zuqa.contrib.django"
    label = "zuqa.contrib.django"
    verbose_name = "ZUQA"
    def __init__(self, *args, **kwargs):
        super(ZuqaConfig, self).__init__(*args, **kwargs)
        # Filled in by ready(); None until Django finishes app loading.
        self.client = None
    def ready(self):
        """Create the client, optionally auto-insert the tracing middleware,
        register signal handlers, and instrument if enabled."""
        self.client = get_client()
        if self.client.config.autoinsert_django_middleware:
            self.insert_middleware(django_settings)
        register_handlers(self.client)
        if self.client.config.instrument and self.client.config.enabled:
            instrument(self.client)
        else:
            self.client.logger.debug("Skipping instrumentation. INSTRUMENT is set to False.")
    @staticmethod
    def insert_middleware(settings):
        """Prepend the tracing middleware to ``settings.MIDDLEWARE`` (or the
        legacy ``MIDDLEWARE_CLASSES``), preserving the list/tuple type."""
        if hasattr(settings, "MIDDLEWARE"):
            middleware_list = settings.MIDDLEWARE
            middleware_attr = "MIDDLEWARE"
        elif hasattr(settings, "MIDDLEWARE_CLASSES"):  # can be removed when we drop support for Django 1.x
            middleware_list = settings.MIDDLEWARE_CLASSES
            middleware_attr = "MIDDLEWARE_CLASSES"
        else:
            logger.debug("Could not find middleware setting, not autoinserting tracing middleware")
            return
        is_tuple = isinstance(middleware_list, tuple)
        if is_tuple:
            middleware_list = list(middleware_list)
        elif not isinstance(middleware_list, list):
            # BUG FIX: the format string expected an argument but none was
            # passed, so the message never formatted; supply the setting name.
            logger.debug("%s setting is not of type list or tuple, not autoinserting tracing middleware", middleware_attr)
            return
        if middleware_list is not None and MIDDLEWARE_NAME not in middleware_list:
            logger.debug("Inserting tracing middleware into settings.%s", middleware_attr)
            middleware_list.insert(0, MIDDLEWARE_NAME)
        if is_tuple:
            # Restore the original tuple type before writing back.
            middleware_list = tuple(middleware_list)
        if middleware_list:
            setattr(settings, middleware_attr, middleware_list)
def register_handlers(client):
    """Connect the given client to Django's request/exception signals and, if
    Celery is importable, register its exception tracking as well."""
    from django.core.signals import got_request_exception, request_started, request_finished
    from zuqa.contrib.django.handlers import exception_handler
    # Connect to Django's internal signal handlers
    # (disconnect first so repeated calls don't register duplicate receivers).
    got_request_exception.disconnect(dispatch_uid=ERROR_DISPATCH_UID)
    got_request_exception.connect(partial(exception_handler, client), dispatch_uid=ERROR_DISPATCH_UID, weak=False)
    request_started.disconnect(dispatch_uid=REQUEST_START_DISPATCH_UID)
    request_started.connect(
        partial(_request_started_handler, client), dispatch_uid=REQUEST_START_DISPATCH_UID, weak=False
    )
    request_finished.disconnect(dispatch_uid=REQUEST_FINISH_DISPATCH_UID)
    request_finished.connect(
        # Only end a transaction if one would have been started (same gate).
        lambda sender, **kwargs: client.end_transaction() if _should_start_transaction(client) else None,
        dispatch_uid=REQUEST_FINISH_DISPATCH_UID,
        weak=False,
    )
    # If we can import celery, register ourselves as exception handler
    try:
        import celery  # noqa F401
        from zuqa.contrib.celery import register_exception_tracking
        try:
            register_exception_tracking(client)
        except Exception as e:
            client.logger.exception("Failed installing django-celery hook: %s" % e)
    except ImportError:
        client.logger.debug("Not instrumenting Celery, couldn't import")
def _request_started_handler(client, sender, *args, **kwargs):
    """Signal receiver for ``request_started``: begin an APM transaction,
    recovering distributed-tracing context from WSGI or ASGI metadata."""
    if not _should_start_transaction(client):
        return
    # WSGI passes ``environ``; ASGI passes ``scope`` with raw header pairs.
    trace_parent = None
    if "environ" in kwargs:
        trace_parent = TraceParent.from_headers(
            kwargs["environ"],
            TRACEPARENT_HEADER_NAME_WSGI,
            TRACEPARENT_LEGACY_HEADER_NAME_WSGI,
            TRACESTATE_HEADER_NAME_WSGI,
        )
    elif "scope" in kwargs and "headers" in kwargs["scope"]:
        trace_parent = TraceParent.from_headers(kwargs["scope"]["headers"])
    client.begin_transaction("request", trace_parent=trace_parent)
def instrument(client):
    """
    Auto-instruments code to get nice spans
    """
    # Local import deliberately shadows this function's own name within its body.
    from zuqa.instrumentation.control import instrument
    instrument()
    try:
        import celery  # noqa F401
        from zuqa.contrib.celery import register_instrumentation
        register_instrumentation(client)
    except ImportError:
        client.logger.debug("Not instrumenting Celery, couldn't import")
def _should_start_transaction(client):
    """Return True when a request transaction should be recorded: not in
    Django DEBUG mode (unless the client opts in) and the tracing middleware
    is actually installed in the active middleware setting."""
    middleware_attr = "MIDDLEWARE" if getattr(django_settings, "MIDDLEWARE", None) is not None else "MIDDLEWARE_CLASSES"
    middleware = getattr(django_settings, middleware_attr)
    return (
        (not django_settings.DEBUG or client.config.debug)
        and middleware
        # CONSISTENCY: use the module constant instead of duplicating the
        # dotted path literal (same value as before).
        and MIDDLEWARE_NAME in middleware
    )
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/django/apps.py
|
apps.py
|
# BSD 3-Clause License
#
# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import django
from django.conf import settings as django_settings
from django.core.exceptions import DisallowedHost
from django.db import DatabaseError
from django.http import HttpRequest
from zuqa.base import Client
from zuqa.conf import constants
from zuqa.contrib.django.utils import iterate_with_template_sources
from zuqa.utils import compat, encoding, get_url_dict
from zuqa.utils.logging import get_logger
from zuqa.utils.module_import import import_string
from zuqa.utils.wsgi import get_environ, get_headers
__all__ = ("DjangoClient",)
default_client_class = "zuqa.contrib.django.DjangoClient"
_client = (None, None)
def get_client(client=None):
    """
    Get an ZUQA client.
    :param client: optional dotted path to a client class; when given, a fresh
        instance is returned and the module-level cache is left untouched.
    :return: a client instance (cached per configured class path)
    :rtype: zuqa.base.Client
    """
    global _client
    # An explicitly passed ``client`` path makes a temporary, uncached client.
    tmp_client = client is not None
    if not tmp_client:
        config = getattr(django_settings, "ZUQA", {})
        client = config.get("CLIENT", default_client_class)
    # Cache miss (or first call): import and instantiate the class.
    if _client[0] != client:
        client_class = import_string(client)
        instance = client_class()
        if not tmp_client:
            _client = (client, instance)
        return instance
    return _client[1]
class DjangoClient(Client):
    """ZUQA client specialised for Django: reads config from ``settings.ZUQA``
    and extracts request/response/user context from Django objects."""
    logger = get_logger("zuqa.errors.client.django")
    def __init__(self, config=None, **inline):
        if config is None:
            config = getattr(django_settings, "ZUQA", {})
        if "framework_name" not in inline:
            inline["framework_name"] = "django"
            inline["framework_version"] = django.get_version()
        super(DjangoClient, self).__init__(config, **inline)
    def get_user_info(self, request):
        """Return a dict describing ``request.user`` (auth state, id, username,
        email), or an empty dict when unavailable or on database failure."""
        user_info = {}
        if not hasattr(request, "user"):
            return user_info
        try:
            user = request.user
            if hasattr(user, "is_authenticated"):
                # is_authenticated is a method in older Django, a property later.
                if callable(user.is_authenticated):
                    user_info["is_authenticated"] = user.is_authenticated()
                else:
                    user_info["is_authenticated"] = bool(user.is_authenticated)
            if hasattr(user, "id"):
                user_info["id"] = encoding.keyword_field(user.id)
            if hasattr(user, "get_username"):
                user_info["username"] = encoding.keyword_field(encoding.force_text(user.get_username()))
            elif hasattr(user, "username"):
                user_info["username"] = encoding.keyword_field(encoding.force_text(user.username))
            if hasattr(user, "email"):
                user_info["email"] = encoding.force_text(user.email)
        except DatabaseError:
            # If the connection is closed or similar, we'll just skip this
            return {}
        return user_info
    def get_data_from_request(self, request, event_type):
        """Build the APM request-context dict from a Django ``HttpRequest``,
        honouring the ``capture_headers`` and ``capture_body`` settings."""
        result = {
            "env": dict(get_environ(request.META)),
            "method": request.method,
            "socket": {"remote_address": request.META.get("REMOTE_ADDR"), "encrypted": request.is_secure()},
            "cookies": dict(request.COOKIES),
        }
        if self.config.capture_headers:
            request_headers = dict(get_headers(request.META))
            # Header values must be strings for the intake API.
            for key, value in request_headers.items():
                if isinstance(value, (int, float)):
                    request_headers[key] = str(value)
            result["headers"] = request_headers
        if request.method in constants.HTTP_WITH_BODY:
            capture_body = self.config.capture_body in ("all", event_type)
            if not capture_body:
                result["body"] = "[REDACTED]"
            else:
                content_type = request.META.get("CONTENT_TYPE")
                if content_type == "application/x-www-form-urlencoded":
                    data = compat.multidict_to_dict(request.POST)
                elif content_type and content_type.startswith("multipart/form-data"):
                    data = compat.multidict_to_dict(request.POST)
                    if request.FILES:
                        data["_files"] = {field: file.name for field, file in compat.iteritems(request.FILES)}
                else:
                    try:
                        data = request.body
                    except Exception as e:
                        self.logger.debug("Can't capture request body: %s", compat.text_type(e))
                        data = "<unavailable>"
                if data is not None:
                    result["body"] = data
        if hasattr(request, "get_raw_uri"):
            # added in Django 1.9
            url = request.get_raw_uri()
        else:
            try:
                # Requires host to be in ALLOWED_HOSTS, might throw a
                # DisallowedHost exception
                url = request.build_absolute_uri()
            except DisallowedHost:
                # We can't figure out the real URL, so we have to set it to
                # DisallowedHost
                result["url"] = {"full": "DisallowedHost"}
                url = None
        if url:
            result["url"] = get_url_dict(url)
        return result
    def get_data_from_response(self, response, event_type):
        """Build the APM response-context dict (status code plus headers)."""
        result = {"status_code": response.status_code}
        if self.config.capture_headers and hasattr(response, "items"):
            response_headers = dict(response.items())
            # Header values must be strings for the intake API.
            for key, value in response_headers.items():
                if isinstance(value, (int, float)):
                    response_headers[key] = str(value)
            result["headers"] = response_headers
        return result
    def capture(self, event_type, request=None, **kwargs):
        """Capture an event; when ``request`` is an ``HttpRequest``, enrich the
        context with request and user data and tag the request object."""
        if "context" not in kwargs:
            kwargs["context"] = context = {}
        else:
            context = kwargs["context"]
        is_http_request = isinstance(request, HttpRequest)
        if is_http_request:
            context["request"] = self.get_data_from_request(request, constants.ERROR)
            context["user"] = self.get_user_info(request)
        result = super(DjangoClient, self).capture(event_type, **kwargs)
        if is_http_request:
            # attach the zuqa object to the request
            request._zuqa = {"service_name": self.config.service_name, "id": result}
        return result
    def _get_stack_info_for_trace(
        self,
        frames,
        library_frame_context_lines=None,
        in_app_frame_context_lines=None,
        with_locals=True,
        locals_processor_func=None,
    ):
        """If the stacktrace originates within the zuqa module, it will skip
        frames until some other module comes up; interleaves Django template
        source frames via ``iterate_with_template_sources``."""
        return list(
            iterate_with_template_sources(
                frames,
                with_locals=with_locals,
                library_frame_context_lines=library_frame_context_lines,
                in_app_frame_context_lines=in_app_frame_context_lines,
                include_paths_re=self.include_paths_re,
                exclude_paths_re=self.exclude_paths_re,
                locals_processor_func=locals_processor_func,
            )
        )
    def send(self, url, **kwargs):
        """
        Serializes and signs ``data`` and passes the payload off to ``send_remote``
        If ``server`` was passed into the constructor, this will serialize the data and pipe it to
        the server using ``send_remote()``; otherwise an error is logged and None returned.
        """
        if self.config.server_url:
            return super(DjangoClient, self).send(url, **kwargs)
        else:
            self.error_logger.error("No server configured, and zuqa not installed. Cannot send message")
            return None
class ProxyClient(object):
    """
    A proxy which represents the current client at all times.

    Every attribute access and operator is forwarded to whatever
    ``get_client()`` currently returns, so module-level code can hold a stable
    reference even if the underlying client instance is replaced.
    """
    # introspection support:
    __members__ = property(lambda x: x.__dir__())
    # Need to pretend to be the wrapped class, for the sake of objects that care
    # about this (especially in equality tests)
    __class__ = property(lambda x: get_client().__class__)
    __dict__ = property(lambda o: get_client().__dict__)
    # BUG FIX: __repr__ is invoked with the instance as its argument; the
    # previous zero-argument lambda made repr(client) raise TypeError.
    __repr__ = lambda x: repr(get_client())
    __getattr__ = lambda x, o: getattr(get_client(), o)
    __setattr__ = lambda x, o, v: setattr(get_client(), o, v)
    __delattr__ = lambda x, o: delattr(get_client(), o)
    __lt__ = lambda x, o: get_client() < o
    __le__ = lambda x, o: get_client() <= o
    __eq__ = lambda x, o: get_client() == o
    __ne__ = lambda x, o: get_client() != o
    __gt__ = lambda x, o: get_client() > o
    __ge__ = lambda x, o: get_client() >= o
    if compat.PY2:
        __cmp__ = lambda x, o: cmp(get_client(), o)  # noqa F821
    __hash__ = lambda x: hash(get_client())
    # attributes are currently not callable
    # __call__ = lambda x, *a, **kw: get_client()(*a, **kw)
    __nonzero__ = lambda x: bool(get_client())
    __len__ = lambda x: len(get_client())
    __getitem__ = lambda x, i: get_client()[i]
    __iter__ = lambda x: iter(get_client())
    __contains__ = lambda x, i: i in get_client()
    __getslice__ = lambda x, i, j: get_client()[i:j]
    __add__ = lambda x, o: get_client() + o
    __sub__ = lambda x, o: get_client() - o
    __mul__ = lambda x, o: get_client() * o
    __floordiv__ = lambda x, o: get_client() // o
    __mod__ = lambda x, o: get_client() % o
    __divmod__ = lambda x, o: get_client().__divmod__(o)
    __pow__ = lambda x, o: get_client() ** o
    __lshift__ = lambda x, o: get_client() << o
    __rshift__ = lambda x, o: get_client() >> o
    __and__ = lambda x, o: get_client() & o
    __xor__ = lambda x, o: get_client() ^ o
    __or__ = lambda x, o: get_client() | o
    __div__ = lambda x, o: get_client().__div__(o)
    __truediv__ = lambda x, o: get_client().__truediv__(o)
    __neg__ = lambda x: -(get_client())
    __pos__ = lambda x: +(get_client())
    __abs__ = lambda x: abs(get_client())
    __invert__ = lambda x: ~(get_client())
    __complex__ = lambda x: complex(get_client())
    __int__ = lambda x: int(get_client())
    if compat.PY2:
        __long__ = lambda x: long(get_client())  # noqa F821
    __float__ = lambda x: float(get_client())
    __str__ = lambda x: str(get_client())
    __unicode__ = lambda x: compat.text_type(get_client())
    __oct__ = lambda x: oct(get_client())
    __hex__ = lambda x: hex(get_client())
    __index__ = lambda x: get_client().__index__()
    # NOTE(review): __coerce__/__enter__/__exit__ below call themselves rather
    # than delegating to get_client(); if ever invoked they recurse infinitely
    # -- confirm whether they should forward to the underlying client.
    __coerce__ = lambda x, o: x.__coerce__(x, o)
    __enter__ = lambda x: x.__enter__()
    __exit__ = lambda x, *a, **kw: x.__exit__(*a, **kw)
# Module-level proxy that always resolves to the currently configured client.
client = ProxyClient()
def _get_installed_apps_paths():
    """
    Return the set of module names listed in ``settings.INSTALLED_APPS``.
    """
    return set(django_settings.INSTALLED_APPS)
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/django/client.py
|
client.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from zuqa.traces import execution_context
def rum_tracing(request):
    """Django context processor exposing APM correlation IDs so RUM frontend
    transactions can be linked to the backend transaction."""
    transaction = execution_context.get_transaction()
    if not (transaction and transaction.trace_parent):
        return {}
    sampled = transaction.is_sampled
    return {
        "apm": {
            "trace_id": transaction.trace_parent.trace_id,
            # only put the callable into the context to ensure that we only change the span_id if the value
            # is rendered
            "span_id": transaction.ensure_parent_id,
            "is_sampled": sampled,
            "is_sampled_js": "true" if sampled else "false",
        }
    }
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/django/context_processors.py
|
context_processors.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django.template.base import Node
from zuqa.utils.stacks import get_frame_info
# ``Template`` may not be importable on all Django versions; fall back to a
# stub class so the isinstance() checks below degrade gracefully.
try:
    from django.template.base import Template
except ImportError:
    class Template(object):
        pass
def iterate_with_template_sources(
    frames,
    with_locals=True,
    library_frame_context_lines=None,
    in_app_frame_context_lines=None,
    include_paths_re=None,
    exclude_paths_re=None,
    locals_processor_func=None,
):
    """Yield frame-info dicts for ``frames`` (an iterable of ``(frame, lineno)``
    pairs), interleaving synthetic frames describing Django template sources
    whenever a template ``render`` frame is encountered. All other keyword
    arguments are passed through to ``get_frame_info``.
    """
    # Holds a partially-built template frame (lineno known, filename pending).
    template = None
    for frame, lineno in frames:
        f_code = getattr(frame, "f_code", None)
        if f_code:
            function_name = frame.f_code.co_name
            if function_name == "render":
                renderer = getattr(frame, "f_locals", {}).get("self")
                if renderer and isinstance(renderer, Node):
                    if getattr(renderer, "token", None) is not None:
                        if hasattr(renderer, "source"):
                            # up to Django 1.8
                            yield {"lineno": renderer.token.lineno, "filename": renderer.source[0].name}
                        else:
                            template = {"lineno": renderer.token.lineno}
                # Django 1.9 doesn't have the origin on the Node instance,
                # so we have to get it a bit further down the stack from the
                # Template instance
                elif renderer and isinstance(renderer, Template):
                    if template and getattr(renderer, "origin", None):
                        template["filename"] = renderer.origin.name
                        yield template
                        template = None
        yield get_frame_info(
            frame,
            lineno,
            library_frame_context_lines=library_frame_context_lines,
            in_app_frame_context_lines=in_app_frame_context_lines,
            with_locals=with_locals,
            include_paths_re=include_paths_re,
            exclude_paths_re=exclude_paths_re,
            locals_processor_func=locals_processor_func,
        )
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/django/utils.py
|
utils.py
|
# BSD 3-Clause License
#
# Copyright (c) 2019, Elasticsearch BV
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from zuqa.contrib.django.client import * # noqa E401
default_app_config = "zuqa.contrib.django.apps.ZuqaConfig"
|
zuqa-agent-python
|
/zuqa-agent-python-1.0.0a1.tar.gz/zuqa-agent-python-1.0.0a1/zuqa/contrib/django/__init__.py
|
__init__.py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.