code
stringlengths 1
5.19M
| package
stringlengths 1
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from nose.tools import raises
from zuper_ipce.special_strings import Email
@raises(ValueError)
def test_email():
    # "aaa" has no @/domain structure, so Email construction must raise.
    Email("aaa")
def test_email_ok():
    # A well-formed user@domain string must be accepted.
    # NOTE(review): the literal reads like a redacted address ("[email protected]");
    # it still has user@domain shape, which is all this test needs — confirm intent.
    Email("[email protected]")
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_special_string.py
|
test_special_string.py
|
from typing import Dict, Optional, Set, Tuple, Union
from nose.tools import raises
from zuper_ipce import object_from_ipce
from zuper_typing import dataclass
from zuper_typing.annotations_tricks import make_Tuple
from zuper_typing.my_dict import make_dict, make_list, make_set
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
# noinspection PyUnresolvedReferences
def test_union_1():
    """A dataclass field typed Union[int, str] round-trips for both branches."""

    @dataclass
    class MyClass:
        f: Union[int, str]

    instance = MyClass(1)
    assert_object_roundtrip(instance)
    instance = MyClass("a")  # pragma: no cover
    assert_object_roundtrip(instance)  # pragma: no cover
def test_union_2():
    """A bare Union[int, str] type survives a type round-trip."""
    assert_type_roundtrip(Union[int, str])
def test_union_2b():
    """A Union containing a Tuple member round-trips."""
    assert_type_roundtrip(Union[Tuple[str], int])
def test_union_2c():
    """A variadic homogeneous Tuple[int, ...] round-trips."""
    assert_type_roundtrip(Tuple[int, ...])
def test_tuple_empty():
    """The empty-tuple type produced by make_Tuple() round-trips."""
    assert_type_roundtrip(make_Tuple())
def test_union_3():
    """A field typed as a Union of two dataclasses round-trips, whichever
    branch the stored value belongs to."""

    @dataclass
    class A:
        a: int

    @dataclass
    class B:
        b: int

    @dataclass
    class C:
        c: Union[A, B]

    assert_type_roundtrip(C)
    # Exercise both union branches through the object round-trip.
    for wrapped in (C(A(1)), C(B(1))):
        assert_object_roundtrip(wrapped)
@raises(ValueError)
def test_none1():
    """Deserializing a None payload where a dataclass is expected must raise
    ValueError."""

    @dataclass
    class A:
        b: int

    object_from_ipce(None, expect_type=A)
def test_tuple_wiht_optional_inside():  # sic: "wiht" kept so the test id stays stable
    """A Tuple with an Optional element in the middle round-trips."""
    assert_type_roundtrip(Tuple[int, Optional[int], str])
def test_dict_with_optional():
    """make_dict(str, Optional[int]) — the project's analogue of
    Dict[str, Optional[int]] — round-trips."""
    assert_type_roundtrip(make_dict(str, Optional[int]))
def test_list_with_optional():
    """A make_list type with Optional elements round-trips."""
    assert_type_roundtrip(make_list(Optional[int]))
def test_set_with_optional():
    """make_set(Optional[int]) round-trips — a set that may contain None is
    odd but must still be representable."""
    assert_type_roundtrip(make_set(Optional[int]))
def test_set_with_optional2():
    """Same as test_set_with_optional but using the plain typing form."""
    assert_type_roundtrip(Set[Optional[int]])
def test_dict_with_optional_key():
    """A Dict keyed by Optional[int] round-trips."""
    assert_type_roundtrip(Dict[Optional[int], int])
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_ipce_union.py
|
test_ipce_union.py
|
from decimal import Decimal
from zuper_typing import dataclass
from .test_utils import assert_object_roundtrip
def test_decimal1():
    """A dataclass with a Decimal field round-trips, whether the Decimal was
    built from a float (inexact binary value) or from a string."""

    @dataclass
    class MyClass:
        f: Decimal

    for value in (Decimal(1.0), Decimal("0.3")):
        assert_object_roundtrip(MyClass(value))
def test_decimal2():
    """A bare Decimal value (not wrapped in a dataclass) round-trips."""
    assert_object_roundtrip(Decimal("3.14"))
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_decimal.py
|
test_decimal.py
|
from .test_from_testobjs import check_case
# Each stored test object gets its own test function so that test ids and
# failures stay per-object.  The original file spelled every function out by
# hand; they were identical except for the object's content hash and a
# byte-length suffix in the name.  They are generated from the table below
# instead, which removes ~550 duplicated lines while keeping every test name
# (and therefore every collected test id) intact.
#
# Naming scheme (identical to the hand-written originals):
#   test_<hash>_len<NN>  ->  check_case("test_objects/<hash>.ipce.cbor.gz")
_CASES = [
    ("zdpuAxKCBsAKQpEw456S49oVDkWJ9PZa44KGRfVBWHiXN3UH8", 81),
    ("zdpuB38GjS43WsQdtVPGSiFsc6xzx1BrAGMt28kEhw4MNMK6E", 84),
    ("zdpuAkRd237P39AzFu5KchyradDszSeFKdhn217Xw8zBTtmJk", 86),
    ("zdpuAuH1zLE3PoPXjkaLhQH65U8cZ5XuUBKtTRgEHKwUU3Xpd", 103),
    ("zdpuAqch55bUMPgrJWmi3MTGLpCSX3K6Fq9h2qKYxwdNYQuLX", 134),
    ("zdpuArHFAnEPEs7QFv8zePpZDJ8BUaxFAmqhjhCpePDgeEaqc", 134),
    ("zdpuArtUgLfLLhcbnFzBE6qG9ePZuuYuvekRLouaj1sFjzMgy", 136),
    ("zdpuAnhpygLrBJds79tc5vPTjRn8WRpjcEQ6vzDF52aaMDuvW", 137),
    ("zdpuAohbKGRmf4xvtKyQ6PJgGDptVqJ6MRUEADHA8WtYjb5EP", 149),
    ("zdpuAuxDCcUvoQyPMxh236hYYow3QaozPgwhJiN6WtdubdRui", 150),
    ("zdpuAsBsn9jcB84wzKcxx5mCNjDA6xBhi4kKJaxrYjSN1aJeD", 158),
    ("zdpuArkpgAgQQdEYyTwWrZCicoALV8LV4QGrLLRKLSY8K7Qt8", 161),
    ("zdpuAtvPi9fEe2isKQ6WCBcJxGC2GvndWYmsyUCtYt2AuyGNa", 163),
    ("zdpuAqUiW5nTdnSohcTLgQAFcW9dRKrpEXTd7X3WpsFxCCxgT", 164),
    ("zdpuB1dR6zyU9b9Lk3oo1vvEtcgKJKdxeNR9CyuW2E3J9eAt6", 170),
    ("zdpuAvFczbA3WNevKnEFHArVwUJiXg3mRyamPvhTgosGHQApP", 180),
    ("zdpuAkeZYrDdh6SJhitpicFxuejXJFEsaWCU5VUhfWj31FiCx", 181),
    ("zdpuAsXTxkJBV8Bna4QRLfFG3dvaRhb6zUSz2FFQmyG4ACBed", 190),
    ("zdpuAyYqCqzyuLWmzZRHudJeUjJW7Y4ht5rVGAwnrsHTNwiiq", 194),
    ("zdpuArJ491kYWoypxsq3QCTroM1hLoJDgJ5u6EUVLpWX8La3H", 198),
    ("zdpuApSEKBdZa9ssiAFHm6aDgPVaKHV13SiyKmHX9QjEAwdaC", 199),
    ("zdpuAp74h4VFBPgcXsyHgA7dSjoxA1sehD4iv4tnPsgtVmmaj", 202),
    ("zdpuAuZ1e6hAcLir5oySMbkxLTMmJEKyBPKQo5FRMECcKFu26", 202),
    ("zdpuB1DyZJhU8mTqSsfzio9aas6rPdmALDFTgQtBksWagbz6v", 204),
    ("zdpuAsUnf2QncgnJT7t8GyNYNSY9vDSwMYBJ16FuAX2MfSF9d", 204),
    ("zdpuAssAg3FaXzuSKMx9G3DmoJbJucpfgd1GfpB8X9uwsZM3s", 210),
    ("zdpuAyAA5cjEcKUa5f7ioEjVGX6byQB1vu81B1fWEC9nkuwtt", 211),
    ("zdpuAuX82f16auvDsdAZa85LPNAFpJUUegCC8VcnTuuYypJJa", 211),
    ("zdpuAnQvfRnDyqABJ57n61PkJsDZY6ehQYhPUrVK73iwpaCMA", 214),
    ("zdpuArh2pTQK1qKSNehbHb7kgGRaYbRZ8f2c7gpi9GU2siHWs", 215),
    ("zdpuAmJRQ8cpKwWTKHUh6b4UUBXXbYFJ3yxxAXnkqcq8z56wo", 216),
    ("zdpuAtQ1Z3rH9nFP763a7UEgJN6BZ4CXyatiB26oXkqqZV97A", 221),
    ("zdpuAxLZjJNUFncwr6qYpUX9BBgnJWnAUEp2qZLVxgqfzFhqM", 223),
    ("zdpuB38QtfszW72qnyuA3H9d9gxUQLA3mhSZxwC3YCPawrSq8", 225),
    ("zdpuAz3XeWgqYTpee5Mguien7hsRPwGCgoUjmkpKxMHb3UyW7", 227),
    ("zdpuAoc5soKWRGf3KPjXW3y1XYXwd6n3e2SAx4tH6DUdAAG6i", 227),
    ("zdpuAnQ4aQynmcT3SigTbHabJNxqMTrWkXhnXvj2TYkRapxLt", 228),
    ("zdpuAv4bsCu5bP9gTbb5ovsZNbWijCHnqtPEyCq4bU9z7mGZY", 228),
    ("zdpuAspr2MG1YQY1RUWNmkmGcvGyE453ngBQveApJPGQAPxDq", 228),
    ("zdpuAvuSvo2GXzNuUqSJWUiQnHXzgP46jRooe4pEXCbye5JMm", 232),
    ("zdpuAzCoUif8wL3jHcvmFU7WCajFotEkgWMESzmj6ijtPgrsw", 241),
    ("zdpuAryrmtW3t6Hj3cwLfFCxYosf43Hci81Ln3hBURnZko8ai", 264),
    ("zdpuAqjT2RbGPaJmqVoyybmp88upSq7h8VAQX5ptVoKo1UcVn", 267),
    ("zdpuApFxe4Lop45uAnvgHc5A1hgTrSYwAfSdTS7xXYeHEZ7Mo", 267),
    ("zdpuAzEKVEZsDokSHJRd7g3TT8264wpJdAQaeY4rQk5fG7cYg", 272),
    ("zdpuAuJjbpei7Wz9K3Lw7Ahov5mSnNry93xHHACSTVUHqcTc5", 272),
    ("zdpuAt1udGtePWpTjdgWv4LemFxRtyXnTUsJtShLoyppbvYpg", 274),
    ("zdpuAnaVK3YFvpk7BhXidhVLEm7V7zGZuvja2egUerzcx41SQ", 276),
    ("zdpuB1zb9C8h9Rk8aQbq9su9T7xdSBT7ih2e8NSiCziZMLjEx", 276),
    ("zdpuAuYdvTLkAcfdYZEsg9gkugytzgsjSmtsD5RMAMSPeMLzk", 277),
    ("zdpuAnstkDWvpAg4r6KrzuNGNrveLSm4k5492PtaP3ZqD5k1R", 279),
    ("zdpuAmXB9d2C4mMUeGAw5Ws4ZA71F4hxKfU3QaLgWzZeP4JvL", 280),
    ("zdpuApxbADaC9ETgje6KRT4JF34tN423q5xKeG2nG7aFXh9w6", 281),
    ("zdpuAqse8M5tKF15ooHfJxHAkazgPTVks2T8MN3xE9va7c9FB", 282),
    ("zdpuAu7qGKHreav8JvTkQWAniK3vXnGM2cVoEP7vpg9PCyyzz", 282),
    ("zdpuAkiQa9yJJrVZggn6DWJujFfYBWXqHtwVsLci3RBHVfMCn", 282),
    ("zdpuAxJvGWUgeyaj5QaPqRvgWoFfcxnCYWJ8TfTd74fmp2uwU", 283),
    ("zdpuAufekVPKBnjf69qzzcRsTQumhyHgwQdALSndoGB9Z1FEq", 284),
    ("zdpuB2m9pEpYWjkYc4ixYzqLBZEuDx5eQRySwWyxrzNQqPF7U", 284),
    ("zdpuAwQJnzXJNtC2eoDDJ4bKnDUh9JDEwXfCQpuJAccR1bhGB", 285),
    ("zdpuAmuPLfRMTxNPWHsCUze5YY692jLxQcYNiyK4Ukic2B443", 286),
    ("zdpuB2gx311uTF5VD37PmTkJNuEbxJ73BdX8Vb6UPJQ7AgYGE", 289),
    ("zdpuAsaG3BZ9iqR7sLzLCFH9z8oPB8RfYRKhM1JnNQWY8s3Ta", 289),
    ("zdpuB3DKx2tQKVvFaiy85MfYHmWSFWi4EE8nadJhESU3iM9gg", 289),
    ("zdpuAu9HfKAzo8hSG55xGCHQgRhYzrQkfG2m6sAL4Xzyj9yDA", 289),
    ("zdpuApffaGcLAjtoe4RYh7L7n2AfRrgyyQTgbVZpkhuE875dS", 291),
    ("zdpuAvksuXej14ku514BiGrS9EtwY1V1TkweL6nhi8WQrwut6", 291),
    ("zdpuApctwicwLhVMto9vDeZTANo7LWuj2DRSRc2GjwjdZJu1F", 291),
    ("zdpuApN6A9U3aFWkqcjJ8iTL4sJ6Fem8GPHPgQv3EbjZ71oiU", 292),
    ("zdpuB2v7crcndJBsAGurqqRdcpSbRyFjMBvTRwWdcXWJzkpus", 292),
    ("zdpuAmktGCvWAsNnmugBWgvfREMTh3EQujGYJakeyfQfgepHa", 293),
    ("zdpuAmxD4QAfmbFhogiMqWZuQNa3SXC57FekqNbgLefBRTFGm", 293),
    ("zdpuAnZo9dCXbagsidpcdWeRKgcNsCoQ9WACiVFusgsYDoRyi", 294),
    ("zdpuAuq5779H2aTuu8GvNg2fnh5RzLAhHUWvHRLGdg6ztuudJ", 295),
    ("zdpuAuNjMynG2WpHm1MNU5sGxgFd59BbUdUfWgCcEThWJyhqK", 295),
    ("zdpuAmHCAwvT1S6B3268dSZEqh6rw7oTzuKYYhMB8xLMF41uG", 296),
    ("zdpuAkqd4XaSFzpWLdoPDsyFTKUdhtz8C5Lb2SLKBbySYXqE5", 297),
    ("zdpuAsqrpecy2oP1aBgJsEuLyDMwFC5Z6JvLXPMd5JYEGwbvJ", 297),
    ("zdpuAmdpV2tXLUyKbVrUgvLB6568M1aAEEvjz77VLdpp3KJCF", 300),
    ("zdpuAxhN4cG2qJFRTdGbkKuHvLxoxyzkgNKkW4cjUFJngp7hT", 300),
    ("zdpuAs92MWFPgjqYotrZy2rHGeCDHczs5fjzL3ijRHo4sCM6D", 302),
    ("zdpuAzmbzP9BMDVenJFFSwoAe4ss7W3uZ5gkPtka7SY8JDz9m", 303),
    ("zdpuAr7Kw2ALFYejws3o2nD3TkoDP6ja8M3Gczu3P1i4BRBNc", 303),
    ("zdpuAtkKwjdv5xaTmTfDyT3MdKYRgugUC8MAif3BBJ6yXbesS", 303),
    ("zdpuAtXrpc1BQ1CaKemiFXwD7kX1579KfJsocPbAzTJuxa2Ly", 303),
    ("zdpuB3EmTitPZd3Aah6G2kHRU3WMYvSZBZ4LK4knsALz48vtG", 304),
    ("zdpuAtdhTiKkpbefLQCjfCtVcTQ1i8UWnhgr66k2q7JyYJQrB", 304),
    ("zdpuAsyFxo2hfzDHJouXXYa2YXd5oqtT5f7toJkdVBPGk74c4", 304),
    ("zdpuApqZu4ZqYCLLh1ao8HRTV82mrCoqjVFjjgzm828HE5WWx", 305),
    ("zdpuB2SvKersRTWuTLrFHRk2uaWMsvSzkvmiPRMxs9RmhGHHG", 305),
    ("zdpuAwAuWViahLcanftLtE25DpQnmbL5c1nnkkfC5PjwKHafk", 306),
    ("zdpuAy6EMRDbTnqnsvQ3Xqz6D7jh1N7Ys36JjShWhd4UMX4Kp", 306),
    ("zdpuB3QtwGC9bc9usgvhwKZtUMycP5cCFdivcLm5q4rhGgiDj", 307),
    ("zdpuAtTJ8TjVCKDGcd85DRpyb7evffEVJPXFmcrsqtywcxGR5", 307),
    ("zdpuB1njv2xUF72sSnYQUKa98GFDj1DjGhoXSbcbv4tyySCrB", 307),
    ("zdpuAtV2SU3DsQEiQ3vA5DwWczVqkAGgASNrFhzEgB22RvxzQ", 308),
    ("zdpuAvhqqrV4qt9PYkLpi9SJBmtniCNQ8h7vWPHCcBcm3r5MJ", 309),
    ("zdpuB3ZuWgpj5wWiPAcCdgHaVbLGunTvG1fLn3n81DkdzXg3q", 309),
    ("zdpuAvf6TDjLwwTbUmmehyQmJm44nMuX7gUsgYbH5xqt7gj7r", 309),
    ("zdpuAkjBXxSK9qN8SdecFwKqrnSVHFE2qVFEnN4uXKS8nt9nK", 309),
    ("zdpuAvHxsUsqnHV8G3HvNfwSKr18h2dbUf8Shx7zvBWCBP7aN", 309),
    ("zdpuAt6KJs6XQtfZAw8PfBJD6EfeK4erj9Chx1irwLeV9DFpw", 310),
    ("zdpuAzU3EphgJpMRBizgFaZoxYnwbZJTntPKPqAYNvFfzqdsj", 310),
    ("zdpuAqNQ94yp2ffWch6s2U6kSwwX1S3EkaybQmcRbAHhtHtKU", 310),
    ("zdpuB12MEZVSmW58GHPKpaYQzMao1h6J2N8v8Bxn7cxFnwpZ8", 311),
    ("zdpuAxta952GJizQ2mExw2FG65Mscfj78XuZEzfBBA8ho9RQd", 311),
    ("zdpuB2nDNg1db4UWfvfZWaTmoBx675YdPSsrN95rXbBNcJkgJ", 311),
    ("zdpuAnH3fLqVECbtjv9Gd6YBbTRcDpWo8uq829LgD7sFDNe1h", 311),
    ("zdpuAyiCfLWkybSMzLDfPCfrx3ESJBdTjZkJeQWVYfvquoofD", 311),
    ("zdpuB2p28gNLRZus2U9UUKqs6YofDkQdokLoSTe92qxiJ4vfU", 312),
    ("zdpuAmnxxxnhKxEvXLohK6pz66hhUbg3yHBZg1BPWPptC4XkE", 312),
    ("zdpuAzshxK7PiTNmxSjnVhH8jGLdu2y6EwZKkVeYGMVFaAmyz", 312),
    ("zdpuAy5w4i3yQf6go7PHem2Qmcxa8ii6BtAgUpfnawpWubKh5", 312),
    ("zdpuAoZupHAKYytvsontxCt3gs88v8VDbnyLbX434tPnNpocr", 313),
    ("zdpuAwbSzcq6xV1tJxQsTR2AaeFLJTXkkbTtm4jX3PrxsyDcX", 313),
    ("zdpuAxMovYgGbzUtzxWAjNYst7aUzYwkJeK7bypAvoycevEvS", 316),
    ("zdpuAzPFCeRYgaBfvcQHKYXUj1DPANiR3LJuWUFpXz1Tws7pq", 316),
    ("zdpuArn1kwxfwUYHxkvNtvyu4Q8rskfmbaT8g5VkiMtnhBcn7", 316),
    ("zdpuB1gQpNVnAQ82VHtUzJ3ygyY388N4ixr2Q6nGKw7p1RkVY", 317),
    ("zdpuB2cjWwNk8zFQMRqegSYV9jVZqB2vLqBYyuJVnSyGcee1U", 317),
    ("zdpuAtfYuD4wSNZF5cWnFmJGserQUBexBrYFnX13FE6uCGWri", 317),
    ("zdpuArGjxpb94mrvtsYXgpQxaRNaBdggQ35cNbLThB7vVo6BX", 317),
    ("zdpuAua9EbDaHb51vfLgWRdgYHhM2JxvD3N2Xd4ixaaeV5pxt", 317),
    ("zdpuB1SNcNXVh3WBKmypA4eky7473ku34jodFq3a2bCUPaV3t", 318),
    ("zdpuAsv3fP7yxkLxweKAwgumty5CR4StznxMTFvCDnWA6bFCv", 318),
    ("zdpuAz6xeVsttHZbNLHdGNG7F7aYz4YSkm8CKH4BHfBqjcPcP", 319),
    ("zdpuAvVAW1J93MQ1aJzMPkysvSdbuEcQgoo54vwZdJYjtePFx", 319),
    ("zdpuAwnTWmYqEcaEVREVqUWfsAYuhGGx2nnNNGyrFhtzeWbxH", 319),
    ("zdpuAvHnZ4oCfYeHcYEM6qokgn6rxBaWpWBArQrovcY8hFVog", 320),
    ("zdpuB37Le7Nf3VJRC6CQGCcUoZXf44AY5526xaPZcGFJXsxpt", 320),
    ("zdpuAuTohrhhFY2k8y3p7U4FEGRdYaV3orFfLgHBVx2q6gH9b", 321),
    ("zdpuApU3JFSFuh9CpGfZutjd2EcrgGqksm5U5rSPpYrmrrfmQ", 322),
    ("zdpuAnfUUoBbQN339j2n6d8CPxXPcmTVEat7agx6DaoiJbLD3", 323),
    ("zdpuAwr41kmMK1UL7XJ4mkAvj2qwZ5WCcPJ4SfhTtDhvbUrEa", 323),
    ("zdpuAp42Mvfk3gRdznxHAtA32y8BUM6E9vGfybCb3N2ogSjKX", 324),
    ("zdpuAqBU8ssmvV2ioYKvLXe5Ygm7azHfAAR6FcjwRTYknmK9Z", 324),
    ("zdpuAnRFKXnZdB4GABrNTbajEygBiEvDgEexW7q3mJrGLSDPc", 327),
    ("zdpuB1uiqbRWjrDYv4aNeGmSwHJXsEYkQQAeweMQPPCxTgMMX", 327),
    ("zdpuB1shniAfBrQaQKq9dY8q7N4s3M4MnFnajVKTj8Lq8QEP1", 328),
    ("zdpuAnXT1QUqoFPJzTZzVA8mrQS3ymnDrWtYg4spTjpeLnD6g", 328),
    ("zdpuArJZjkHWoCgJd7wvQyz4V8YogQd8b1hnuCsM7QeYguFTU", 328),
    ("zdpuAwCDcAzsVqA95Yna1cWKGEQ2atjGVRg3jKximfVcfS4zP", 328),
    ("zdpuAyKj8bB8ybbwwZhQzdWC7W5nmwJaaErN8qMprCCsrHkCu", 328),
    ("zdpuAnT6Tvjn7zmbB3gBkuNu454c2Xy6nivRYituv5u1okoNu", 328),
    ("zdpuAxEaHLVwWiz1oeaPF2a76HDhVb8yi12QbE2WXc6aQru3u", 328),
    ("zdpuAoR1gsaJAQXQiPxVJSRVRbJ9K1gykhrXpdK7yrj3ftkFk", 330),
    ("zdpuB26LE7nsa7QnxMkTZRzLaANtskEtw38vZta7hBjuYt9gZ", 330),
    ("zdpuAn7yxw3czA5ApwzppxuQ9deyPFpHY4hxVvfdALTYRVFiC", 330),
    ("zdpuAy47t9ZPb78EGFkKRRNmssJcjvGPWkLQugYaFUtk229zw", 330),
    ("zdpuApP1FDdopbcv3xeRutMDwrk4fBkRe997AJCGdukNqEuvv", 332),
    ("zdpuAuA9Nr2To8R42h1s5dLvQct9xK2d9ACpV4gexnLvHif2r", 332),
    ("zdpuAubwnjRkDYdTxywGAvgbY1TkSzEKkX3nEsxGTDbaDAvwL", 334),
    ("zdpuAyKyP1GwCgAeaPhPtzax5Mp68kQmdAwpR7HsGrD9WWuyJ", 335),
    ("zdpuAsBMw1BTcmqy8yudn1D6Yob2akHoX3p74u2RRdwqyUmQu", 335),
    ("zdpuAu1tAYpK8S52EBLRwTu7fhGth8EqSaUoE6imNWYJ18GiY", 335),
    ("zdpuB194hHv8y8mAhhkCnw1cdgjvoFdhWkUVcXcMDjPhjyEKn", 335),
    ("zdpuAxRJomMFCzfFtWT6nEpvKa4vfhVEdxAMRuy52nqLb6Dyc", 335),
    ("zdpuAykZXJRzjyvzTaRNB6Z4tikLPUsJ375DKbG9cjUXTBpME", 335),
    ("zdpuB3LRvKCaxikpMUrxr2nVEXdTPw2Ze8nuWqs5n8TCFKVvJ", 336),
    ("zdpuAoyfAtvJ4f5hpKZLCsBMjMzccG7z8gJC7Q7iu9m7eTLPa", 336),
    ("zdpuAyapowyh8YxMSXkgZt4w9rxtk2GocyEWuTzqBS6oqnHWh", 336),
    ("zdpuAzz8u7uZ3bgGrESd8Q3egMRbFBSgvec98cYgZ4B7UqJH8", 338),
    ("zdpuAmfP8fRUQP8mEWVt8J1tdTBuRXpTZBWvBN3kumbhintQz", 340),
    ("zdpuAu2hGeXtZZ2PcSUU7jfDDn6TnabhXmYiZa2UNRgsANBJo", 342),
    ("zdpuAuKZ5vUYGo1udAiwd11Lnvm8GoQ9PwreKMttJb77vQRCL", 344),
    ("zdpuAnHojfou1GsMJFVqb6ghvXbB877bNzByUs82j9ijncWyN", 344),
    ("zdpuAnnu5tUZUVBtn6mLkAvc7kjby65HpndKiVhAfWHNKvuoS", 344),
    ("zdpuB2wnGNJGn6KT92dAskg7LpPieSaLNKvmQBi33XdTuocjG", 344),
    ("zdpuAnSDQYP1UUQtAoKP9i1Rq2irZTdvywScgybau7tHWuhek", 344),
    ("zdpuB3ATAVPffCTHH3jnbFjcg6i6XbrMmfwDJk7Z5E8fpD4mS", 345),
    ("zdpuAwr57fxQGoUus8Ar8K6U5CswPEvmVyFSnEzpMTbAWfhpu", 346),
    ("zdpuApwQ7RRMUrpfeQUFrPU1uCjLWBo46NL5cePcAPLBvLLc3", 350),
    ("zdpuAyUVArmRzXA676pRxvzyTtVtWWsTecfvZmhQeJ353hy5L", 351),
    ("zdpuAshnFpKqf1AZoCPtzP45Zr9vGjmgYbse75kzq7BUWfdfA", 351),
    ("zdpuAnjHHHmNG2kjcy8Vs6jg5PGAx6Ss7Z9D9ecjYm2KGFVtZ", 352),
    ("zdpuAvvWMzpHy18mxCDeZpyRYmi4At5HFWKv1YvsrD5ba1Dwo", 352),
    ("zdpuAqve9Mgo6YCH1KmEw5kQurEQ3QsBKnu35Co2rRhDGdXJh", 356),
    ("zdpuB18qpfhXAnH7Jc1mYgMJ32gGaWvnRkKjYHKjGeayPVKrz", 361),
    ("zdpuAx4AyDjjD3HvhRAnjeTeeNNZqmG3uEWjZEiXRbxrouqbe", 361),
    ("zdpuAtjNournrmYqCkidTS312S2BXupKmDnxevySsJGd2cnt8", 362),
    ("zdpuAq7E169mtFao3Y3239ai4VHsEy5L8EMJbfPn1Zt7CCWw1", 363),
    ("zdpuArXS1XCeY1o37Nqkk1q6S1F4DjV3jqkpAxsevLvbgR2Z6", 363),
    ("zdpuAvrFdVpNJb4Qk35EVDucMTeJuy8SPhtCUyDcvnAkvUQSE", 365),
    ("zdpuAyhVidBJicBWW73Tg3sSgJ7QT9h5Ey79bMxXQjLAMdowf", 365),
    ("zdpuAqwYuhxt4yrefacXKjgFXgTTuZyxbiTqV1QpX6T4JcwVH", 367),
    ("zdpuAnbyr5Tz53y4W6Ee9PoHSzjfBDFzNYW2dcEKXgZMonZUd", 372),
]


def _make_case_test(path):
    """Return a zero-argument test function that runs check_case on *path*.

    The factory closes over *path*, so each generated test keeps its own
    value (no late-binding problem as there would be with a bare lambda in
    the loop below).
    """

    def _case_test():
        check_case(path)

    return _case_test


# Install one module-level function per case; pytest/nose collect module
# attributes named test_*, so the generated tests are discovered exactly
# like the original hand-written ones.
for _hash, _nbytes in _CASES:
    _test_name = "test_{}_len{}".format(_hash, _nbytes)
    _test = _make_case_test("test_objects/{}.ipce.cbor.gz".format(_hash))
    _test.__name__ = _test_name
    _test.__qualname__ = _test_name
    globals()[_test_name] = _test
del _hash, _nbytes, _test_name, _test
def test_zdpuAzKZdp5Ykj8tmyfehQFFJdiMx1j6MiHJrnBpRp86XAPad_len372():
fn = "test_objects/zdpuAzKZdp5Ykj8tmyfehQFFJdiMx1j6MiHJrnBpRp86XAPad.ipce.cbor.gz"
check_case(fn)
def test_zdpuAw8hq7ZbQS68bohmdgfsReYyRgwuU3cQShhxHD8TjctZG_len373():
fn = "test_objects/zdpuAw8hq7ZbQS68bohmdgfsReYyRgwuU3cQShhxHD8TjctZG.ipce.cbor.gz"
check_case(fn)
def test_zdpuArSn2uC3vk2RUVKmy8E2XQFxaP8f4tB7TiMUGgxir3d1j_len375():
fn = "test_objects/zdpuArSn2uC3vk2RUVKmy8E2XQFxaP8f4tB7TiMUGgxir3d1j.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1YqWi67S3aWrTddRdnyAoU4wCAF5sAWmx9jfri9ULLzA_len377():
fn = "test_objects/zdpuB1YqWi67S3aWrTddRdnyAoU4wCAF5sAWmx9jfri9ULLzA.ipce.cbor.gz"
check_case(fn)
def test_zdpuArGamVuRAGq53hgTX7UVLmwgJBxMZ2mwTNnaJUcpALagi_len378():
fn = "test_objects/zdpuArGamVuRAGq53hgTX7UVLmwgJBxMZ2mwTNnaJUcpALagi.ipce.cbor.gz"
check_case(fn)
def test_zdpuApWwD1FND8uLUTMMRT8J5NZ3ReNn1AdQDQxnjoAsN3AUW_len380():
fn = "test_objects/zdpuApWwD1FND8uLUTMMRT8J5NZ3ReNn1AdQDQxnjoAsN3AUW.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsKHcZeSHCLBpqx1SqxZo1RLUTR1LDohoKrW4A59Fk9RH_len380():
fn = "test_objects/zdpuAsKHcZeSHCLBpqx1SqxZo1RLUTR1LDohoKrW4A59Fk9RH.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwKVnmF7tHT7nLzYv9RAm8iazg5PrFL49Zby8c58GsaHh_len384():
fn = "test_objects/zdpuAwKVnmF7tHT7nLzYv9RAm8iazg5PrFL49Zby8c58GsaHh.ipce.cbor.gz"
check_case(fn)
def test_zdpuAoAfDJFvqGjTragFAPAWq67zPt1R9x5U4Wmn1iT7KKw6C_len385():
fn = "test_objects/zdpuAoAfDJFvqGjTragFAPAWq67zPt1R9x5U4Wmn1iT7KKw6C.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1P7urxTy3zFYpY1mPVAYLTXPgskEsvx17v5zdD8Bte3A_len385():
fn = "test_objects/zdpuB1P7urxTy3zFYpY1mPVAYLTXPgskEsvx17v5zdD8Bte3A.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwBLkYwBTEwRXfmhYuFu3sd9sveWzVyLytuJHg29JifZt_len386():
fn = "test_objects/zdpuAwBLkYwBTEwRXfmhYuFu3sd9sveWzVyLytuJHg29JifZt.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvc7SAd5VJ73j6bn8ZJDnRVwKA6YMvSuxFdW4FcQnBU3m_len387():
fn = "test_objects/zdpuAvc7SAd5VJ73j6bn8ZJDnRVwKA6YMvSuxFdW4FcQnBU3m.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxoFMUotTC6YqzFpZFMLtUp4S5gbG3JMXQQpfCfL5r1tT_len387():
fn = "test_objects/zdpuAxoFMUotTC6YqzFpZFMLtUp4S5gbG3JMXQQpfCfL5r1tT.ipce.cbor.gz"
check_case(fn)
def test_zdpuB19wt2Wi9CHp8N7aV2mdP5bDXLGFymvsCdpLHYUR19NE2_len388():
fn = "test_objects/zdpuB19wt2Wi9CHp8N7aV2mdP5bDXLGFymvsCdpLHYUR19NE2.ipce.cbor.gz"
check_case(fn)
def test_zdpuAscGA2R1SkxaBZCe8Hh4J1H2DkqucrryWY6DKxvTjETvm_len390():
fn = "test_objects/zdpuAscGA2R1SkxaBZCe8Hh4J1H2DkqucrryWY6DKxvTjETvm.ipce.cbor.gz"
check_case(fn)
def test_zdpuAkrje7ZEJ2YNtB5FVFokRCZfJo2jKVF3s1ian3x6zGfhU_len390():
fn = "test_objects/zdpuAkrje7ZEJ2YNtB5FVFokRCZfJo2jKVF3s1ian3x6zGfhU.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxZEfj8PxGC8fuGJDwkWTbsMno2wr9bAkDdzhYFnS7cez_len392():
fn = "test_objects/zdpuAxZEfj8PxGC8fuGJDwkWTbsMno2wr9bAkDdzhYFnS7cez.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzxH5r4yQz7q4c3DRGgwwzhGBPB4VVqQrWtaZ72bcmNQm_len392():
fn = "test_objects/zdpuAzxH5r4yQz7q4c3DRGgwwzhGBPB4VVqQrWtaZ72bcmNQm.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzRMVpzaAbnB8buawMGQgVEfuPrz8SrjCJ2keKahsG1M1_len392():
fn = "test_objects/zdpuAzRMVpzaAbnB8buawMGQgVEfuPrz8SrjCJ2keKahsG1M1.ipce.cbor.gz"
check_case(fn)
def test_zdpuArWoa5zrwBs2TEjMZyGFpkRuP1AvcDRcPiUbZkcUDcEHu_len393():
fn = "test_objects/zdpuArWoa5zrwBs2TEjMZyGFpkRuP1AvcDRcPiUbZkcUDcEHu.ipce.cbor.gz"
check_case(fn)
def test_zdpuB23PRc1y4H4nGWL3bpPtV5qEqap5Z3hXLVuMmBYRtGmvE_len395():
fn = "test_objects/zdpuB23PRc1y4H4nGWL3bpPtV5qEqap5Z3hXLVuMmBYRtGmvE.ipce.cbor.gz"
check_case(fn)
def test_zdpuAz2BBA5iQssaNmxnFEqqqET65KndnYPsbY7BZ26PoAsMe_len400():
fn = "test_objects/zdpuAz2BBA5iQssaNmxnFEqqqET65KndnYPsbY7BZ26PoAsMe.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwGWGDnsi6jbj58visgUM7zvDrGqr4zeMHP263BcjbCbR_len407():
fn = "test_objects/zdpuAwGWGDnsi6jbj58visgUM7zvDrGqr4zeMHP263BcjbCbR.ipce.cbor.gz"
check_case(fn)
def test_zdpuAt9B4YsH1pmkPQoAejMeUykrwXHztPudpt9chGagj26Xw_len411():
fn = "test_objects/zdpuAt9B4YsH1pmkPQoAejMeUykrwXHztPudpt9chGagj26Xw.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzJoxXQuV9j18DKniznJApMGTndBKgP1qkA8aG3Ej6MN2_len417():
fn = "test_objects/zdpuAzJoxXQuV9j18DKniznJApMGTndBKgP1qkA8aG3Ej6MN2.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsogh5MAezC1LBSY2jPhKDhFyk3e2fRHXH4D1GceCgKpD_len418():
fn = "test_objects/zdpuAsogh5MAezC1LBSY2jPhKDhFyk3e2fRHXH4D1GceCgKpD.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqXBmVdsWUTdzzbNuLFL32TRdn3uotBbdBDek43eSnCu1_len453():
fn = "test_objects/zdpuAqXBmVdsWUTdzzbNuLFL32TRdn3uotBbdBDek43eSnCu1.ipce.cbor.gz"
check_case(fn)
def test_zdpuAr2qcTAyjPf1pGHM3PchZrtBXRF1nGZQfGAiCK5ZRBbg8_len459():
fn = "test_objects/zdpuAr2qcTAyjPf1pGHM3PchZrtBXRF1nGZQfGAiCK5ZRBbg8.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqffCKHfstnA9LFc3D718xPA9UEY7yyk8PiswEMWamWk5_len467():
fn = "test_objects/zdpuAqffCKHfstnA9LFc3D718xPA9UEY7yyk8PiswEMWamWk5.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnQQfn9LHs7WLLFD8SFa4Eo64kSn2hZ9wY3RkTC82LTCF_len481():
fn = "test_objects/zdpuAnQQfn9LHs7WLLFD8SFa4Eo64kSn2hZ9wY3RkTC82LTCF.ipce.cbor.gz"
check_case(fn)
def test_zdpuB3BvMWas6SWpQU2cMvkUHbwt68DGGkx8fAAGYCou2nLDa_len482():
fn = "test_objects/zdpuB3BvMWas6SWpQU2cMvkUHbwt68DGGkx8fAAGYCou2nLDa.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwoxPU5vSAaujn11CZRdvuBEUkgS4hDHtE8ecRkDfSiJY_len493():
fn = "test_objects/zdpuAwoxPU5vSAaujn11CZRdvuBEUkgS4hDHtE8ecRkDfSiJY.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2GQgdP63mPFwA7TQnvaqovHSXBiNM4ds2T3J2jZE2eXN_len498():
fn = "test_objects/zdpuB2GQgdP63mPFwA7TQnvaqovHSXBiNM4ds2T3J2jZE2eXN.ipce.cbor.gz"
check_case(fn)
def test_zdpuAszU7a182tGwsoNiJXyEk8K4DYeiHG7gBH1Mbj4EWQUxP_len559():
fn = "test_objects/zdpuAszU7a182tGwsoNiJXyEk8K4DYeiHG7gBH1Mbj4EWQUxP.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmCRPdtSQKPu1aWjonHtLaaKYxmtVkBJ8eq8gE3rVsTGT_len717():
fn = "test_objects/zdpuAmCRPdtSQKPu1aWjonHtLaaKYxmtVkBJ8eq8gE3rVsTGT.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2Wxq8TZHTDTTW57eLUeCkFHtJmayBiARSwj13fkjCnfc_len735():
fn = "test_objects/zdpuB2Wxq8TZHTDTTW57eLUeCkFHtJmayBiARSwj13fkjCnfc.ipce.cbor.gz"
check_case(fn)
def test_zdpuB18raxRhNvRNBWaJunxSp9VbqhLP9R2miBn3VFjQiJgYh_len760():
fn = "test_objects/zdpuB18raxRhNvRNBWaJunxSp9VbqhLP9R2miBn3VFjQiJgYh.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1is8Nq5qa3wfeYokB1kHYdACD8PidDwdZRdMQGWMFGLv_len875():
fn = "test_objects/zdpuB1is8Nq5qa3wfeYokB1kHYdACD8PidDwdZRdMQGWMFGLv.ipce.cbor.gz"
check_case(fn)
def test_zdpuAs1iCF7eLnxEiWG7T5w4RB9no94A3Pfsr478m9UR2fV2r_len1295():
fn = "test_objects/zdpuAs1iCF7eLnxEiWG7T5w4RB9no94A3Pfsr478m9UR2fV2r.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyJyqYvVcV6WeniPSbzXrWRC8DPNufSjGZr6SoVbJPJ6D_len1295():
fn = "test_objects/zdpuAyJyqYvVcV6WeniPSbzXrWRC8DPNufSjGZr6SoVbJPJ6D.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnVo6JAyRG9mcnohyDfyVyx5qUhMB4hXZQvyXxzBiKB4c_len1295():
fn = "test_objects/zdpuAnVo6JAyRG9mcnohyDfyVyx5qUhMB4hXZQvyXxzBiKB4c.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzCRGska7useF8Dm6WFTX4HoxNvEribDBeRQ2xRg78FwJ_len1295():
fn = "test_objects/zdpuAzCRGska7useF8Dm6WFTX4HoxNvEribDBeRQ2xRg78FwJ.ipce.cbor.gz"
check_case(fn)
def test_zdpuAspi23xQP1S3dcg3Nr5kkedTDYxZU4QeCUevLEANc1SuD_len1295():
fn = "test_objects/zdpuAspi23xQP1S3dcg3Nr5kkedTDYxZU4QeCUevLEANc1SuD.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwnqEUqeN3oWGme7y3xDDLVKh6ioUiPjvr7cT3FnRyHt2_len1295():
fn = "test_objects/zdpuAwnqEUqeN3oWGme7y3xDDLVKh6ioUiPjvr7cT3FnRyHt2.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsciXKRn5QGXGVU7dmSJuUAgmuryr46FVhrHPHWmnxpBv_len1296():
fn = "test_objects/zdpuAsciXKRn5QGXGVU7dmSJuUAgmuryr46FVhrHPHWmnxpBv.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzcv8hyfZGYChAQizNEhmsr27NjgmJKB2X4Spmqrta23X_len1298():
fn = "test_objects/zdpuAzcv8hyfZGYChAQizNEhmsr27NjgmJKB2X4Spmqrta23X.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxTSfFeu4ZmZgqQgJwbsHSB2k7EsfkLhsncZ2d4kkSeBr_len1298():
fn = "test_objects/zdpuAxTSfFeu4ZmZgqQgJwbsHSB2k7EsfkLhsncZ2d4kkSeBr.ipce.cbor.gz"
check_case(fn)
def test_zdpuApdfmbv52pWTvpoZmxvtWUt9fzd9KP9Ltk74ve8Q6jNQE_len1299():
fn = "test_objects/zdpuApdfmbv52pWTvpoZmxvtWUt9fzd9KP9Ltk74ve8Q6jNQE.ipce.cbor.gz"
check_case(fn)
def test_zdpuB17HDPfVVoi5RLwHML6zPnyf2K5dVxhLgSoUfj482qW4h_len1300():
fn = "test_objects/zdpuB17HDPfVVoi5RLwHML6zPnyf2K5dVxhLgSoUfj482qW4h.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtAN26BiPXokkxg94NFP5gcyx5qaJMm9bHongA6mzVWHm_len1300():
fn = "test_objects/zdpuAtAN26BiPXokkxg94NFP5gcyx5qaJMm9bHongA6mzVWHm.ipce.cbor.gz"
check_case(fn)
def test_zdpuApHBfC39QfjcWeXEqFPuuRVnxoSwsaDaUfV7SxLu82thU_len1300():
fn = "test_objects/zdpuApHBfC39QfjcWeXEqFPuuRVnxoSwsaDaUfV7SxLu82thU.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyTf9qfXwG7BYSb1xLXRFc6ETSVYrnYdh8fgcZuegGfLS_len1301():
fn = "test_objects/zdpuAyTf9qfXwG7BYSb1xLXRFc6ETSVYrnYdh8fgcZuegGfLS.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2ntwkzTx7n8FUTNk5voTgUnn5ApnyFS93zVKVnAbyJCu_len1301():
fn = "test_objects/zdpuB2ntwkzTx7n8FUTNk5voTgUnn5ApnyFS93zVKVnAbyJCu.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxw8Bh1hCgEayhjdnfatUAQdngddyKXWfGvvQ2XKbjn5S_len1301():
fn = "test_objects/zdpuAxw8Bh1hCgEayhjdnfatUAQdngddyKXWfGvvQ2XKbjn5S.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmRAp6CDm8Fzu7zzgH72hSbvYh6vDFx1MsUUQUHq7ihEn_len1301():
fn = "test_objects/zdpuAmRAp6CDm8Fzu7zzgH72hSbvYh6vDFx1MsUUQUHq7ihEn.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzzmp3n8XjUqQ5JoyBtU7EMLvfyf6yhTP3HjVsANXBB48_len1301():
fn = "test_objects/zdpuAzzmp3n8XjUqQ5JoyBtU7EMLvfyf6yhTP3HjVsANXBB48.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsGYHpX5sSCoV9KodDEKwhNdZoAbMEezR3bV3JFwXLqe1_len1301():
fn = "test_objects/zdpuAsGYHpX5sSCoV9KodDEKwhNdZoAbMEezR3bV3JFwXLqe1.ipce.cbor.gz"
check_case(fn)
def test_zdpuAoNtcTBFjeEj4CkuXyvWVo1NBDJLFfVaMqJgS7pVHFY2H_len1301():
fn = "test_objects/zdpuAoNtcTBFjeEj4CkuXyvWVo1NBDJLFfVaMqJgS7pVHFY2H.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsiGLusTtHu87ksvQFqVDJjgMN8eKzjkEiwg3fNPPEsvt_len1301():
fn = "test_objects/zdpuAsiGLusTtHu87ksvQFqVDJjgMN8eKzjkEiwg3fNPPEsvt.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmmqk5zMUWG5ox3SDNhAYcesVkhNG17pHiKMjwoAi88fy_len1301():
fn = "test_objects/zdpuAmmqk5zMUWG5ox3SDNhAYcesVkhNG17pHiKMjwoAi88fy.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtDXYx7yUbq1nZXzbuhn8xyK6f9mNzF1TNQchyrbb1BTL_len1302():
fn = "test_objects/zdpuAtDXYx7yUbq1nZXzbuhn8xyK6f9mNzF1TNQchyrbb1BTL.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2YiB4XFvh9atHEm9siRgpH61iKTSXceL79ZnyiEJZC13_len1302():
fn = "test_objects/zdpuB2YiB4XFvh9atHEm9siRgpH61iKTSXceL79ZnyiEJZC13.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqW4BCU1qQ8vprqNGvbNJ45sjAhABprRcnq5YNwCNCjnS_len1302():
fn = "test_objects/zdpuAqW4BCU1qQ8vprqNGvbNJ45sjAhABprRcnq5YNwCNCjnS.ipce.cbor.gz"
check_case(fn)
def test_zdpuAopN7CbMTDDLx8U6uNMH5D5aoXaVYW9FbeehzdDefT2hF_len1302():
fn = "test_objects/zdpuAopN7CbMTDDLx8U6uNMH5D5aoXaVYW9FbeehzdDefT2hF.ipce.cbor.gz"
check_case(fn)
def test_zdpuAz34ydjXkXCBqVnxj5jwfmJJMVsJ1brrcpmDCSragz3KU_len1302():
fn = "test_objects/zdpuAz34ydjXkXCBqVnxj5jwfmJJMVsJ1brrcpmDCSragz3KU.ipce.cbor.gz"
check_case(fn)
def test_zdpuB3Hv6EiNrY2TxmfznawWVyEMPbHJb3dUnTAoXdnV3QHCF_len1302():
fn = "test_objects/zdpuB3Hv6EiNrY2TxmfznawWVyEMPbHJb3dUnTAoXdnV3QHCF.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzy7gQsE5jBGiw9DSGKbYP5R48tL6TuyQWSnRk5BmsNYn_len1302():
fn = "test_objects/zdpuAzy7gQsE5jBGiw9DSGKbYP5R48tL6TuyQWSnRk5BmsNYn.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyYufQ7ZQvAK1waxhWvMYQvbfwUdFUNGkoMoKtsC7JJvy_len1303():
fn = "test_objects/zdpuAyYufQ7ZQvAK1waxhWvMYQvbfwUdFUNGkoMoKtsC7JJvy.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwsFn8sbEYWkxs9R27CsjRaCcaV83WFkZXDmKehsf41nz_len1303():
fn = "test_objects/zdpuAwsFn8sbEYWkxs9R27CsjRaCcaV83WFkZXDmKehsf41nz.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvDCFFGKNe1WMyShkn9c7oRrzRaDKnDxTmuUEj1EVd6ko_len1303():
fn = "test_objects/zdpuAvDCFFGKNe1WMyShkn9c7oRrzRaDKnDxTmuUEj1EVd6ko.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyVqhgb8kt87nthYbBr71jziAP9NVS1hXAWFFmEX4E7No_len1303():
fn = "test_objects/zdpuAyVqhgb8kt87nthYbBr71jziAP9NVS1hXAWFFmEX4E7No.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwPXm1WAboraP8X6hTpFFzEYonJCsfQbF5XN1gJPjBnJc_len1303():
fn = "test_objects/zdpuAwPXm1WAboraP8X6hTpFFzEYonJCsfQbF5XN1gJPjBnJc.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxjuTfSmmzh5tPi6oc3q5gvcF9DkJUUGqJHkV48aHokc8_len1303():
fn = "test_objects/zdpuAxjuTfSmmzh5tPi6oc3q5gvcF9DkJUUGqJHkV48aHokc8.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuNrN7qBAwcAx7GaiMgMpipnZLTyBPHfaNdkpyriLd6wp_len1303():
fn = "test_objects/zdpuAuNrN7qBAwcAx7GaiMgMpipnZLTyBPHfaNdkpyriLd6wp.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtWcNCmSymUrbw12gmzLNiJ1VssiG3RRoYAjK8mUcm6V8_len1303():
fn = "test_objects/zdpuAtWcNCmSymUrbw12gmzLNiJ1VssiG3RRoYAjK8mUcm6V8.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2MLkdJKL2KFiHvcnP11EbJNHuBH2fZZgaPjV6r5365wU_len1304():
fn = "test_objects/zdpuB2MLkdJKL2KFiHvcnP11EbJNHuBH2fZZgaPjV6r5365wU.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyXMHX6vpQMt1qiSm9UkMgTo8ELG17HK7uiHfea1yQstN_len1304():
fn = "test_objects/zdpuAyXMHX6vpQMt1qiSm9UkMgTo8ELG17HK7uiHfea1yQstN.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxCJyTi6JCPMfHGwrXswCntzVaWGjfqtMQGmFisaxsrws_len1304():
fn = "test_objects/zdpuAxCJyTi6JCPMfHGwrXswCntzVaWGjfqtMQGmFisaxsrws.ipce.cbor.gz"
check_case(fn)
def test_zdpuAu2FwQgYsDQ4sRgDWSuzfgMYydL8hn1zeLFzb4UxPMnew_len1304():
fn = "test_objects/zdpuAu2FwQgYsDQ4sRgDWSuzfgMYydL8hn1zeLFzb4UxPMnew.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzCvNqLFFnboFVg2i1ubkbcHqoKgQMoSJ9Z19x53kLpoG_len1304():
fn = "test_objects/zdpuAzCvNqLFFnboFVg2i1ubkbcHqoKgQMoSJ9Z19x53kLpoG.ipce.cbor.gz"
check_case(fn)
def test_zdpuArVfM84W5ZchsjhuMHNxBeApS4LktX7T98KkMsAXMbQps_len1304():
fn = "test_objects/zdpuArVfM84W5ZchsjhuMHNxBeApS4LktX7T98KkMsAXMbQps.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnur9CDrJgx9MERcCVzpUWKVuBMJ21HpZGv265qjZsv9j_len1304():
fn = "test_objects/zdpuAnur9CDrJgx9MERcCVzpUWKVuBMJ21HpZGv265qjZsv9j.ipce.cbor.gz"
check_case(fn)
def test_zdpuApoRsdojJt2fiRJZBxNUqCNzw8VtB8pHwTRMAwHpbkx9z_len1304():
fn = "test_objects/zdpuApoRsdojJt2fiRJZBxNUqCNzw8VtB8pHwTRMAwHpbkx9z.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyJkD5GWy4QCckQneVMnpe3RDpgRaaqy2pS4tkXpHNvf1_len1304():
fn = "test_objects/zdpuAyJkD5GWy4QCckQneVMnpe3RDpgRaaqy2pS4tkXpHNvf1.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuF3FGFB6JrhjZSMsZVWZuDqBqebd5bfg4Spp4G6hfAwn_len1304():
fn = "test_objects/zdpuAuF3FGFB6JrhjZSMsZVWZuDqBqebd5bfg4Spp4G6hfAwn.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuDnvwwqhY1jGt8CPtd2TqaCpQWrSLfbkooN173k1RkH4_len1304():
fn = "test_objects/zdpuAuDnvwwqhY1jGt8CPtd2TqaCpQWrSLfbkooN173k1RkH4.ipce.cbor.gz"
check_case(fn)
def test_zdpuAr5E6F3atz5q2d6fWaK5SYvXYEd7xYvcd4MhhjQQr7a8h_len1304():
fn = "test_objects/zdpuAr5E6F3atz5q2d6fWaK5SYvXYEd7xYvcd4MhhjQQr7a8h.ipce.cbor.gz"
check_case(fn)
def test_zdpuAma8xfyrRX8sCJ8rqj3nqiFeFLNY53BXdhwVJH5Epjsba_len1304():
fn = "test_objects/zdpuAma8xfyrRX8sCJ8rqj3nqiFeFLNY53BXdhwVJH5Epjsba.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzBjNrFjPvVjBsDfh1geFWtRdoUXHoJ98axKMg9BMC1tM_len1304():
fn = "test_objects/zdpuAzBjNrFjPvVjBsDfh1geFWtRdoUXHoJ98axKMg9BMC1tM.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuTNSQ1G2M71RXquPLQuRJT231e8nk2ahCpx7kmoz9AFV_len1304():
fn = "test_objects/zdpuAuTNSQ1G2M71RXquPLQuRJT231e8nk2ahCpx7kmoz9AFV.ipce.cbor.gz"
check_case(fn)
def test_zdpuAv8pXKEj6mJ2T6HyLjyiQfmvs4pWgZtXABjfiEkaumxCN_len1304():
fn = "test_objects/zdpuAv8pXKEj6mJ2T6HyLjyiQfmvs4pWgZtXABjfiEkaumxCN.ipce.cbor.gz"
check_case(fn)
def test_zdpuAroWBCDAzFbCCFZD3XQZo4T9knXhpHrpfPUuKr8duag4c_len1305():
fn = "test_objects/zdpuAroWBCDAzFbCCFZD3XQZo4T9knXhpHrpfPUuKr8duag4c.ipce.cbor.gz"
check_case(fn)
def test_zdpuAs6PemQmUpndswaMQQk741kEcLcULAbizvNmoBRmn2KBV_len1305():
fn = "test_objects/zdpuAs6PemQmUpndswaMQQk741kEcLcULAbizvNmoBRmn2KBV.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmamjtmJbDrqcFjaUE29ZwDKAMG3x1Lgq1uZWLYfwJGqE_len1305():
fn = "test_objects/zdpuAmamjtmJbDrqcFjaUE29ZwDKAMG3x1Lgq1uZWLYfwJGqE.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2Rs1yQffZBdmEiKath9XSAQaYfTSUHwpChs3u4xXLXyS_len1305():
fn = "test_objects/zdpuB2Rs1yQffZBdmEiKath9XSAQaYfTSUHwpChs3u4xXLXyS.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwjCpfKdQ6nFC3nZKFJKYvu4sRf1nYjXaskac75jWNcBa_len1305():
fn = "test_objects/zdpuAwjCpfKdQ6nFC3nZKFJKYvu4sRf1nYjXaskac75jWNcBa.ipce.cbor.gz"
check_case(fn)
def test_zdpuAruNzwTM3wVQ3iATaAcCpRU1SWDqvgfN25yX8L2vYaER6_len1305():
fn = "test_objects/zdpuAruNzwTM3wVQ3iATaAcCpRU1SWDqvgfN25yX8L2vYaER6.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuzgfpFuQtQ4ZcTaxB4MQKp7ZFh5QNhmWFzo7WmJfouB4_len1305():
fn = "test_objects/zdpuAuzgfpFuQtQ4ZcTaxB4MQKp7ZFh5QNhmWFzo7WmJfouB4.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2KvLfftuvBcnYW6YTopMEKAMGQN6qPtAa2VKB6WVZ3X7_len1305():
fn = "test_objects/zdpuB2KvLfftuvBcnYW6YTopMEKAMGQN6qPtAa2VKB6WVZ3X7.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqcEX3faTGoocfvBuGSrYGpvvAGhyVD6x8ShbWtnSAJpi_len1305():
fn = "test_objects/zdpuAqcEX3faTGoocfvBuGSrYGpvvAGhyVD6x8ShbWtnSAJpi.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuvQS1w1sxsAEYDLd2GUVzcrVEPvKFE28FaoKTfVq2NMX_len1305():
fn = "test_objects/zdpuAuvQS1w1sxsAEYDLd2GUVzcrVEPvKFE28FaoKTfVq2NMX.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnJsB1GS1onx3yHL3bb6KZsq864pydcMS5yt6ky9XgyKw_len1305():
fn = "test_objects/zdpuAnJsB1GS1onx3yHL3bb6KZsq864pydcMS5yt6ky9XgyKw.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvpYv4GdLWvXD4Y6cXcy4Kt7NGyFo7xNuH8mxPZuGtNGm_len1305():
fn = "test_objects/zdpuAvpYv4GdLWvXD4Y6cXcy4Kt7NGyFo7xNuH8mxPZuGtNGm.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnBRL6UtZtN9Fq9t1L2Ld4Jg7bujyMVaoXjUD4wevbQ41_len1305():
fn = "test_objects/zdpuAnBRL6UtZtN9Fq9t1L2Ld4Jg7bujyMVaoXjUD4wevbQ41.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1rF5Pu5dEVx1vxcwQeARac3kK5j36AGG2K18Vc7jHGbP_len1305():
fn = "test_objects/zdpuB1rF5Pu5dEVx1vxcwQeARac3kK5j36AGG2K18Vc7jHGbP.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1r7yTcRhSQ8s76UgUqSDtF1iFwjcGhUKz6prpgYvhcqk_len1305():
fn = "test_objects/zdpuB1r7yTcRhSQ8s76UgUqSDtF1iFwjcGhUKz6prpgYvhcqk.ipce.cbor.gz"
check_case(fn)
def test_zdpuApV4GYJH8UTRy1DZsAkrBt1etCm2Uts5AfJYzDSUjGG7c_len1305():
fn = "test_objects/zdpuApV4GYJH8UTRy1DZsAkrBt1etCm2Uts5AfJYzDSUjGG7c.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsP4c4yD2NXP2LAsPNL1sVQcqi3VCYSruBV3yd79q3itS_len1305():
fn = "test_objects/zdpuAsP4c4yD2NXP2LAsPNL1sVQcqi3VCYSruBV3yd79q3itS.ipce.cbor.gz"
check_case(fn)
def test_zdpuAy75xcVQDdbn4MQ7q5Fg8eM7HHFsYrntBG33qgGMzpRWv_len1305():
fn = "test_objects/zdpuAy75xcVQDdbn4MQ7q5Fg8eM7HHFsYrntBG33qgGMzpRWv.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnjkaoVErVRB3FX1kheLZB927GTgL479EEij9DfJBv6XW_len1305():
fn = "test_objects/zdpuAnjkaoVErVRB3FX1kheLZB927GTgL479EEij9DfJBv6XW.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxLDnYXu36kYbHbyPJcmWystWiCSuk3ZEabG8ryZt75Lq_len1305():
fn = "test_objects/zdpuAxLDnYXu36kYbHbyPJcmWystWiCSuk3ZEabG8ryZt75Lq.ipce.cbor.gz"
check_case(fn)
def test_zdpuAznLBAv9GJygLNNDMDpwpMKT8cZ7oX4WeHnMJx9Yt8vRH_len1305():
fn = "test_objects/zdpuAznLBAv9GJygLNNDMDpwpMKT8cZ7oX4WeHnMJx9Yt8vRH.ipce.cbor.gz"
check_case(fn)
def test_zdpuAy5Bj39EtV68eattcZxvXuVpzx8k9iDmKdhC5Fm8h7RSC_len1305():
fn = "test_objects/zdpuAy5Bj39EtV68eattcZxvXuVpzx8k9iDmKdhC5Fm8h7RSC.ipce.cbor.gz"
check_case(fn)
def test_zdpuB3Up1yK74xnpWNDZ2cQWvs5Z1U3fVwNhT2nscqSJJHGCt_len1305():
fn = "test_objects/zdpuB3Up1yK74xnpWNDZ2cQWvs5Z1U3fVwNhT2nscqSJJHGCt.ipce.cbor.gz"
check_case(fn)
def test_zdpuApNFkuqyQYrRSd3QW1pmUgRzbgg6L4nHF1XQuwyeuSoCG_len1305():
fn = "test_objects/zdpuApNFkuqyQYrRSd3QW1pmUgRzbgg6L4nHF1XQuwyeuSoCG.ipce.cbor.gz"
check_case(fn)
def test_zdpuApAJ2Ug61LUo5rMuTQzJLXm7QUhNr9QvHvRjFjdzR8J3M_len1306():
fn = "test_objects/zdpuApAJ2Ug61LUo5rMuTQzJLXm7QUhNr9QvHvRjFjdzR8J3M.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtPxChJhypAg69PoSSoe8EvGUXm6MHGdVPxYrWP3K2TRq_len1306():
fn = "test_objects/zdpuAtPxChJhypAg69PoSSoe8EvGUXm6MHGdVPxYrWP3K2TRq.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnHm8HMHTyR5gxWFHKZsZ57NFTMM8d4F1ZRDLqKpSRKEh_len1306():
fn = "test_objects/zdpuAnHm8HMHTyR5gxWFHKZsZ57NFTMM8d4F1ZRDLqKpSRKEh.ipce.cbor.gz"
check_case(fn)
def test_zdpuAokycuPPTkuek9NZ4TcCWmryKFFFNgnMJGbmmELafjbfW_len1306():
fn = "test_objects/zdpuAokycuPPTkuek9NZ4TcCWmryKFFFNgnMJGbmmELafjbfW.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnx7c3yN51KLUbNuBQxtocff5gAMUJVuFkSwMphxYnZya_len1306():
fn = "test_objects/zdpuAnx7c3yN51KLUbNuBQxtocff5gAMUJVuFkSwMphxYnZya.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqAgcDV2d5cN7a39LxHG3H8pXJFhg8525CnCVNaSFHrDb_len1306():
fn = "test_objects/zdpuAqAgcDV2d5cN7a39LxHG3H8pXJFhg8525CnCVNaSFHrDb.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2jvodP5BTuRBz5AzP5NEjd2bt2B5VJNYHM9THxyUtNjr_len1306():
fn = "test_objects/zdpuB2jvodP5BTuRBz5AzP5NEjd2bt2B5VJNYHM9THxyUtNjr.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqC6F3LnRJAZomwNssDhRbWticYwQ5Pokh5iuPaPmJq64_len1306():
fn = "test_objects/zdpuAqC6F3LnRJAZomwNssDhRbWticYwQ5Pokh5iuPaPmJq64.ipce.cbor.gz"
check_case(fn)
def test_zdpuAytj5SQGLdfxyB3VrWZfhkgXLd8GDbKxUV2s7VTsBmyxB_len1306():
fn = "test_objects/zdpuAytj5SQGLdfxyB3VrWZfhkgXLd8GDbKxUV2s7VTsBmyxB.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqyhgpaXaVsULnrA6esRrfzA92MdMAqWznfJhY7hJTRHz_len1306():
fn = "test_objects/zdpuAqyhgpaXaVsULnrA6esRrfzA92MdMAqWznfJhY7hJTRHz.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxG3VEmUDVAoJrbSfhFvd9aQXgpBnCBiNc1p3iQ7AsQEA_len1306():
fn = "test_objects/zdpuAxG3VEmUDVAoJrbSfhFvd9aQXgpBnCBiNc1p3iQ7AsQEA.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsQQXzxFAGJDtazQJsdxGkvxF5dGqavupeUakhPsJaoVY_len1306():
fn = "test_objects/zdpuAsQQXzxFAGJDtazQJsdxGkvxF5dGqavupeUakhPsJaoVY.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvzf78ZXZZJWCLh6APL5GwEYjaZ2hzp8iyknnhtdw4S6h_len1306():
fn = "test_objects/zdpuAvzf78ZXZZJWCLh6APL5GwEYjaZ2hzp8iyknnhtdw4S6h.ipce.cbor.gz"
check_case(fn)
def test_zdpuAn6FDQvZ11xXTemLmyRJjNgStqUtswriv6CKZuHwjgjW3_len1306():
fn = "test_objects/zdpuAn6FDQvZ11xXTemLmyRJjNgStqUtswriv6CKZuHwjgjW3.ipce.cbor.gz"
check_case(fn)
def test_zdpuAw5w24vY3P2MnRjQTgWoGpWoHvG4HLmxZqsFTnh9jBjCX_len1307():
fn = "test_objects/zdpuAw5w24vY3P2MnRjQTgWoGpWoHvG4HLmxZqsFTnh9jBjCX.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyFBNeC55VFsGqFxzCeUTtAN7F9CMrk85eLfKueQjei4j_len1307():
fn = "test_objects/zdpuAyFBNeC55VFsGqFxzCeUTtAN7F9CMrk85eLfKueQjei4j.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvmHsg4QFhvnBNjzVBw5EgWRUtJhybWsNtv4ksp2esXHt_len1307():
fn = "test_objects/zdpuAvmHsg4QFhvnBNjzVBw5EgWRUtJhybWsNtv4ksp2esXHt.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzGFmmnWb9ybcdUVddczMyTprFu8oWZKrXwTsatTkFvxq_len1307():
fn = "test_objects/zdpuAzGFmmnWb9ybcdUVddczMyTprFu8oWZKrXwTsatTkFvxq.ipce.cbor.gz"
check_case(fn)
def test_zdpuB19wSyXsjXYuKyiBjZDt1uV3FeBtZWas9cNufb6VzhYff_len1307():
fn = "test_objects/zdpuB19wSyXsjXYuKyiBjZDt1uV3FeBtZWas9cNufb6VzhYff.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxGnGchgQpA1obM5cKQV6Kf3ksQmWzQdC3LhVCp3Ua8xz_len1307():
fn = "test_objects/zdpuAxGnGchgQpA1obM5cKQV6Kf3ksQmWzQdC3LhVCp3Ua8xz.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1yZpkbfEW75P7DFTpGhUB6qwcCuLMtKVQofmHHjx8s7n_len1307():
fn = "test_objects/zdpuB1yZpkbfEW75P7DFTpGhUB6qwcCuLMtKVQofmHHjx8s7n.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvFrKxQmSAJcX6qvEZP4eDu5iKYf1wnnbtszJzmdxuuP8_len1307():
fn = "test_objects/zdpuAvFrKxQmSAJcX6qvEZP4eDu5iKYf1wnnbtszJzmdxuuP8.ipce.cbor.gz"
check_case(fn)
def test_zdpuApv3krFihEAz7fF8hdDGhq5YpXEesLWVAEAjmZoQKjTiF_len1307():
fn = "test_objects/zdpuApv3krFihEAz7fF8hdDGhq5YpXEesLWVAEAjmZoQKjTiF.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzzov4kUqCx8EhdvWfyJSKdryJy8Fu1XKc5pXn9iojMqL_len1307():
fn = "test_objects/zdpuAzzov4kUqCx8EhdvWfyJSKdryJy8Fu1XKc5pXn9iojMqL.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtSY4fmbaD7BcVWEAT7hr8DTs8ADGrQcBy6PJmH7gZ1Hz_len1307():
fn = "test_objects/zdpuAtSY4fmbaD7BcVWEAT7hr8DTs8ADGrQcBy6PJmH7gZ1Hz.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvR3qVUa8DqFxfCsKP3HodJhrmykEJdHcFfvGhCrJXtey_len1307():
fn = "test_objects/zdpuAvR3qVUa8DqFxfCsKP3HodJhrmykEJdHcFfvGhCrJXtey.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsss2mZYszezekaNaBd3JgH3pxS77ucgd4qgxQkBqSnGd_len1307():
fn = "test_objects/zdpuAsss2mZYszezekaNaBd3JgH3pxS77ucgd4qgxQkBqSnGd.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtkCaKHMayVPnkrHDycT52KaWs7egpRw3ErmYxvKKHDQn_len1307():
fn = "test_objects/zdpuAtkCaKHMayVPnkrHDycT52KaWs7egpRw3ErmYxvKKHDQn.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsB6qeK54qt9wj5N3264GLgKbTU6wrhjPM1LGFrEWmo4f_len1308():
fn = "test_objects/zdpuAsB6qeK54qt9wj5N3264GLgKbTU6wrhjPM1LGFrEWmo4f.ipce.cbor.gz"
check_case(fn)
def test_zdpuAymis2H6EUP2tf3YjnVrQeLf1y4ornwAJP7P7ntEe39qg_len1308():
fn = "test_objects/zdpuAymis2H6EUP2tf3YjnVrQeLf1y4ornwAJP7P7ntEe39qg.ipce.cbor.gz"
check_case(fn)
def test_zdpuAkRTaNjNkThKHSbvYYK2Hh4ETUqx2zX9VCQkrwvuzEwr3_len1308():
fn = "test_objects/zdpuAkRTaNjNkThKHSbvYYK2Hh4ETUqx2zX9VCQkrwvuzEwr3.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnEqEJefo6vs21byWNYBaBr1Ai4HihyeUyxJqHAG56Afq_len1308():
fn = "test_objects/zdpuAnEqEJefo6vs21byWNYBaBr1Ai4HihyeUyxJqHAG56Afq.ipce.cbor.gz"
check_case(fn)
def test_zdpuAndWSwzVFy9Jvs4Pzso7v118SwMC65QCkeB7dme1DmcHj_len1308():
fn = "test_objects/zdpuAndWSwzVFy9Jvs4Pzso7v118SwMC65QCkeB7dme1DmcHj.ipce.cbor.gz"
check_case(fn)
def test_zdpuAq78GQZorJLnFpKF8Q7H4bDq39zpEgArU7sEf1r9BagSd_len1308():
fn = "test_objects/zdpuAq78GQZorJLnFpKF8Q7H4bDq39zpEgArU7sEf1r9BagSd.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxNSQa5TRVDHiYEPXF1kyJNC2ibqn8vRJheZcRnVyd6tR_len1308():
fn = "test_objects/zdpuAxNSQa5TRVDHiYEPXF1kyJNC2ibqn8vRJheZcRnVyd6tR.ipce.cbor.gz"
check_case(fn)
def test_zdpuB22bpGg88RvHPZS5XSwk7szmdA3dL6jaS88R57RoX7fhz_len1308():
fn = "test_objects/zdpuB22bpGg88RvHPZS5XSwk7szmdA3dL6jaS88R57RoX7fhz.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2McYF943ntuK2ioZt8HqyoH6jGUm1H2vJDuXPDTndfQw_len1308():
fn = "test_objects/zdpuB2McYF943ntuK2ioZt8HqyoH6jGUm1H2vJDuXPDTndfQw.ipce.cbor.gz"
check_case(fn)
def test_zdpuAserBCZG5A3qv2zjNEJo8fuZyJ7ZH1qpW6Wr836o1BwNi_len1308():
fn = "test_objects/zdpuAserBCZG5A3qv2zjNEJo8fuZyJ7ZH1qpW6Wr836o1BwNi.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnPTxtfh9Zapj7dExBepUujmgkA3PG8VgyAX2rfgJxkHK_len1308():
fn = "test_objects/zdpuAnPTxtfh9Zapj7dExBepUujmgkA3PG8VgyAX2rfgJxkHK.ipce.cbor.gz"
check_case(fn)
def test_zdpuAussyqGY6dNJVEyp3LSjk9oWLc2rQ3HkQcybvwCnqZByd_len1308():
fn = "test_objects/zdpuAussyqGY6dNJVEyp3LSjk9oWLc2rQ3HkQcybvwCnqZByd.ipce.cbor.gz"
check_case(fn)
def test_zdpuApWeRUAyxN7qa8orAN5N3jevCbCZdws3VkjcjDvbHmJNZ_len1308():
fn = "test_objects/zdpuApWeRUAyxN7qa8orAN5N3jevCbCZdws3VkjcjDvbHmJNZ.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuSDaGJCwtaaHaUNBpkVX7uzuq9AFdBauALQS5KG3io9Y_len1308():
fn = "test_objects/zdpuAuSDaGJCwtaaHaUNBpkVX7uzuq9AFdBauALQS5KG3io9Y.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmmxWYUBmbTLgZuVtnY7f1kZFXb1hJnqs88hGFEMrpxVZ_len1308():
fn = "test_objects/zdpuAmmxWYUBmbTLgZuVtnY7f1kZFXb1hJnqs88hGFEMrpxVZ.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1RnGqpU4S62vLWkRuhMXKQMyWQ7EHE1FASSf75hSHtYT_len1308():
fn = "test_objects/zdpuB1RnGqpU4S62vLWkRuhMXKQMyWQ7EHE1FASSf75hSHtYT.ipce.cbor.gz"
check_case(fn)
def test_zdpuArGuDDHkUbrvAqbNwdsEV7UBg5k57hPztNitzW9zSLPJA_len1308():
fn = "test_objects/zdpuArGuDDHkUbrvAqbNwdsEV7UBg5k57hPztNitzW9zSLPJA.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxHYYAHYFQ9JM7kmP5LrEc8WwnTV4hhcTYC6EdQkCvPHT_len1308():
fn = "test_objects/zdpuAxHYYAHYFQ9JM7kmP5LrEc8WwnTV4hhcTYC6EdQkCvPHT.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyMftq4rT8ELNn8YUNuXmYBLNhc6E8YC5ZWwUoDR6AHKT_len1308():
fn = "test_objects/zdpuAyMftq4rT8ELNn8YUNuXmYBLNhc6E8YC5ZWwUoDR6AHKT.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvGZSzmRYjZiDUiEeh6qP9Gp2uHkqvWzmHmpx2EXsdryZ_len1308():
fn = "test_objects/zdpuAvGZSzmRYjZiDUiEeh6qP9Gp2uHkqvWzmHmpx2EXsdryZ.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsGUdnZTbX2cXzxrxy3RWSCvnGNExWHfEJLebrrLtgiFP_len1308():
fn = "test_objects/zdpuAsGUdnZTbX2cXzxrxy3RWSCvnGNExWHfEJLebrrLtgiFP.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvJWagb8Nqf3dCmrvVfcddcGJpce2cZyyAUGem18fwGsm_len1308():
fn = "test_objects/zdpuAvJWagb8Nqf3dCmrvVfcddcGJpce2cZyyAUGem18fwGsm.ipce.cbor.gz"
check_case(fn)
def test_zdpuAu7o4SuLJpxpo2FS2UtViVSHsaq1CEYvqspEjwV88KNGq_len1308():
fn = "test_objects/zdpuAu7o4SuLJpxpo2FS2UtViVSHsaq1CEYvqspEjwV88KNGq.ipce.cbor.gz"
check_case(fn)
def test_zdpuAkU9tMS4mWnA6sWN5BSnJTanvfUnctAYmeJiUm8BD5Gfm_len1308():
fn = "test_objects/zdpuAkU9tMS4mWnA6sWN5BSnJTanvfUnctAYmeJiUm8BD5Gfm.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsLo7bXKYiai9NcxS9a2pAgTBw4cCPcU5DVrHE4t3x3jY_len1309():
fn = "test_objects/zdpuAsLo7bXKYiai9NcxS9a2pAgTBw4cCPcU5DVrHE4t3x3jY.ipce.cbor.gz"
check_case(fn)
# Auto-generated regression cases: each one feeds a stored IPCE fixture to check_case.
def test_zdpuAmth2htv4NQJpiDLY3pCCoN67WB4wEWzdV7CVqV9YYext_len1309():
    check_case("test_objects/zdpuAmth2htv4NQJpiDLY3pCCoN67WB4wEWzdV7CVqV9YYext.ipce.cbor.gz")
def test_zdpuAvjRXKsKTjWVY4VucYp7HtecejLrAbnwQBzTYTbd1Fjw5_len1309():
    check_case("test_objects/zdpuAvjRXKsKTjWVY4VucYp7HtecejLrAbnwQBzTYTbd1Fjw5.ipce.cbor.gz")
def test_zdpuAtNQJeL1GxhCn5H6NHscA6KmMR3FZ9xgpB5HdqsDXkFbb_len1309():
    check_case("test_objects/zdpuAtNQJeL1GxhCn5H6NHscA6KmMR3FZ9xgpB5HdqsDXkFbb.ipce.cbor.gz")
def test_zdpuAvb3kZ7SmUX27AsoE5pp2sAaR639YEJ6NRbT7mezsBjyP_len1309():
    check_case("test_objects/zdpuAvb3kZ7SmUX27AsoE5pp2sAaR639YEJ6NRbT7mezsBjyP.ipce.cbor.gz")
def test_zdpuAzbHBsM4HNUVHhnNtPMUgZddTjs9Wt9dbrkunjPE61yEh_len1309():
    check_case("test_objects/zdpuAzbHBsM4HNUVHhnNtPMUgZddTjs9Wt9dbrkunjPE61yEh.ipce.cbor.gz")
def test_zdpuAq7ETPGYNyS8gSoDhvwjiYVZ2TQsGudPkkVZ7kGEom5mE_len1310():
    check_case("test_objects/zdpuAq7ETPGYNyS8gSoDhvwjiYVZ2TQsGudPkkVZ7kGEom5mE.ipce.cbor.gz")
def test_zdpuAqdQGjCqZVkhM1x3URwpqx1W8z9FWUyQrwbhphUJvzWzG_len1310():
    check_case("test_objects/zdpuAqdQGjCqZVkhM1x3URwpqx1W8z9FWUyQrwbhphUJvzWzG.ipce.cbor.gz")
def test_zdpuB13vs56xdxSxBmHHqWAqXzxbDzRUq5nY4hkDXuPXE84FY_len1310():
    check_case("test_objects/zdpuB13vs56xdxSxBmHHqWAqXzxbDzRUq5nY4hkDXuPXE84FY.ipce.cbor.gz")
def test_zdpuAwS4RA5ffC3Y6ecgAZqKau8SNrydSJ3iNg4t3FhkpsNAc_len1310():
    check_case("test_objects/zdpuAwS4RA5ffC3Y6ecgAZqKau8SNrydSJ3iNg4t3FhkpsNAc.ipce.cbor.gz")
def test_zdpuB1HYWg4GLnMM1beVkWvs1yL6dhiznpLJCFMQVT89gPBqt_len1310():
    check_case("test_objects/zdpuB1HYWg4GLnMM1beVkWvs1yL6dhiznpLJCFMQVT89gPBqt.ipce.cbor.gz")
def test_zdpuAx9WEJkfZN2kKL9Hwwm9ovmv1cszwcaXQwVFvKYmL3PDG_len1310():
    check_case("test_objects/zdpuAx9WEJkfZN2kKL9Hwwm9ovmv1cszwcaXQwVFvKYmL3PDG.ipce.cbor.gz")
def test_zdpuAqyZdf96JrMnRr9L7EJhNKu38cSLkgQRcwz9FZpA5kpKP_len1310():
    check_case("test_objects/zdpuAqyZdf96JrMnRr9L7EJhNKu38cSLkgQRcwz9FZpA5kpKP.ipce.cbor.gz")
def test_zdpuAwBaRSWv1JaEaJB9wrbsdr9QoM3n6bzCXtVYPZiyzuMSc_len1310():
    check_case("test_objects/zdpuAwBaRSWv1JaEaJB9wrbsdr9QoM3n6bzCXtVYPZiyzuMSc.ipce.cbor.gz")
def test_zdpuAxbFZXfWVxiw69ThK4A7s2m155jqeHsxMytfXsox5nLsS_len1310():
    check_case("test_objects/zdpuAxbFZXfWVxiw69ThK4A7s2m155jqeHsxMytfXsox5nLsS.ipce.cbor.gz")
def test_zdpuAqiHPb9avk6ajkY8paeKAa3Ce4u3L32PiQnyjomUb16rx_len1310():
    check_case("test_objects/zdpuAqiHPb9avk6ajkY8paeKAa3Ce4u3L32PiQnyjomUb16rx.ipce.cbor.gz")
def test_zdpuAr9hzz9Xjm94Kr7hnp8wx3ZXCLUtbeeJ8AKqa32iCGtCe_len1310():
    check_case("test_objects/zdpuAr9hzz9Xjm94Kr7hnp8wx3ZXCLUtbeeJ8AKqa32iCGtCe.ipce.cbor.gz")
def test_zdpuAowWVUGJcs4QYXjtqgESmvDuK9jXiomCmcMhTkwk2LPPk_len1310():
    check_case("test_objects/zdpuAowWVUGJcs4QYXjtqgESmvDuK9jXiomCmcMhTkwk2LPPk.ipce.cbor.gz")
def test_zdpuAmQLXVTQrK9M7pDK8VGfTW6C3tn3fKUznUGxHhgwQKP4H_len1310():
    check_case("test_objects/zdpuAmQLXVTQrK9M7pDK8VGfTW6C3tn3fKUznUGxHhgwQKP4H.ipce.cbor.gz")
def test_zdpuAuZioLi13XEtuF6iezZ7ZV7MUq1SNRV7NQEoz43X4HoPu_len1311():
    check_case("test_objects/zdpuAuZioLi13XEtuF6iezZ7ZV7MUq1SNRV7NQEoz43X4HoPu.ipce.cbor.gz")
def test_zdpuAvQy7nw9yLaCy3RHupyteaUQQgXMo2Wnr6iMk1QdG5jTA_len1311():
    check_case("test_objects/zdpuAvQy7nw9yLaCy3RHupyteaUQQgXMo2Wnr6iMk1QdG5jTA.ipce.cbor.gz")
def test_zdpuAqXthmiH1tfZYbjAfPvAKoxF8sdUUB6hN8DyQTAE6jEJG_len1311():
    check_case("test_objects/zdpuAqXthmiH1tfZYbjAfPvAKoxF8sdUUB6hN8DyQTAE6jEJG.ipce.cbor.gz")
def test_zdpuAtTFy8ggiLs5Dn8EzJtr4KmdWCVdP1Ay9Z9XmMeL6HMos_len1311():
    check_case("test_objects/zdpuAtTFy8ggiLs5Dn8EzJtr4KmdWCVdP1Ay9Z9XmMeL6HMos.ipce.cbor.gz")
def test_zdpuAkrZEhfs1586q3ak9QT2jjrBVymL7fyWytM86Aq6soEYx_len1311():
    check_case("test_objects/zdpuAkrZEhfs1586q3ak9QT2jjrBVymL7fyWytM86Aq6soEYx.ipce.cbor.gz")
def test_zdpuAzDdy8UgkBDd5U5yJqktXYkeyh6HTKM2xSwhAcLt2eyg5_len1311():
    check_case("test_objects/zdpuAzDdy8UgkBDd5U5yJqktXYkeyh6HTKM2xSwhAcLt2eyg5.ipce.cbor.gz")
def test_zdpuB1SWrPUcvRDGNTyCd17Bz9gpxcBCTuCRUusCZd5J6eCnz_len1311():
    check_case("test_objects/zdpuB1SWrPUcvRDGNTyCd17Bz9gpxcBCTuCRUusCZd5J6eCnz.ipce.cbor.gz")
def test_zdpuAm2ofd8CgqZGVRUfTSn6Trten2oSQVMNCuADQMd4FynUD_len1311():
    check_case("test_objects/zdpuAm2ofd8CgqZGVRUfTSn6Trten2oSQVMNCuADQMd4FynUD.ipce.cbor.gz")
def test_zdpuAqJDwuCf3mD3zyFzHuoVRC8ZNkxmCsCkotFU21UiWZWEd_len1312():
    check_case("test_objects/zdpuAqJDwuCf3mD3zyFzHuoVRC8ZNkxmCsCkotFU21UiWZWEd.ipce.cbor.gz")
def test_zdpuAuvQtxbatL6fuSQzHTfMMfQHPbJzgX4QxL4utBzNJitoD_len1312():
    check_case("test_objects/zdpuAuvQtxbatL6fuSQzHTfMMfQHPbJzgX4QxL4utBzNJitoD.ipce.cbor.gz")
def test_zdpuAn7rmBqyWHHigvT8GoaqVsscMCr89u26bu6UgWtDDGkBZ_len1312():
    check_case("test_objects/zdpuAn7rmBqyWHHigvT8GoaqVsscMCr89u26bu6UgWtDDGkBZ.ipce.cbor.gz")
def test_zdpuAs4bAwEm4oq9hh9ssr6dpZ54XLPXvrjkWBruN4ZzZsfwf_len1312():
    check_case("test_objects/zdpuAs4bAwEm4oq9hh9ssr6dpZ54XLPXvrjkWBruN4ZzZsfwf.ipce.cbor.gz")
def test_zdpuB1fzCiYqhcZP8kxKtdT7LWftrbR4g2mYdkuiSkq2zDTWF_len1312():
    check_case("test_objects/zdpuB1fzCiYqhcZP8kxKtdT7LWftrbR4g2mYdkuiSkq2zDTWF.ipce.cbor.gz")
def test_zdpuAu9VBFPjhKvncAT8cSvVr8D2Nfg1bBBxmHXFBXjzbczqH_len1312():
    check_case("test_objects/zdpuAu9VBFPjhKvncAT8cSvVr8D2Nfg1bBBxmHXFBXjzbczqH.ipce.cbor.gz")
def test_zdpuAmXQ5p9CDaPudTJ8yyytcK6uVgpfJYCLndEkiHhXmC6YZ_len1312():
    check_case("test_objects/zdpuAmXQ5p9CDaPudTJ8yyytcK6uVgpfJYCLndEkiHhXmC6YZ.ipce.cbor.gz")
def test_zdpuB274kkegbsx2uPPXe9Xe1LMJD7urRY4bWRqHFyAhLeTPS_len1312():
    check_case("test_objects/zdpuB274kkegbsx2uPPXe9Xe1LMJD7urRY4bWRqHFyAhLeTPS.ipce.cbor.gz")
def test_zdpuAtpoRxt9QfNRJULrnkKBRYva71K2R8w7o1AWRz5Rmw18E_len1313():
    check_case("test_objects/zdpuAtpoRxt9QfNRJULrnkKBRYva71K2R8w7o1AWRz5Rmw18E.ipce.cbor.gz")
def test_zdpuAsRf4ThpbZAR5jGKucBS5RvujKhSfC6y9iRnCyaW6UCA7_len1313():
    check_case("test_objects/zdpuAsRf4ThpbZAR5jGKucBS5RvujKhSfC6y9iRnCyaW6UCA7.ipce.cbor.gz")
def test_zdpuB1mSs4YXYBFiq7afJgA31y3wHugpJPGHXCauA3fe6Eyre_len1313():
    check_case("test_objects/zdpuB1mSs4YXYBFiq7afJgA31y3wHugpJPGHXCauA3fe6Eyre.ipce.cbor.gz")
def test_zdpuAt87UPfDTmkUDYEtU5MmAtTPBQgPqRcbuMPRUJ5LqLMXR_len1313():
    check_case("test_objects/zdpuAt87UPfDTmkUDYEtU5MmAtTPBQgPqRcbuMPRUJ5LqLMXR.ipce.cbor.gz")
def test_zdpuAtQM4c8j89tcH6nBq6UFVb1HW8ikW6sUcUUtYEsmP4PmB_len1313():
    check_case("test_objects/zdpuAtQM4c8j89tcH6nBq6UFVb1HW8ikW6sUcUUtYEsmP4PmB.ipce.cbor.gz")
def test_zdpuAuo3YG2UH7pMLDrM82ZN8rRUimL6jZhWjUE72fjobXJwP_len1313():
    check_case("test_objects/zdpuAuo3YG2UH7pMLDrM82ZN8rRUimL6jZhWjUE72fjobXJwP.ipce.cbor.gz")
def test_zdpuAmGGSDKLr5jjHtSENd94x5SeXnYGiSGRDL8RaiD1Wn3UJ_len1313():
    check_case("test_objects/zdpuAmGGSDKLr5jjHtSENd94x5SeXnYGiSGRDL8RaiD1Wn3UJ.ipce.cbor.gz")
def test_zdpuAyiuNjprUUh9efYnEmX3g5PcZ4QbgQ3GSyNgsDt2YLfj7_len1313():
    check_case("test_objects/zdpuAyiuNjprUUh9efYnEmX3g5PcZ4QbgQ3GSyNgsDt2YLfj7.ipce.cbor.gz")
def test_zdpuApt9zxqJTocgLVAJNTrUyhmkMocJjm7R5L66warFKRuVp_len1313():
    check_case("test_objects/zdpuApt9zxqJTocgLVAJNTrUyhmkMocJjm7R5L66warFKRuVp.ipce.cbor.gz")
# Auto-generated regression cases: each one feeds a stored IPCE fixture to check_case.
def test_zdpuAmYeXJCaDzKorTCB5Ro1SJCsbCQur5k6kfWh8gFCfh8kb_len1314():
    check_case("test_objects/zdpuAmYeXJCaDzKorTCB5Ro1SJCsbCQur5k6kfWh8gFCfh8kb.ipce.cbor.gz")
def test_zdpuAwKEwEB5YdZxg7EvZ8jQUHJgvXf2rhbmyNpZWWQtRnbZX_len1314():
    check_case("test_objects/zdpuAwKEwEB5YdZxg7EvZ8jQUHJgvXf2rhbmyNpZWWQtRnbZX.ipce.cbor.gz")
def test_zdpuAq8RpAnGJ2NasPerKPq1fL8sEw2aGVcv1JbuC52QJVvpb_len1314():
    check_case("test_objects/zdpuAq8RpAnGJ2NasPerKPq1fL8sEw2aGVcv1JbuC52QJVvpb.ipce.cbor.gz")
def test_zdpuAxggHkiChPqcqqv38bybVBG5BxEAhsK9mSXpGcf82RStK_len1314():
    check_case("test_objects/zdpuAxggHkiChPqcqqv38bybVBG5BxEAhsK9mSXpGcf82RStK.ipce.cbor.gz")
def test_zdpuAsxyHgJii4CoevX5687Psf5ZC5r9GbYuQ87PNaGBnHKcv_len1315():
    check_case("test_objects/zdpuAsxyHgJii4CoevX5687Psf5ZC5r9GbYuQ87PNaGBnHKcv.ipce.cbor.gz")
def test_zdpuB2rPWv2XJAyCdWL8HQZ2Dmm5aZxG4RW4YtCaTd5cSwpyd_len1315():
    check_case("test_objects/zdpuB2rPWv2XJAyCdWL8HQZ2Dmm5aZxG4RW4YtCaTd5cSwpyd.ipce.cbor.gz")
def test_zdpuAuuxNoh2kGAFRqyLLkGrPpYpEHyjytRFsCkUCzZPi77xa_len1315():
    check_case("test_objects/zdpuAuuxNoh2kGAFRqyLLkGrPpYpEHyjytRFsCkUCzZPi77xa.ipce.cbor.gz")
def test_zdpuAzLzbUuYBru2zzb8iX2PtQ9PBV4tDCTm3hbrFSriLNn5g_len1315():
    check_case("test_objects/zdpuAzLzbUuYBru2zzb8iX2PtQ9PBV4tDCTm3hbrFSriLNn5g.ipce.cbor.gz")
def test_zdpuAv2toy6qAasXWg9kBxK8DujX8ri6yJKRTg2AJmoscMEQy_len1315():
    check_case("test_objects/zdpuAv2toy6qAasXWg9kBxK8DujX8ri6yJKRTg2AJmoscMEQy.ipce.cbor.gz")
def test_zdpuAt6iJPZNmgX9UYHvUb9nrjurgp2LupYuHo8FFV8cYP7y4_len1315():
    check_case("test_objects/zdpuAt6iJPZNmgX9UYHvUb9nrjurgp2LupYuHo8FFV8cYP7y4.ipce.cbor.gz")
def test_zdpuAksWEGji9rq9UetYpiddBPcCVatWyosDs11nZGnFuSYoN_len1315():
    check_case("test_objects/zdpuAksWEGji9rq9UetYpiddBPcCVatWyosDs11nZGnFuSYoN.ipce.cbor.gz")
def test_zdpuAxDq7f1Xhv6TKGo3vbncuUpikGKCwe6nszJTbDFLaUPUw_len1316():
    check_case("test_objects/zdpuAxDq7f1Xhv6TKGo3vbncuUpikGKCwe6nszJTbDFLaUPUw.ipce.cbor.gz")
def test_zdpuAtfYtq92x7qzQ1B8a5G2gbMUHm5UkbByLqjpXCygqXwuH_len1316():
    check_case("test_objects/zdpuAtfYtq92x7qzQ1B8a5G2gbMUHm5UkbByLqjpXCygqXwuH.ipce.cbor.gz")
def test_zdpuAx2jL2DKnvR8DdDkwrrhSnspA7xRoybWxnwUthd3Kkk5y_len1316():
    check_case("test_objects/zdpuAx2jL2DKnvR8DdDkwrrhSnspA7xRoybWxnwUthd3Kkk5y.ipce.cbor.gz")
def test_zdpuApRLPZJJZCdpNXYTesgdtAzxrgfxNPptZNr5dUjqUzfmh_len1316():
    check_case("test_objects/zdpuApRLPZJJZCdpNXYTesgdtAzxrgfxNPptZNr5dUjqUzfmh.ipce.cbor.gz")
def test_zdpuB2RRtUA36Dc5HqM5K8jnLqLmxxewQGDz2mTvY3SCJpNpx_len1316():
    check_case("test_objects/zdpuB2RRtUA36Dc5HqM5K8jnLqLmxxewQGDz2mTvY3SCJpNpx.ipce.cbor.gz")
def test_zdpuAmQwM2E9TwXj2HhPMDEfc3z3zQHVvezS6nE5pssPYKH4n_len1318():
    check_case("test_objects/zdpuAmQwM2E9TwXj2HhPMDEfc3z3zQHVvezS6nE5pssPYKH4n.ipce.cbor.gz")
def test_zdpuB23jj62YbxcKKEkNAinXwmo9DDs2FmADjxAjz3oX7rbic_len1319():
    check_case("test_objects/zdpuB23jj62YbxcKKEkNAinXwmo9DDs2FmADjxAjz3oX7rbic.ipce.cbor.gz")
def test_zdpuAy1fqCEUHVuhechNYABHRNJgde7jZgAuN3SUwDtjYxFRV_len1319():
    check_case("test_objects/zdpuAy1fqCEUHVuhechNYABHRNJgde7jZgAuN3SUwDtjYxFRV.ipce.cbor.gz")
def test_zdpuAmWjfyjnUc3ERzbX1vmL1eQXmpG6nSDUMKe4TKbfkri6W_len1319():
    check_case("test_objects/zdpuAmWjfyjnUc3ERzbX1vmL1eQXmpG6nSDUMKe4TKbfkri6W.ipce.cbor.gz")
def test_zdpuAmsaedqG7XzVMovpVcq5sApx6eVc89bBXAoYYWHxr1J28_len1322():
    check_case("test_objects/zdpuAmsaedqG7XzVMovpVcq5sApx6eVc89bBXAoYYWHxr1J28.ipce.cbor.gz")
def test_zdpuAtn7kAwpmFLuVqqJZkycECDh9FKSgdoyMxPUnTGxiqrPu_len1322():
    check_case("test_objects/zdpuAtn7kAwpmFLuVqqJZkycECDh9FKSgdoyMxPUnTGxiqrPu.ipce.cbor.gz")
def test_zdpuAw1gQp947YjdHZex7dbpcTCeuRTSoZzPzyCYDnyDt8BFH_len1322():
    check_case("test_objects/zdpuAw1gQp947YjdHZex7dbpcTCeuRTSoZzPzyCYDnyDt8BFH.ipce.cbor.gz")
def test_zdpuAvriQNY67wGkQNVnhcdAVzfhc5Zq9h8UfcA5Ea2vMvU8x_len1322():
    check_case("test_objects/zdpuAvriQNY67wGkQNVnhcdAVzfhc5Zq9h8UfcA5Ea2vMvU8x.ipce.cbor.gz")
def test_zdpuAtju7pgPhFPdKqrs1qKzMnDr9TPqV1Q8WV4y6agtCjCAF_len1323():
    check_case("test_objects/zdpuAtju7pgPhFPdKqrs1qKzMnDr9TPqV1Q8WV4y6agtCjCAF.ipce.cbor.gz")
def test_zdpuAtYzQQR9onwmyVCM3uoZQauFVNhXhrg3W6G9wGRJj7QVY_len1324():
    check_case("test_objects/zdpuAtYzQQR9onwmyVCM3uoZQauFVNhXhrg3W6G9wGRJj7QVY.ipce.cbor.gz")
def test_zdpuAuophuEPSvn1Ze9GeTsKaakBjmNBK9o4Jzc949wbqB31Z_len1324():
    check_case("test_objects/zdpuAuophuEPSvn1Ze9GeTsKaakBjmNBK9o4Jzc949wbqB31Z.ipce.cbor.gz")
def test_zdpuAq2pSb4KQGnV9L26kZxhv4vntujG39crvCPYPePDMu6br_len1324():
    check_case("test_objects/zdpuAq2pSb4KQGnV9L26kZxhv4vntujG39crvCPYPePDMu6br.ipce.cbor.gz")
def test_zdpuB26FfqJeMLkAYyQLnRYoSN248sddtatbxs4tjJKUoegnV_len1324():
    check_case("test_objects/zdpuB26FfqJeMLkAYyQLnRYoSN248sddtatbxs4tjJKUoegnV.ipce.cbor.gz")
def test_zdpuAnsudy9A4aYS3tMEybKAJFtFVVnZfJwFXJq75p7JcJQCW_len1325():
    check_case("test_objects/zdpuAnsudy9A4aYS3tMEybKAJFtFVVnZfJwFXJq75p7JcJQCW.ipce.cbor.gz")
def test_zdpuAsdYp6eoLXVFU3hsn276sp8tzpTwzieZt7SdfWcZGwZ2t_len1325():
    check_case("test_objects/zdpuAsdYp6eoLXVFU3hsn276sp8tzpTwzieZt7SdfWcZGwZ2t.ipce.cbor.gz")
def test_zdpuAxHfGpMnmQbhCmV8f6zErRPjxKmysruGNAm8hSX2DyN45_len1325():
    check_case("test_objects/zdpuAxHfGpMnmQbhCmV8f6zErRPjxKmysruGNAm8hSX2DyN45.ipce.cbor.gz")
def test_zdpuAwLcYhfiZTrTSKybXzSyzLiA1Z1SKFzPU2cq2HoH9dboc_len1325():
    check_case("test_objects/zdpuAwLcYhfiZTrTSKybXzSyzLiA1Z1SKFzPU2cq2HoH9dboc.ipce.cbor.gz")
def test_zdpuAt7RPYAEawEe8Y3VmXRmUZMVETvYpKxHUa76hXxAjogdd_len1325():
    check_case("test_objects/zdpuAt7RPYAEawEe8Y3VmXRmUZMVETvYpKxHUa76hXxAjogdd.ipce.cbor.gz")
def test_zdpuAomNYggknAn6WcKz3sb5tZoUpEBfhRVbuwMUYpWqgaWmb_len1325():
    check_case("test_objects/zdpuAomNYggknAn6WcKz3sb5tZoUpEBfhRVbuwMUYpWqgaWmb.ipce.cbor.gz")
def test_zdpuArnNTqAqoAud999k59uKGm8rYw4K2WZnys3oKyuwu9JnJ_len1326():
    check_case("test_objects/zdpuArnNTqAqoAud999k59uKGm8rYw4K2WZnys3oKyuwu9JnJ.ipce.cbor.gz")
def test_zdpuAyFsMZbXjTLYQrfJq7Li8EZNiRt1xhLLBipztfDiApkuW_len1327():
    check_case("test_objects/zdpuAyFsMZbXjTLYQrfJq7Li8EZNiRt1xhLLBipztfDiApkuW.ipce.cbor.gz")
def test_zdpuAnKWyogXoZxK9etkRiCxkBY7KmX1fvQbnDBoy88xSGngp_len1327():
    check_case("test_objects/zdpuAnKWyogXoZxK9etkRiCxkBY7KmX1fvQbnDBoy88xSGngp.ipce.cbor.gz")
def test_zdpuAzkXfySaDT9BjtJSLW8xLNGW9A37xgw7oiyGegcWzyDve_len1327():
    check_case("test_objects/zdpuAzkXfySaDT9BjtJSLW8xLNGW9A37xgw7oiyGegcWzyDve.ipce.cbor.gz")
def test_zdpuAq7AwquSZ3wuzcs7RtjAoPHPPBDWCfhWrv5ShMZKPGrJA_len1327():
    check_case("test_objects/zdpuAq7AwquSZ3wuzcs7RtjAoPHPPBDWCfhWrv5ShMZKPGrJA.ipce.cbor.gz")
def test_zdpuAtPt5S8ibTpbgY4o3BTBY4Ra1zggGCHN7XFWeG3tnZshC_len1327():
    check_case("test_objects/zdpuAtPt5S8ibTpbgY4o3BTBY4Ra1zggGCHN7XFWeG3tnZshC.ipce.cbor.gz")
def test_zdpuAuR97mKKfWG71uNohuwfn5mWW3fdncVP7LzvBsQgtjXYK_len1327():
    check_case("test_objects/zdpuAuR97mKKfWG71uNohuwfn5mWW3fdncVP7LzvBsQgtjXYK.ipce.cbor.gz")
def test_zdpuB3W1aHtC9xGEPUt5s4VjDpPbSwM8D9zvqqnxLwViPhc1C_len1327():
    check_case("test_objects/zdpuB3W1aHtC9xGEPUt5s4VjDpPbSwM8D9zvqqnxLwViPhc1C.ipce.cbor.gz")
# Auto-generated regression cases: each one feeds a stored IPCE fixture to check_case.
def test_zdpuArJegqPXQ2CbCb4NDYkTEFuFYVdVYMABCBp3WCZqR4Z5q_len1327():
    check_case("test_objects/zdpuArJegqPXQ2CbCb4NDYkTEFuFYVdVYMABCBp3WCZqR4Z5q.ipce.cbor.gz")
def test_zdpuB14RBeur5n9uxTDuKFnxE5xn4ggWeKahdyjJhzvAeMqcA_len1327():
    check_case("test_objects/zdpuB14RBeur5n9uxTDuKFnxE5xn4ggWeKahdyjJhzvAeMqcA.ipce.cbor.gz")
def test_zdpuAkXLwZ9etyNhXYd4HXWrq2Q1gsmkDRyph58PPgpcphjvW_len1328():
    check_case("test_objects/zdpuAkXLwZ9etyNhXYd4HXWrq2Q1gsmkDRyph58PPgpcphjvW.ipce.cbor.gz")
def test_zdpuAqbq7qZDXBAUvGc1G2bNqpHnum4MncwF4DHHyMaRq32nj_len1328():
    check_case("test_objects/zdpuAqbq7qZDXBAUvGc1G2bNqpHnum4MncwF4DHHyMaRq32nj.ipce.cbor.gz")
def test_zdpuAn4CpgbSXyGvuwQYqD6qMBz9Egb4e1UvcxhNcQSpPWD3L_len1329():
    check_case("test_objects/zdpuAn4CpgbSXyGvuwQYqD6qMBz9Egb4e1UvcxhNcQSpPWD3L.ipce.cbor.gz")
def test_zdpuAtcUZomdAcbbonguAbXzaCfjRbMz4a1cortmnDNqBjpCg_len1329():
    check_case("test_objects/zdpuAtcUZomdAcbbonguAbXzaCfjRbMz4a1cortmnDNqBjpCg.ipce.cbor.gz")
def test_zdpuAtQ9MJRcVvoMvfXHh747UvEVcsNrFgggeh4RD987M9pJH_len1329():
    check_case("test_objects/zdpuAtQ9MJRcVvoMvfXHh747UvEVcsNrFgggeh4RD987M9pJH.ipce.cbor.gz")
def test_zdpuB3Csd8s2EnMfqYaDD6923KCZvC7bNBov7kRCvRWs71kwG_len1330():
    check_case("test_objects/zdpuB3Csd8s2EnMfqYaDD6923KCZvC7bNBov7kRCvRWs71kwG.ipce.cbor.gz")
def test_zdpuB1uguMwm6hcyyjNohN7oVgZDpaqZq6GwFFihunG1Ymch5_len1330():
    check_case("test_objects/zdpuB1uguMwm6hcyyjNohN7oVgZDpaqZq6GwFFihunG1Ymch5.ipce.cbor.gz")
def test_zdpuAugvzgT5oBN9pxd9eVmjWTFsmCeCoMDeeFA8baD8pck44_len1330():
    check_case("test_objects/zdpuAugvzgT5oBN9pxd9eVmjWTFsmCeCoMDeeFA8baD8pck44.ipce.cbor.gz")
def test_zdpuAsgGd1iuLm3JsvGTikADn5RZ9Q3RUZgtPgHnBym7RDphT_len1331():
    check_case("test_objects/zdpuAsgGd1iuLm3JsvGTikADn5RZ9Q3RUZgtPgHnBym7RDphT.ipce.cbor.gz")
def test_zdpuB2nYb78XLDvuyX67se7M25kEZizi9K7PbvZcmksHcwJJg_len1331():
    check_case("test_objects/zdpuB2nYb78XLDvuyX67se7M25kEZizi9K7PbvZcmksHcwJJg.ipce.cbor.gz")
def test_zdpuAyucu3VsdnWEJGq2k4dER991g7E9dLqVurs3DfLrznxDV_len1331():
    check_case("test_objects/zdpuAyucu3VsdnWEJGq2k4dER991g7E9dLqVurs3DfLrznxDV.ipce.cbor.gz")
def test_zdpuApzQXDKDFod7DWXTAuqpz7hwtVgyXgVeftZhjNpx88DrB_len1331():
    check_case("test_objects/zdpuApzQXDKDFod7DWXTAuqpz7hwtVgyXgVeftZhjNpx88DrB.ipce.cbor.gz")
def test_zdpuAuwjh5KFFiokipXbjBV6mkB9cv6VrEQHyRwubiWsAi3Mi_len1332():
    check_case("test_objects/zdpuAuwjh5KFFiokipXbjBV6mkB9cv6VrEQHyRwubiWsAi3Mi.ipce.cbor.gz")
def test_zdpuArKqc5jZCx9PyNAnYzmFu7JnnQ3SvnUzDqBxDrBBdBYYt_len1332():
    check_case("test_objects/zdpuArKqc5jZCx9PyNAnYzmFu7JnnQ3SvnUzDqBxDrBBdBYYt.ipce.cbor.gz")
def test_zdpuAx3be4K62FpHtNXeYhWcc25GWE6DtVwp3c5QEkHSwyRKw_len1332():
    check_case("test_objects/zdpuAx3be4K62FpHtNXeYhWcc25GWE6DtVwp3c5QEkHSwyRKw.ipce.cbor.gz")
def test_zdpuAsBsBu7xRuGEaAENRUmbtwFwRmw4FGnbVtVNtYWFeFBW4_len1332():
    check_case("test_objects/zdpuAsBsBu7xRuGEaAENRUmbtwFwRmw4FGnbVtVNtYWFeFBW4.ipce.cbor.gz")
def test_zdpuAyvzwGVyMNKDuyim6VwfFeAcuVxsTHkhJU67aH6gy8PFZ_len1333():
    check_case("test_objects/zdpuAyvzwGVyMNKDuyim6VwfFeAcuVxsTHkhJU67aH6gy8PFZ.ipce.cbor.gz")
def test_zdpuAnTwB7Ho9azKouhxZHA34MJnUKYQMUSqBFcT1KTpPKJaz_len1333():
    check_case("test_objects/zdpuAnTwB7Ho9azKouhxZHA34MJnUKYQMUSqBFcT1KTpPKJaz.ipce.cbor.gz")
def test_zdpuAw8Y17knCHZ1yXEGWPwCcnBhhmuEe5LHpF7zpVhSss62K_len1333():
    check_case("test_objects/zdpuAw8Y17knCHZ1yXEGWPwCcnBhhmuEe5LHpF7zpVhSss62K.ipce.cbor.gz")
def test_zdpuAsXRJ86EPMXsSVfF7wwJKpPmu3JfaUGwAc7gFeLqWJQEs_len1334():
    check_case("test_objects/zdpuAsXRJ86EPMXsSVfF7wwJKpPmu3JfaUGwAc7gFeLqWJQEs.ipce.cbor.gz")
def test_zdpuAnGKzgkhh8ZSifxFPKzs3tiAYBhgkzaNyXeSbcgQCUgNb_len1334():
    check_case("test_objects/zdpuAnGKzgkhh8ZSifxFPKzs3tiAYBhgkzaNyXeSbcgQCUgNb.ipce.cbor.gz")
def test_zdpuB2Vossdsv7Kx7PmTdUfiFPoncipSPzu9nbLEwZbGZmoje_len1334():
    check_case("test_objects/zdpuB2Vossdsv7Kx7PmTdUfiFPoncipSPzu9nbLEwZbGZmoje.ipce.cbor.gz")
def test_zdpuAqQJSC8VTuPywGC1u6PkPzcEHw3CNNasCrXN3UMfrK6p8_len1334():
    check_case("test_objects/zdpuAqQJSC8VTuPywGC1u6PkPzcEHw3CNNasCrXN3UMfrK6p8.ipce.cbor.gz")
def test_zdpuAukRoj18iqBu9viGzh4NbNRVCnZrDt8cH1aVmqNZwE5DZ_len1334():
    check_case("test_objects/zdpuAukRoj18iqBu9viGzh4NbNRVCnZrDt8cH1aVmqNZwE5DZ.ipce.cbor.gz")
def test_zdpuAvyi38cLZhrn4iJ5xTzVRzn5UFseeedKUggFTzjpGZRFe_len1334():
    check_case("test_objects/zdpuAvyi38cLZhrn4iJ5xTzVRzn5UFseeedKUggFTzjpGZRFe.ipce.cbor.gz")
def test_zdpuAoFGPBmre345EWTC13MT2fyjVMupAhBqJYTdhdGPr92HZ_len1334():
    check_case("test_objects/zdpuAoFGPBmre345EWTC13MT2fyjVMupAhBqJYTdhdGPr92HZ.ipce.cbor.gz")
def test_zdpuAuXzrLmnRNdta2Aoj6v9fxHSqXsy1QH5RyotTWYZ5Nbqc_len1334():
    check_case("test_objects/zdpuAuXzrLmnRNdta2Aoj6v9fxHSqXsy1QH5RyotTWYZ5Nbqc.ipce.cbor.gz")
def test_zdpuB2EMMqjNn5CjBHQd1S1bYVi3vVcD9245e7RWKpVXuM4DS_len1335():
    check_case("test_objects/zdpuB2EMMqjNn5CjBHQd1S1bYVi3vVcD9245e7RWKpVXuM4DS.ipce.cbor.gz")
def test_zdpuAo4hPbcN6CgXDAR6VeUYajcAeja98KBkWAhfLKcaEtkd5_len1335():
    check_case("test_objects/zdpuAo4hPbcN6CgXDAR6VeUYajcAeja98KBkWAhfLKcaEtkd5.ipce.cbor.gz")
def test_zdpuAuv5YAn1tS8mVE4uW8wHDEXZfF4DLEVLLBJ5ADjhodnfa_len1335():
    check_case("test_objects/zdpuAuv5YAn1tS8mVE4uW8wHDEXZfF4DLEVLLBJ5ADjhodnfa.ipce.cbor.gz")
def test_zdpuB3doYuyMm7DPs225Y2FtFJGuN9YUDEaGTq5LU4BPHhar7_len1335():
    check_case("test_objects/zdpuB3doYuyMm7DPs225Y2FtFJGuN9YUDEaGTq5LU4BPHhar7.ipce.cbor.gz")
def test_zdpuB33heW6v35KVkh2fUS35J31JeZBobMEbdkUKPBrQkMbzq_len1335():
    check_case("test_objects/zdpuB33heW6v35KVkh2fUS35J31JeZBobMEbdkUKPBrQkMbzq.ipce.cbor.gz")
def test_zdpuAyPtj2UZjQDsfKZ8HPmkeeVozZaLsCPTiVco54kv5UTWA_len1335():
    check_case("test_objects/zdpuAyPtj2UZjQDsfKZ8HPmkeeVozZaLsCPTiVco54kv5UTWA.ipce.cbor.gz")
def test_zdpuAz3XjFg3AUHFr66j7vHX66tR5oke86MzQtLBuqbabDibQ_len1335():
    check_case("test_objects/zdpuAz3XjFg3AUHFr66j7vHX66tR5oke86MzQtLBuqbabDibQ.ipce.cbor.gz")
def test_zdpuAzjHwtGnzxzrKn5YASVhYbHmU29wKruHvhRH7SjLMZ16A_len1336():
    check_case("test_objects/zdpuAzjHwtGnzxzrKn5YASVhYbHmU29wKruHvhRH7SjLMZ16A.ipce.cbor.gz")
def test_zdpuAnDku7kNgrrsiGAfqfWTbMorAW4tGrnprSSYZzpPCJufW_len1336():
    check_case("test_objects/zdpuAnDku7kNgrrsiGAfqfWTbMorAW4tGrnprSSYZzpPCJufW.ipce.cbor.gz")
def test_zdpuAo4eCXfgTUpWX69Ff5e8gdkenvEeKc2Qrj6skgGDCCpgg_len1336():
    check_case("test_objects/zdpuAo4eCXfgTUpWX69Ff5e8gdkenvEeKc2Qrj6skgGDCCpgg.ipce.cbor.gz")
def test_zdpuB31gH3fc9GuycfYdbqN824isBcQrKJAo7T8vc4e9g2zsF_len1336():
    check_case("test_objects/zdpuB31gH3fc9GuycfYdbqN824isBcQrKJAo7T8vc4e9g2zsF.ipce.cbor.gz")
def test_zdpuAynWFY3Rsxu4pkubbJnezLTnjL9Hv1xGVgUT7ycTFHnXq_len1336():
    check_case("test_objects/zdpuAynWFY3Rsxu4pkubbJnezLTnjL9Hv1xGVgUT7ycTFHnXq.ipce.cbor.gz")
def test_zdpuAoxVMfwTBkDUrYFyuUpM4CrH4eBYaxgu71w13dgi7C7No_len1336():
    check_case("test_objects/zdpuAoxVMfwTBkDUrYFyuUpM4CrH4eBYaxgu71w13dgi7C7No.ipce.cbor.gz")
def test_zdpuB1wMSNmdc29ks8SU6UTgjwTm3SZsdPgWrpmriCcCUEVqE_len1336():
    check_case("test_objects/zdpuB1wMSNmdc29ks8SU6UTgjwTm3SZsdPgWrpmriCcCUEVqE.ipce.cbor.gz")
# Auto-generated regression cases: each one feeds a stored IPCE fixture to check_case.
def test_zdpuAzNLX9tYaSWJoRwDmLiFm4NVatsccTTYpKBWHxzXMgeUt_len1336():
    check_case("test_objects/zdpuAzNLX9tYaSWJoRwDmLiFm4NVatsccTTYpKBWHxzXMgeUt.ipce.cbor.gz")
def test_zdpuAqUT5VpPbrgixibkbF78U3EDWiGV9jAQTkZ3C1KMF4TSi_len1336():
    check_case("test_objects/zdpuAqUT5VpPbrgixibkbF78U3EDWiGV9jAQTkZ3C1KMF4TSi.ipce.cbor.gz")
def test_zdpuB1Nkco4KVqKM2cqiJdW5jBkH6HWKvBF6vHTD9SDdBqC8N_len1336():
    check_case("test_objects/zdpuB1Nkco4KVqKM2cqiJdW5jBkH6HWKvBF6vHTD9SDdBqC8N.ipce.cbor.gz")
def test_zdpuApQagvkfGQuybtPya9bD7cmhX3A1eVnB9NKK8eK5dKeis_len1337():
    check_case("test_objects/zdpuApQagvkfGQuybtPya9bD7cmhX3A1eVnB9NKK8eK5dKeis.ipce.cbor.gz")
def test_zdpuAz5AhvqX3C5kysRQ5oaFHdCecPw3eJsWU8YLR1fUXMFfU_len1337():
    check_case("test_objects/zdpuAz5AhvqX3C5kysRQ5oaFHdCecPw3eJsWU8YLR1fUXMFfU.ipce.cbor.gz")
def test_zdpuAvqsLnhb7jCcEQYC1w9ytyDtQT5YTKzCcRtk7AvppWJQC_len1337():
    check_case("test_objects/zdpuAvqsLnhb7jCcEQYC1w9ytyDtQT5YTKzCcRtk7AvppWJQC.ipce.cbor.gz")
def test_zdpuArHuEVZLYze4YkAhfPkbVWEJH4ETz4rYcq2djnxAe1tY7_len1337():
    check_case("test_objects/zdpuArHuEVZLYze4YkAhfPkbVWEJH4ETz4rYcq2djnxAe1tY7.ipce.cbor.gz")
def test_zdpuAzZfzR4rD9XGajMRSRWh6cQ8WLYGNuAYnsKBMo2Eu9ZXc_len1337():
    check_case("test_objects/zdpuAzZfzR4rD9XGajMRSRWh6cQ8WLYGNuAYnsKBMo2Eu9ZXc.ipce.cbor.gz")
def test_zdpuAo6Y5Booo4EJk2Zbje2EFRfhFTnNtGm7iMHXcuunS4o4S_len1337():
    check_case("test_objects/zdpuAo6Y5Booo4EJk2Zbje2EFRfhFTnNtGm7iMHXcuunS4o4S.ipce.cbor.gz")
def test_zdpuB1XT5FfTsRoBGWZsfyMYgHNX8qWD39s8AWAz7G9BitBKK_len1337():
    check_case("test_objects/zdpuB1XT5FfTsRoBGWZsfyMYgHNX8qWD39s8AWAz7G9BitBKK.ipce.cbor.gz")
def test_zdpuAnmKwgH9UV3X3NSgEMiSV7cTykQqDWQDXStUgN1eiu2tB_len1338():
    check_case("test_objects/zdpuAnmKwgH9UV3X3NSgEMiSV7cTykQqDWQDXStUgN1eiu2tB.ipce.cbor.gz")
def test_zdpuAnMTvhdRajfqDxC3dMfqKcXLqKdA1ktNxVkKKR6vJcqCE_len1338():
    check_case("test_objects/zdpuAnMTvhdRajfqDxC3dMfqKcXLqKdA1ktNxVkKKR6vJcqCE.ipce.cbor.gz")
def test_zdpuAysvVULR9mtdEScWQ6sbVLd6PE1cWWKYbsXioNF5ypYdF_len1338():
    check_case("test_objects/zdpuAysvVULR9mtdEScWQ6sbVLd6PE1cWWKYbsXioNF5ypYdF.ipce.cbor.gz")
def test_zdpuAwfLDGj4DnxfxfvA7qHcpMRiwgW66WNvHZwB5G4hepK6Y_len1338():
    check_case("test_objects/zdpuAwfLDGj4DnxfxfvA7qHcpMRiwgW66WNvHZwB5G4hepK6Y.ipce.cbor.gz")
def test_zdpuAuyuGafLbeccmmqc1vJ5P32JrZKQZmpXAHuC7WcmG6qkd_len1338():
    check_case("test_objects/zdpuAuyuGafLbeccmmqc1vJ5P32JrZKQZmpXAHuC7WcmG6qkd.ipce.cbor.gz")
def test_zdpuB33k16wbpRBcQRoW4cVj8vs4MLzPbdPyHCAzzH1ysg7Fz_len1338():
    check_case("test_objects/zdpuB33k16wbpRBcQRoW4cVj8vs4MLzPbdPyHCAzzH1ysg7Fz.ipce.cbor.gz")
def test_zdpuAoRGLaJS4H72E9tEMWcSWV7MNjmdrWfYgDxcVEBtHWJCX_len1339():
    check_case("test_objects/zdpuAoRGLaJS4H72E9tEMWcSWV7MNjmdrWfYgDxcVEBtHWJCX.ipce.cbor.gz")
def test_zdpuAuo9Hi6YQZKs5c4WxKd367Fbkifo79XRThgyPehwjorqU_len1339():
    check_case("test_objects/zdpuAuo9Hi6YQZKs5c4WxKd367Fbkifo79XRThgyPehwjorqU.ipce.cbor.gz")
def test_zdpuAz3pkpeVFfJoJamvbQdYkQUCyCNDpqdixbqq2JPbJQ5LK_len1339():
    check_case("test_objects/zdpuAz3pkpeVFfJoJamvbQdYkQUCyCNDpqdixbqq2JPbJQ5LK.ipce.cbor.gz")
def test_zdpuAqhHyVsZraCQdiDBBc8Ti2Aaqc11waUQ5nTchmwziuAmG_len1339():
    check_case("test_objects/zdpuAqhHyVsZraCQdiDBBc8Ti2Aaqc11waUQ5nTchmwziuAmG.ipce.cbor.gz")
def test_zdpuAtdKPhGwYLyT5dpmoZNsh4iSEGr73LehpA6NDvFXE82ad_len1339():
    check_case("test_objects/zdpuAtdKPhGwYLyT5dpmoZNsh4iSEGr73LehpA6NDvFXE82ad.ipce.cbor.gz")
def test_zdpuAsgA4EGKNVzGCSNowz5GmfPiuTUbr4t24nwPPRYUmEk5k_len1339():
    check_case("test_objects/zdpuAsgA4EGKNVzGCSNowz5GmfPiuTUbr4t24nwPPRYUmEk5k.ipce.cbor.gz")
def test_zdpuArE1NbNd8n7EVKaf5LnAcYgsKLwC1tpTbdGcRpds1FDVE_len1339():
    check_case("test_objects/zdpuArE1NbNd8n7EVKaf5LnAcYgsKLwC1tpTbdGcRpds1FDVE.ipce.cbor.gz")
def test_zdpuAzT7b1XdB2XoJwGvturgpnYs4jHXTLhCFpj4oF4AGxteV_len1340():
    check_case("test_objects/zdpuAzT7b1XdB2XoJwGvturgpnYs4jHXTLhCFpj4oF4AGxteV.ipce.cbor.gz")
def test_zdpuAtEcKycqoJJWR4qsQRb2Xcvn5BtVJE42xvc1RaN5pLMyL_len1340():
    check_case("test_objects/zdpuAtEcKycqoJJWR4qsQRb2Xcvn5BtVJE42xvc1RaN5pLMyL.ipce.cbor.gz")
def test_zdpuB1iagCorCMgqfGXWYMT7TPqXs1GtCcPgYoa9ZLJNgE1nW_len1340():
    check_case("test_objects/zdpuB1iagCorCMgqfGXWYMT7TPqXs1GtCcPgYoa9ZLJNgE1nW.ipce.cbor.gz")
def test_zdpuAnbaSQWLy8gJZW8S3feQUBxmyHuC1A45bEBKomEKEEdxw_len1340():
    check_case("test_objects/zdpuAnbaSQWLy8gJZW8S3feQUBxmyHuC1A45bEBKomEKEEdxw.ipce.cbor.gz")
def test_zdpuAt1XEsyGgwcVMQvj4MWKwKdpRfYydcSvmnAcbUQuqJGx1_len1340():
    check_case("test_objects/zdpuAt1XEsyGgwcVMQvj4MWKwKdpRfYydcSvmnAcbUQuqJGx1.ipce.cbor.gz")
def test_zdpuAtTjetwhzS4DV3Mte97tkkkqZwunovjd1UgZXnTCW7bGu_len1340():
    check_case("test_objects/zdpuAtTjetwhzS4DV3Mte97tkkkqZwunovjd1UgZXnTCW7bGu.ipce.cbor.gz")
def test_zdpuAwBEzTCZTweuNgHqbjieNWU3YFT1kucKPGVwqRXDcmy6x_len1341():
    check_case("test_objects/zdpuAwBEzTCZTweuNgHqbjieNWU3YFT1kucKPGVwqRXDcmy6x.ipce.cbor.gz")
def test_zdpuAxWnMvCokTd2s16ipSYHz7A6Rj4TxTyDSTtsd4brcUin5_len1341():
    check_case("test_objects/zdpuAxWnMvCokTd2s16ipSYHz7A6Rj4TxTyDSTtsd4brcUin5.ipce.cbor.gz")
def test_zdpuApRmta1M3FQsUcKpsb2hfpNGEATFziizfcNuU83WZaytm_len1341():
    check_case("test_objects/zdpuApRmta1M3FQsUcKpsb2hfpNGEATFziizfcNuU83WZaytm.ipce.cbor.gz")
def test_zdpuAqmuXn6uQdiHmxYBcAQzPpJhGstMniWzFnK8My5i7m2xA_len1342():
    check_case("test_objects/zdpuAqmuXn6uQdiHmxYBcAQzPpJhGstMniWzFnK8My5i7m2xA.ipce.cbor.gz")
def test_zdpuAv2Jpf3zeHq86NNPV5LTjH2BbztVGVM6Qgz7uRjVMdGfh_len1342():
    check_case("test_objects/zdpuAv2Jpf3zeHq86NNPV5LTjH2BbztVGVM6Qgz7uRjVMdGfh.ipce.cbor.gz")
def test_zdpuAyZZPzoo226ecJqspHRwkESQ6dQiqWw7am2ji6AyNLwrZ_len1342():
    check_case("test_objects/zdpuAyZZPzoo226ecJqspHRwkESQ6dQiqWw7am2ji6AyNLwrZ.ipce.cbor.gz")
def test_zdpuAw29DHzmLFE7enQWCGyNGAxX3AzJUxjrnZki2PLnhjrdG_len1342():
    check_case("test_objects/zdpuAw29DHzmLFE7enQWCGyNGAxX3AzJUxjrnZki2PLnhjrdG.ipce.cbor.gz")
def test_zdpuAtBMB9FCVKhG7bWexud1tKhkmuTkWpZJN48Q5cZc6xDQ5_len1342():
    check_case("test_objects/zdpuAtBMB9FCVKhG7bWexud1tKhkmuTkWpZJN48Q5cZc6xDQ5.ipce.cbor.gz")
def test_zdpuB1SLbnGk6gnux2kVEx1UvrN7Xw733zjxMNL5Bh5oZVxeY_len1342():
    check_case("test_objects/zdpuB1SLbnGk6gnux2kVEx1UvrN7Xw733zjxMNL5Bh5oZVxeY.ipce.cbor.gz")
def test_zdpuAvR9mbodi6jPxt2NEAHZk7cfzy1w1rLGCib7fix8j1dLR_len1342():
    check_case("test_objects/zdpuAvR9mbodi6jPxt2NEAHZk7cfzy1w1rLGCib7fix8j1dLR.ipce.cbor.gz")
def test_zdpuAuYvdQPFiXYv2XEvy558pu2L6guajynJ1jzwt4TUQj56N_len1342():
    check_case("test_objects/zdpuAuYvdQPFiXYv2XEvy558pu2L6guajynJ1jzwt4TUQj56N.ipce.cbor.gz")
def test_zdpuAranxxK23ukNVt6vQA5VeC8TCJ8QsmSdJcS1UuFpQeGA4_len1343():
    check_case("test_objects/zdpuAranxxK23ukNVt6vQA5VeC8TCJ8QsmSdJcS1UuFpQeGA4.ipce.cbor.gz")
def test_zdpuB2fi8Neu9BShurdBhpqsiQDbz4fgWX2PxZ7UPqNYFZo59_len1343():
    check_case("test_objects/zdpuB2fi8Neu9BShurdBhpqsiQDbz4fgWX2PxZ7UPqNYFZo59.ipce.cbor.gz")
def test_zdpuAnczH6DwJFPgcjuzKD3EA4aqJmJDfajwCkknZtfaXWibb_len1343():
    check_case("test_objects/zdpuAnczH6DwJFPgcjuzKD3EA4aqJmJDfajwCkknZtfaXWibb.ipce.cbor.gz")
# Auto-generated regression cases: each one feeds a stored IPCE fixture to check_case.
def test_zdpuAmJFstvexVaHTsJjhSXTUrMfaLmkNCeH618zqaCdgm1Kq_len1345():
    check_case("test_objects/zdpuAmJFstvexVaHTsJjhSXTUrMfaLmkNCeH618zqaCdgm1Kq.ipce.cbor.gz")
def test_zdpuB3WA6MF5nXBkkJm9DY4f9kdkNJKBsSa1N6kic1BiqEbQk_len1345():
    check_case("test_objects/zdpuB3WA6MF5nXBkkJm9DY4f9kdkNJKBsSa1N6kic1BiqEbQk.ipce.cbor.gz")
def test_zdpuAxh5JL8qKWQKoaiJwxqS6aNjAeaGf4pY6r85xCcNW3dEZ_len1346():
    check_case("test_objects/zdpuAxh5JL8qKWQKoaiJwxqS6aNjAeaGf4pY6r85xCcNW3dEZ.ipce.cbor.gz")
def test_zdpuArThdkdvtogvgmEW5LZ5QVRBD6Zqke96PBDnBVxfcszfs_len1347():
    check_case("test_objects/zdpuArThdkdvtogvgmEW5LZ5QVRBD6Zqke96PBDnBVxfcszfs.ipce.cbor.gz")
def test_zdpuAtK4PdusmzVknV7DXjso6EKmUgjn95qRZ962p1ZbZvacd_len1347():
    check_case("test_objects/zdpuAtK4PdusmzVknV7DXjso6EKmUgjn95qRZ962p1ZbZvacd.ipce.cbor.gz")
def test_zdpuB3Uique25yZz2DcLFUe6AMz9ZHuehHXYf1kwq5MWm1AFh_len1348():
    check_case("test_objects/zdpuB3Uique25yZz2DcLFUe6AMz9ZHuehHXYf1kwq5MWm1AFh.ipce.cbor.gz")
def test_zdpuAuixgRg89ZwG2DuUX6gwDzwDkdDUeZum3x5L7dBxKkyrK_len1348():
    check_case("test_objects/zdpuAuixgRg89ZwG2DuUX6gwDzwDkdDUeZum3x5L7dBxKkyrK.ipce.cbor.gz")
def test_zdpuAtbVMNXNx2Rchd6tzuYdc6WmpiPCtQkyCmZs7nNVTQeTP_len1350():
    check_case("test_objects/zdpuAtbVMNXNx2Rchd6tzuYdc6WmpiPCtQkyCmZs7nNVTQeTP.ipce.cbor.gz")
def test_zdpuAxjpXHspt6WNnAe5mX6TbF4EcW1WL2ogH13kdQyEL2VzV_len1350():
    check_case("test_objects/zdpuAxjpXHspt6WNnAe5mX6TbF4EcW1WL2ogH13kdQyEL2VzV.ipce.cbor.gz")
def test_zdpuAxRQLTTwgUKW5VhqwqpsECKzQZkpxXVuLkgFPjRf7c2sb_len1350():
    check_case("test_objects/zdpuAxRQLTTwgUKW5VhqwqpsECKzQZkpxXVuLkgFPjRf7c2sb.ipce.cbor.gz")
def test_zdpuAtDTnJ3ncRrsmzsg1X2dbF53vZbGeV1gqr6vbANUQWRWe_len1351():
    check_case("test_objects/zdpuAtDTnJ3ncRrsmzsg1X2dbF53vZbGeV1gqr6vbANUQWRWe.ipce.cbor.gz")
def test_zdpuAyfCH4ZTepvg9CKgG1YWFuEFeD7DKbVP1R5JDc7YAEF5E_len1352():
    check_case("test_objects/zdpuAyfCH4ZTepvg9CKgG1YWFuEFeD7DKbVP1R5JDc7YAEF5E.ipce.cbor.gz")
def test_zdpuAsszjDKKnEioqnArTdm1f4zxxqNobUypHtdG4TXsKGu92_len1352():
    check_case("test_objects/zdpuAsszjDKKnEioqnArTdm1f4zxxqNobUypHtdG4TXsKGu92.ipce.cbor.gz")
def test_zdpuAucntU6Tju97hhSBwjavEvSSoSg56Mcag4Hurs8DXGb1F_len1352():
    check_case("test_objects/zdpuAucntU6Tju97hhSBwjavEvSSoSg56Mcag4Hurs8DXGb1F.ipce.cbor.gz")
def test_zdpuAoxqY2dxdBL71VjF2SzS54w7MapJYt3z5pqDhykKrX5dY_len1352():
    check_case("test_objects/zdpuAoxqY2dxdBL71VjF2SzS54w7MapJYt3z5pqDhykKrX5dY.ipce.cbor.gz")
def test_zdpuB2iZJXZ5gPNL4XFCFhzm9fxyb7tBthH8GDvfmE1T12mMn_len1353():
    check_case("test_objects/zdpuB2iZJXZ5gPNL4XFCFhzm9fxyb7tBthH8GDvfmE1T12mMn.ipce.cbor.gz")
def test_zdpuB2NVQTrW8XGDDc5CUTNapYCQjrpNBbegSZTK9Vxxax93M_len1353():
    check_case("test_objects/zdpuB2NVQTrW8XGDDc5CUTNapYCQjrpNBbegSZTK9Vxxax93M.ipce.cbor.gz")
def test_zdpuB1wuFn1FpvYoG5UwcRm2Wk7zrSUzMbsrFe1EoMmxpeEhj_len1354():
    check_case("test_objects/zdpuB1wuFn1FpvYoG5UwcRm2Wk7zrSUzMbsrFe1EoMmxpeEhj.ipce.cbor.gz")
def test_zdpuAnQ2T8eK216mhyphb5iQ2bsw2e7TgFwmTVwh1gp9mqKSg_len1354():
    check_case("test_objects/zdpuAnQ2T8eK216mhyphb5iQ2bsw2e7TgFwmTVwh1gp9mqKSg.ipce.cbor.gz")
def test_zdpuAxjMjBT62Zrv4z8zncZTysW1aMw89NvYdFhBT6N5iEweF_len1355():
    check_case("test_objects/zdpuAxjMjBT62Zrv4z8zncZTysW1aMw89NvYdFhBT6N5iEweF.ipce.cbor.gz")
def test_zdpuAtzHdrg9GDiUhQKm9BkSfWnQrU2k5Rvf1Wkx5RXoSyQJj_len1355():
    check_case("test_objects/zdpuAtzHdrg9GDiUhQKm9BkSfWnQrU2k5Rvf1Wkx5RXoSyQJj.ipce.cbor.gz")
def test_zdpuAuijpd27W4QUEaNG31pF4sFR7tUaV2fnUUjQXE3PqJZfn_len1355():
    check_case("test_objects/zdpuAuijpd27W4QUEaNG31pF4sFR7tUaV2fnUUjQXE3PqJZfn.ipce.cbor.gz")
def test_zdpuAvSHMhqsi4pAs6crXrZj1LYDMtR1WcAsTTb2HF9QhgRfZ_len1355():
    check_case("test_objects/zdpuAvSHMhqsi4pAs6crXrZj1LYDMtR1WcAsTTb2HF9QhgRfZ.ipce.cbor.gz")
def test_zdpuAuhP2vqu2cRHLyLZMat7CWde37SiBdmuCrSDHydppLriM_len1356():
    check_case("test_objects/zdpuAuhP2vqu2cRHLyLZMat7CWde37SiBdmuCrSDHydppLriM.ipce.cbor.gz")
def test_zdpuAw6DAHKUHD3R61k3z52nkHTLHzD6LgyevRChAYo4f5jDZ_len1356():
    check_case("test_objects/zdpuAw6DAHKUHD3R61k3z52nkHTLHzD6LgyevRChAYo4f5jDZ.ipce.cbor.gz")
def test_zdpuAzDFbjwfpiBsmmbE5dyKFpSVCaAkvYewC2A2t1oJtrRdM_len1356():
    check_case("test_objects/zdpuAzDFbjwfpiBsmmbE5dyKFpSVCaAkvYewC2A2t1oJtrRdM.ipce.cbor.gz")
def test_zdpuAqZ6LVPNJJbJu1HU19S9H3aHDLPo6Y4m6T92849caJvzb_len1358():
    check_case("test_objects/zdpuAqZ6LVPNJJbJu1HU19S9H3aHDLPo6Y4m6T92849caJvzb.ipce.cbor.gz")
def test_zdpuAtkt8Av6VJAFxKjc5JsdKaxxBmedR1HkxiUXVjPwWbpQZ_len1358():
    check_case("test_objects/zdpuAtkt8Av6VJAFxKjc5JsdKaxxBmedR1HkxiUXVjPwWbpQZ.ipce.cbor.gz")
def test_zdpuAu6mN9SxDY9y9pnZEUQPRofsqW8zECXov75mzTtNWmowD_len1358():
    check_case("test_objects/zdpuAu6mN9SxDY9y9pnZEUQPRofsqW8zECXov75mzTtNWmowD.ipce.cbor.gz")
def test_zdpuApgVSi9yehNezm1SgSxag6uf2phFTruWGPGZ3zYaW285c_len1359():
    check_case("test_objects/zdpuApgVSi9yehNezm1SgSxag6uf2phFTruWGPGZ3zYaW285c.ipce.cbor.gz")
def test_zdpuAucvpbBr4u7zKCfFCgwnUtsv3AZuqmj76A1v4uXUCWBDw_len1359():
    check_case("test_objects/zdpuAucvpbBr4u7zKCfFCgwnUtsv3AZuqmj76A1v4uXUCWBDw.ipce.cbor.gz")
def test_zdpuB2wjPtWckAYQjd5FAsTubGxSxtPnrLLypMZW6wMCYJ2wf_len1360():
    check_case("test_objects/zdpuB2wjPtWckAYQjd5FAsTubGxSxtPnrLLypMZW6wMCYJ2wf.ipce.cbor.gz")
def test_zdpuAtcHRB3tjypWLqZpmTqYHnG5hnD4QCPpHi1EMsyoooR5j_len1360():
    check_case("test_objects/zdpuAtcHRB3tjypWLqZpmTqYHnG5hnD4QCPpHi1EMsyoooR5j.ipce.cbor.gz")
def test_zdpuB39iek3ri7ZJbmfHgfnwAGxiChe8ph2APg8v4BsXiELvV_len1360():
    check_case("test_objects/zdpuB39iek3ri7ZJbmfHgfnwAGxiChe8ph2APg8v4BsXiELvV.ipce.cbor.gz")
def test_zdpuAu2Rfv6uPW5zZLsauJ8ftsfntduYFW5HJJBHn5deLKoEM_len1360():
    check_case("test_objects/zdpuAu2Rfv6uPW5zZLsauJ8ftsfntduYFW5HJJBHn5deLKoEM.ipce.cbor.gz")
def test_zdpuAw6xA2TCxZFmiVo952F3tsczGCb3EyP115mgUqai8qCh1_len1360():
    check_case("test_objects/zdpuAw6xA2TCxZFmiVo952F3tsczGCb3EyP115mgUqai8qCh1.ipce.cbor.gz")
def test_zdpuAyvkvUhLQg9NtYZmFWXqth4sDHNHv5q3NUQ8sjSqMS5YF_len1361():
    check_case("test_objects/zdpuAyvkvUhLQg9NtYZmFWXqth4sDHNHv5q3NUQ8sjSqMS5YF.ipce.cbor.gz")
def test_zdpuB2bWk59gWMk4TW4m49tuNShMJbAW7erYCdX56S43JH854_len1361():
    check_case("test_objects/zdpuB2bWk59gWMk4TW4m49tuNShMJbAW7erYCdX56S43JH854.ipce.cbor.gz")
def test_zdpuAszY6jf714PyTaig4kwGMuTWVnXETsikAyLC8gv1roDEq_len1361():
    check_case("test_objects/zdpuAszY6jf714PyTaig4kwGMuTWVnXETsikAyLC8gv1roDEq.ipce.cbor.gz")
def test_zdpuAuV9hzsKCWZtH4QoxyTvw76Y1PNHCZhuXHCW5zt2yX9rM_len1363():
    check_case("test_objects/zdpuAuV9hzsKCWZtH4QoxyTvw76Y1PNHCZhuXHCW5zt2yX9rM.ipce.cbor.gz")
def test_zdpuAnamEgPq1xgC8hboPS24f5oJ374H4WrV6vBQVLSFYDbAP_len1363():
    check_case("test_objects/zdpuAnamEgPq1xgC8hboPS24f5oJ374H4WrV6vBQVLSFYDbAP.ipce.cbor.gz")
def test_zdpuAt9M5NhnRznLTMEHEPKNBBFtATahMjTBRehePaSNhfdgH_len1365():
fn = "test_objects/zdpuAt9M5NhnRznLTMEHEPKNBBFtATahMjTBRehePaSNhfdgH.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuzjShXsUCXV7cTqnRRLEjtW3qTt6b7b2jjei2cdouUdT_len1366():
fn = "test_objects/zdpuAuzjShXsUCXV7cTqnRRLEjtW3qTt6b7b2jjei2cdouUdT.ipce.cbor.gz"
check_case(fn)
def test_zdpuAychE3HZTzbVVLhQbnojqT6D9MmBfuZ7MgLggg4ZwXm7D_len1366():
fn = "test_objects/zdpuAychE3HZTzbVVLhQbnojqT6D9MmBfuZ7MgLggg4ZwXm7D.ipce.cbor.gz"
check_case(fn)
def test_zdpuAr7SekHJK3KTbdmKFKMGTiefmBes6MaaT4hxoY5Xr1T1H_len1367():
fn = "test_objects/zdpuAr7SekHJK3KTbdmKFKMGTiefmBes6MaaT4hxoY5Xr1T1H.ipce.cbor.gz"
check_case(fn)
def test_zdpuB3bRxPHQ4TAzgY3qw8MA5tkjNT2Aqf95qEWU3gvKjkwQs_len1367():
fn = "test_objects/zdpuB3bRxPHQ4TAzgY3qw8MA5tkjNT2Aqf95qEWU3gvKjkwQs.ipce.cbor.gz"
check_case(fn)
def test_zdpuAoNLxUtzhESM7TsCdJqpe7pzudbfTVPidWMopcPok8kiz_len1368():
fn = "test_objects/zdpuAoNLxUtzhESM7TsCdJqpe7pzudbfTVPidWMopcPok8kiz.ipce.cbor.gz"
check_case(fn)
def test_zdpuB1ZRB3TmTKktKGKZfHs4Lo7xK7UGygQ1bLMFbjG8Djzwf_len1370():
fn = "test_objects/zdpuB1ZRB3TmTKktKGKZfHs4Lo7xK7UGygQ1bLMFbjG8Djzwf.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtVtZvh7ooqQtcPJbginQEbB7dZwiUhXjWLji6jHkYnCP_len1370():
fn = "test_objects/zdpuAtVtZvh7ooqQtcPJbginQEbB7dZwiUhXjWLji6jHkYnCP.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmyPPAP31iPyFthc37G6tAph86e3LEBEejDJ1F1gPVBv7_len1373():
fn = "test_objects/zdpuAmyPPAP31iPyFthc37G6tAph86e3LEBEejDJ1F1gPVBv7.ipce.cbor.gz"
check_case(fn)
def test_zdpuAtvSZBbDuL3yfKm2zFuu5hQ7rcQVr7R528DbaYxxfBMJR_len1375():
fn = "test_objects/zdpuAtvSZBbDuL3yfKm2zFuu5hQ7rcQVr7R528DbaYxxfBMJR.ipce.cbor.gz"
check_case(fn)
def test_zdpuAr61ArNc8WwHmz1AtsaTMvReeQCXsphpxj9aXwnjb79tk_len1375():
fn = "test_objects/zdpuAr61ArNc8WwHmz1AtsaTMvReeQCXsphpxj9aXwnjb79tk.ipce.cbor.gz"
check_case(fn)
def test_zdpuAw7SKqYiZkQ2TNxVGE9Y2dWPonPSNTYoaCP33oeHHkai8_len1376():
fn = "test_objects/zdpuAw7SKqYiZkQ2TNxVGE9Y2dWPonPSNTYoaCP33oeHHkai8.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxS59RrYribYVSEyt11HExJWsyTGq16h1CAxZ7VLtFCxU_len1376():
fn = "test_objects/zdpuAxS59RrYribYVSEyt11HExJWsyTGq16h1CAxZ7VLtFCxU.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnyS3NwQFbptB1XN6BkpLceM1ycKsmqZjQzgWtv6ZaM2s_len1380():
fn = "test_objects/zdpuAnyS3NwQFbptB1XN6BkpLceM1ycKsmqZjQzgWtv6ZaM2s.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwsuo9QmHyWT8xeAhddNjaVKV3nU59uHq7QJBedqPVGPD_len1381():
fn = "test_objects/zdpuAwsuo9QmHyWT8xeAhddNjaVKV3nU59uHq7QJBedqPVGPD.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqZLmBANwNWh4tSAWxNUZyxogWtJWwaYojmT51zFqEVBq_len1381():
fn = "test_objects/zdpuAqZLmBANwNWh4tSAWxNUZyxogWtJWwaYojmT51zFqEVBq.ipce.cbor.gz"
check_case(fn)
def test_zdpuApQz6qu9ZsSKMjk2F1pUNjgTc86Zv9EQ1tpbagL21VktJ_len1382():
fn = "test_objects/zdpuApQz6qu9ZsSKMjk2F1pUNjgTc86Zv9EQ1tpbagL21VktJ.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmpq5VSBiEq6x7GVaWjSqp3CBMWm3iRTjxJ8BRBUzxKP9_len1382():
fn = "test_objects/zdpuAmpq5VSBiEq6x7GVaWjSqp3CBMWm3iRTjxJ8BRBUzxKP9.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2bp2dTg1812TN2dtRZaPREmisGHcq6ybPcJLA7TLpQRC_len1383():
fn = "test_objects/zdpuB2bp2dTg1812TN2dtRZaPREmisGHcq6ybPcJLA7TLpQRC.ipce.cbor.gz"
check_case(fn)
def test_zdpuAz4uJWikHdrkdiMvLqP6eiqHtSPMUFKk211SjKepXuo7o_len1383():
fn = "test_objects/zdpuAz4uJWikHdrkdiMvLqP6eiqHtSPMUFKk211SjKepXuo7o.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsh28WsQSpSahrBFdsdfXntXEtrn9YjD25sbFpGwXNtJC_len1383():
fn = "test_objects/zdpuAsh28WsQSpSahrBFdsdfXntXEtrn9YjD25sbFpGwXNtJC.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvNDsKpBzyAkV1cqbxks3RGbcqosCygB5zM6QvwDwwnLd_len1384():
fn = "test_objects/zdpuAvNDsKpBzyAkV1cqbxks3RGbcqosCygB5zM6QvwDwwnLd.ipce.cbor.gz"
check_case(fn)
def test_zdpuAx42cVkasDnsFQP6BuJdTYj1i3KV8YSDH4SCEjjbbMsva_len1384():
fn = "test_objects/zdpuAx42cVkasDnsFQP6BuJdTYj1i3KV8YSDH4SCEjjbbMsva.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsGDsEGSen1yeyoHZ9fY1ikhnVtuHAoUQN8FdeXKoSvRs_len1384():
fn = "test_objects/zdpuAsGDsEGSen1yeyoHZ9fY1ikhnVtuHAoUQN8FdeXKoSvRs.ipce.cbor.gz"
check_case(fn)
def test_zdpuArhKHaiyvbkxmHiDbGL5Sg5UUkHFJ6EKvwgJ166fZNs8R_len1385():
fn = "test_objects/zdpuArhKHaiyvbkxmHiDbGL5Sg5UUkHFJ6EKvwgJ166fZNs8R.ipce.cbor.gz"
check_case(fn)
def test_zdpuAx39M3Ti2BPHcYUVMB6kby8xd83sUSKRUo9BS7Uj5K7cY_len1385():
fn = "test_objects/zdpuAx39M3Ti2BPHcYUVMB6kby8xd83sUSKRUo9BS7Uj5K7cY.ipce.cbor.gz"
check_case(fn)
def test_zdpuArMtwhUBcFCKMNwVjnTFHBB68ighYDEnZKtFFof67wgRV_len1390():
fn = "test_objects/zdpuArMtwhUBcFCKMNwVjnTFHBB68ighYDEnZKtFFof67wgRV.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmUCk26ne18fDN46LGpwmjkLzZnTRAzXNnAeAx7q1Xwug_len1395():
fn = "test_objects/zdpuAmUCk26ne18fDN46LGpwmjkLzZnTRAzXNnAeAx7q1Xwug.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxXEP6aARdoGhim8tPjHzcaa8vvs92urgkHjbZsuK7kgv_len1409():
fn = "test_objects/zdpuAxXEP6aARdoGhim8tPjHzcaa8vvs92urgkHjbZsuK7kgv.ipce.cbor.gz"
check_case(fn)
def test_zdpuAt86hYBzVh3YzhUnZ3z6twaBwDjht7a4FxXN4avqVvFWq_len1414():
fn = "test_objects/zdpuAt86hYBzVh3YzhUnZ3z6twaBwDjht7a4FxXN4avqVvFWq.ipce.cbor.gz"
check_case(fn)
def test_zdpuB14mDSaeoiRYR3psXggiXvhty4L9rrvdKMNzN93Ws175G_len1415():
fn = "test_objects/zdpuB14mDSaeoiRYR3psXggiXvhty4L9rrvdKMNzN93Ws175G.ipce.cbor.gz"
check_case(fn)
def test_zdpuAkUMhW3261HUnFhy5MaoREArc8t4i1rRL71oHyLdTJ9bb_len1424():
fn = "test_objects/zdpuAkUMhW3261HUnFhy5MaoREArc8t4i1rRL71oHyLdTJ9bb.ipce.cbor.gz"
check_case(fn)
def test_zdpuAz98zz7qvKbraZjMiR29T5B493vbBBwBTAQqem5anqcNN_len1430():
fn = "test_objects/zdpuAz98zz7qvKbraZjMiR29T5B493vbBBwBTAQqem5anqcNN.ipce.cbor.gz"
check_case(fn)
def test_zdpuArcP8Syz8w3isEvqsQEkvH6c2svjqRGKkLR32Xdp9xkRr_len1449():
fn = "test_objects/zdpuArcP8Syz8w3isEvqsQEkvH6c2svjqRGKkLR32Xdp9xkRr.ipce.cbor.gz"
check_case(fn)
def test_zdpuAoNjy9t5rTAb6TXowoW8reEcyBYyZSHJM5ykyWoabU7rd_len1454():
fn = "test_objects/zdpuAoNjy9t5rTAb6TXowoW8reEcyBYyZSHJM5ykyWoabU7rd.ipce.cbor.gz"
check_case(fn)
def test_zdpuApqJEwrCJ3m4Pbm3Eiwttkf24BtWcjuUC9pA4X8rv3grc_len1469():
fn = "test_objects/zdpuApqJEwrCJ3m4Pbm3Eiwttkf24BtWcjuUC9pA4X8rv3grc.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvwT13tYsVqef25M8jdVbQvH9J3o9NgM1uhGtVkfH1LiF_len1485():
fn = "test_objects/zdpuAvwT13tYsVqef25M8jdVbQvH9J3o9NgM1uhGtVkfH1LiF.ipce.cbor.gz"
check_case(fn)
def test_zdpuAokWypfv5KVwRc8qUbTtGVBGprbzpaKrn1meMAdgmNgJk_len1496():
fn = "test_objects/zdpuAokWypfv5KVwRc8qUbTtGVBGprbzpaKrn1meMAdgmNgJk.ipce.cbor.gz"
check_case(fn)
def test_zdpuB2Erx3a2ArPxQAZKkC3AL8kv4CoDBMRBSxHhiv6hAn4US_len1505():
fn = "test_objects/zdpuB2Erx3a2ArPxQAZKkC3AL8kv4CoDBMRBSxHhiv6hAn4US.ipce.cbor.gz"
check_case(fn)
def test_zdpuAmba9btXAo96hWt2LWXJ4pq4iDkdw5m1gebhX54axokVv_len1595():
fn = "test_objects/zdpuAmba9btXAo96hWt2LWXJ4pq4iDkdw5m1gebhX54axokVv.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsiHCg7LZP8HQrVpZ88kWSMCLyN8866gjSafVSAKyeodF_len1596():
fn = "test_objects/zdpuAsiHCg7LZP8HQrVpZ88kWSMCLyN8866gjSafVSAKyeodF.ipce.cbor.gz"
check_case(fn)
def test_zdpuAphtD2vGCSLpM1yQD3KC5CK7jietngPrpqhejMT5YCNyK_len1597():
fn = "test_objects/zdpuAphtD2vGCSLpM1yQD3KC5CK7jietngPrpqhejMT5YCNyK.ipce.cbor.gz"
check_case(fn)
def test_zdpuArLUsFXJehLAvpHcaNzp18mhKLwZWJpmwWRH1Zqgb8gFH_len1627():
fn = "test_objects/zdpuArLUsFXJehLAvpHcaNzp18mhKLwZWJpmwWRH1Zqgb8gFH.ipce.cbor.gz"
check_case(fn)
def test_zdpuApu6Yts5CKWaUTZUssLWaPc5U3awwAkNebkceALgfd2pc_len1669():
fn = "test_objects/zdpuApu6Yts5CKWaUTZUssLWaPc5U3awwAkNebkceALgfd2pc.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyQ9XpPC3oDo9HNQxz7fvBPCug8m5B8ub2k8vTt9XNxeX_len1777():
fn = "test_objects/zdpuAyQ9XpPC3oDo9HNQxz7fvBPCug8m5B8ub2k8vTt9XNxeX.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwgo57iCeK5UJKUZvbQHoG16XVCq48hB31jGqHgqmfWQw_len1778():
fn = "test_objects/zdpuAwgo57iCeK5UJKUZvbQHoG16XVCq48hB31jGqHgqmfWQw.ipce.cbor.gz"
check_case(fn)
def test_zdpuAwq2pdLREPhP8vkY811ngZgiXiGCcgEPnVnAxNh8j8HWt_len1829():
fn = "test_objects/zdpuAwq2pdLREPhP8vkY811ngZgiXiGCcgEPnVnAxNh8j8HWt.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvvvUva6eBCo91t1pWYpbezjvRkwzay8GL9g87QxkYu1c_len1856():
fn = "test_objects/zdpuAvvvUva6eBCo91t1pWYpbezjvRkwzay8GL9g87QxkYu1c.ipce.cbor.gz"
check_case(fn)
def test_zdpuAvvkYZb3Ln4mXrfMW76h1co1y8Eu6eEPhzGmiYn7uyodw_len1860():
fn = "test_objects/zdpuAvvkYZb3Ln4mXrfMW76h1co1y8Eu6eEPhzGmiYn7uyodw.ipce.cbor.gz"
check_case(fn)
def test_zdpuAqhjUUqBYK8HYgtQnXhfxUaduCV9Wrzkb2ScBUeSFgqjw_len1898():
fn = "test_objects/zdpuAqhjUUqBYK8HYgtQnXhfxUaduCV9Wrzkb2ScBUeSFgqjw.ipce.cbor.gz"
check_case(fn)
def test_zdpuB31Yzk6BVuGzH76HjmuwjSVH7a898jpfcANWaRYr8cmem_len1903():
fn = "test_objects/zdpuB31Yzk6BVuGzH76HjmuwjSVH7a898jpfcANWaRYr8cmem.ipce.cbor.gz"
check_case(fn)
def test_zdpuAu5EAoFbBUmHDw6CcgJmimaAcqKHXmQ5sywxABk2rxZAz_len1913():
fn = "test_objects/zdpuAu5EAoFbBUmHDw6CcgJmimaAcqKHXmQ5sywxABk2rxZAz.ipce.cbor.gz"
check_case(fn)
def test_zdpuAuLHxSJcd5J1Lq3W5y54f5A71augaTEtNYtyqkS9ADb44_len2254():
fn = "test_objects/zdpuAuLHxSJcd5J1Lq3W5y54f5A71augaTEtNYtyqkS9ADb44.ipce.cbor.gz"
check_case(fn)
def test_zdpuApnTmAPXsiUsA7f1h68J7KPsZYFEkdrWfe8akq3BZhEiY_len2338():
fn = "test_objects/zdpuApnTmAPXsiUsA7f1h68J7KPsZYFEkdrWfe8akq3BZhEiY.ipce.cbor.gz"
check_case(fn)
def test_zdpuAsVQKV2rAyG4VGmLBdXeC2jdJ3ynZBTFQCm777uAohwQf_len2376():
fn = "test_objects/zdpuAsVQKV2rAyG4VGmLBdXeC2jdJ3ynZBTFQCm777uAohwQf.ipce.cbor.gz"
check_case(fn)
def test_zdpuAnozgfhsFbfUunTsy9U1jNdgAyzAefhqZp3kSP9ZVT9wm_len2377():
fn = "test_objects/zdpuAnozgfhsFbfUunTsy9U1jNdgAyzAefhqZp3kSP9ZVT9wm.ipce.cbor.gz"
check_case(fn)
def test_zdpuApd5BWNULEFUMec9tJEtT1DmQwQrxN5ZEDpJr3FLybenJ_len2446():
fn = "test_objects/zdpuApd5BWNULEFUMec9tJEtT1DmQwQrxN5ZEDpJr3FLybenJ.ipce.cbor.gz"
check_case(fn)
def test_zdpuAyWEYrGmY38fphMNkCEkccNtENkbBgEZ2ZAFqTc8rRpoT_len2532():
fn = "test_objects/zdpuAyWEYrGmY38fphMNkCEkccNtENkbBgEZ2ZAFqTc8rRpoT.ipce.cbor.gz"
check_case(fn)
def test_zdpuAzZsZceRP1884HHt9tYVVoPwUnPzNxabdJL23Me6VpAjv_len2532():
fn = "test_objects/zdpuAzZsZceRP1884HHt9tYVVoPwUnPzNxabdJL23Me6VpAjv.ipce.cbor.gz"
check_case(fn)
def test_zdpuArVrriU4cajTk4jHS5vUNWnFcrTdcKg8bFvWsgUhzyzQQ_len3895():
fn = "test_objects/zdpuArVrriU4cajTk4jHS5vUNWnFcrTdcKg8bFvWsgUhzyzQQ.ipce.cbor.gz"
check_case(fn)
def test_zdpuB27NYJMM4tLDR9qBmH51c6mxPB2VWiVKTJJ1GHpmF7HYB_len3905():
fn = "test_objects/zdpuB27NYJMM4tLDR9qBmH51c6mxPB2VWiVKTJJ1GHpmF7HYB.ipce.cbor.gz"
check_case(fn)
def test_zdpuAxnv7gp2h69SWVnkM2EkJrwihs3swDcdgqwgLLiFZBSms_len5862():
fn = "test_objects/zdpuAxnv7gp2h69SWVnkM2EkJrwihs3swDcdgqwgLLiFZBSms.ipce.cbor.gz"
check_case(fn)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_z_from_testobjs_autogenerated.py
|
test_z_from_testobjs_autogenerated.py
|
import gzip
import os
from typing import Iterator
from unittest import SkipTest
import cbor2
from zuper_commons.fs import locate_files, read_bytes_from_file
from zuper_ipce import IPCE, ipce_from_object, object_from_ipce
from zuper_typing import dataclass
@dataclass
class Case:
    # One stored test object:
    #   fn     -- path of the .ipce.cbor.gz fixture on disk
    #   ipce   -- decoded IPCE payload (None when loaded with load=False)
    #   digest -- basename digest, suffixed with "_len<size>" to name the test
    fn: str
    ipce: IPCE
    digest: str
def find_objects(load: bool = True) -> Iterator[Case]:  # pragma: no cover
    """Yield a Case for every *.ipce.cbor.gz fixture, smallest file first.

    With load=False only the metadata (path, digest) is produced and
    ``ipce`` is left as None.
    """
    directory = "test_objects"
    paths = locate_files(directory, "*.ipce.cbor.gz", normalize=False)

    def size_of(path: str) -> int:
        return os.stat(path).st_size

    for path in sorted(paths, key=size_of):
        if load:
            # NOTE(review): the raw file bytes are cbor-decoded without gzip
            # decompression, unlike check_case() below — confirm these
            # fixtures are stored as plain CBOR despite the .gz suffix.
            ipce = cbor2.loads(read_bytes_from_file(path))
        else:
            ipce = None
        digest, _, _ = os.path.basename(path).partition(".")
        digest = f"{digest}_len{size_of(path)}"
        yield Case(path, ipce=ipce, digest=digest)
#
# def test_from_test_objects():
# for case in find_objects():
# print(case.fn)
# ob = object_from_ipce(case.ipce, {})
# ipce2 = ipce_from_object(ob)
def check_case(fn: str):
    """Round-trip one stored fixture: gunzip, cbor-decode, IPCE -> object -> IPCE.

    Raises SkipTest when the fixture file is missing; any failure is logged
    with a full traceback before being re-raised.
    """
    import traceback

    from zuper_ipce import logger

    try:
        if not os.path.exists(fn):  # pragma: no cover
            raise SkipTest(f"File {fn} not found")
        compressed = read_bytes_from_file(fn)
        ipce = cbor2.loads(gzip.decompress(compressed))
        ob = object_from_ipce(ipce)
        _ipce2 = ipce_from_object(ob)
        # NOTE(review): comparing _ipce2 against ipce is disabled in the
        # original; intentionally kept disabled here.
    except BaseException:  # pragma: no cover
        logger.error(traceback.format_exc())
        raise
def main():  # pragma: no cover
    """Print a regenerated autogenerated-tests module to stdout.

    Emits one wrapper test per fixture found by find_objects(load=False);
    the output is meant to replace test_z_from_testobjs_autogenerated.py.
    """
    header = """
from .test_from_testobjs import check_case
"""
    print(header)
    for case in find_objects(load=False):
        stub = f"""
def test_{case.digest}():
    fn = {case.fn!r}
    check_case(fn)
"""
        print(stub)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_from_testobjs.py
|
test_from_testobjs.py
|
from datetime import datetime
from typing import Dict, TypeVar
from zuper_typing import dataclass
def test_pretty1():
    """Constructing a dataclass with every scalar field kind must not raise."""

    @dataclass
    class Animal:
        a: int
        b: bool
        c: float
        d: datetime
        e: str
        f: bytes
        g: str
        h: str
        i: str
        j: str

    # Strings resembling special content (a traceback, a PEM header, and
    # multihash-style digests) that the pretty-printer treats specially.
    g = "Traceback"
    h = "-----BEGIN ciao"
    i = "zd..."
    j = "Qm..."
    _ = Animal(1, True, 0.1, datetime.now(), "a", b"a", g, h, i, j)
def test_pretty2():
    """Creating TypeVars with a bound and with each variance must not raise."""
    bounded = TypeVar("X", bound=Dict[str, str])
    contra = TypeVar("Y", contravariant=True)
    cov = TypeVar("Z", covariant=True)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_pretty.py
|
test_pretty.py
|
from zuper_typing import dataclass
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_bool1():
    """A dataclass with a single boolean field round-trips as object and type."""

    @dataclass
    class M:
        a: bool

    instance = M(True)
    assert_object_roundtrip(instance)
    assert_type_roundtrip(M)
def test_none1():
    """NoneType itself, and a dataclass holding a NoneType field, round-trip."""
    NoneT = type(None)
    assert_type_roundtrip(NoneT)

    @dataclass
    class M:
        a: NoneT

    assert_object_roundtrip(M(None))
    assert_type_roundtrip(M)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_bool.py
|
test_bool.py
|
from zuper_typing import dataclass
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_float_1():
    """A float field survives an object round-trip."""

    @dataclass
    class MyClass:
        f: float

    assert_object_roundtrip(MyClass(1.0))
def test_float_2():
    """After a type round-trip the float annotation is preserved exactly."""

    @dataclass
    class MyClass:
        f: float

    restored = assert_type_roundtrip(MyClass)
    assert restored.__annotations__["f"] is float
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_float.py
|
test_float.py
|
from typing import Any, cast, Dict, Type
from nose.tools import assert_equal, raises
from zuper_ipce import ipce_from_object, object_from_ipce
from zuper_typing import dataclass
from zuper_typing.my_dict import get_DictLike_args, make_dict, make_list, make_set
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_dict_int_int0():
D = make_dict(int, int)
assert_type_roundtrip(D)
def test_dict_int_int1():
D = Dict[int, int]
# pprint(schema=ipce_from_typelike(D))
assert_type_roundtrip(D)
# @dataclass
# class MyClass:
# f: Dict[int, int]
#
# e = MyClass({1: 2})
# assert_object_roundtrip(e, {})
def test_dict_int_int():
@dataclass
class MyClass:
f: Dict[int, int]
e = MyClass({1: 2})
assert_object_roundtrip(e)
@raises(ValueError)
def test_dict_err():
# noinspection PyTypeChecker
make_dict(int, "str")
def test_dict_int_str():
D = make_dict(str, int)
assert_type_roundtrip(D)
def test_dict_int_str2():
    """A populated custom Dict[str, int] instance round-trips.

    Fix: the original wrapped the helper in ``assert
    assert_object_roundtrip(d)``, asserting on its return value. Every
    other call site in this module invokes the helper for its internal
    assertions only; its return value is not part of its contract, and a
    falsy return would make this test fail spuriously.
    """
    D = make_dict(str, int)
    d = D({"a": 1, "b": 2})
    assert_object_roundtrip(d)
def test_dict_int_str3():
D = make_dict(str, int)
@dataclass
class C:
d: D
assert_type_roundtrip(C)
d = D({"a": 1, "b": 2})
c = C(d)
res = assert_object_roundtrip(c)
x1b = res["x1b"]
# print(f"x1b: {debug_print(res['x1b'])}")
K, V = get_DictLike_args(type(x1b.d))
assert_equal(V, int)
def test_dict_int_str4_type():
D = make_dict(str, int)
ipce = ipce_from_object(D)
D2 = object_from_ipce(ipce)
D = cast(Type[Dict], D)
K, V = get_DictLike_args(D)
D2 = cast(Type[Dict], D2)
K2, V2 = get_DictLike_args(D2)
assert_equal((K, V), (K2, V2))
def test_dict_int_str4():
D = make_dict(str, int)
c = D({"a": 1, "b": 2})
K, V = get_DictLike_args(type(c))
debug_print = str
# logger.info(f"c: {debug_print(c)}")
ipce = ipce_from_object(c)
c2 = object_from_ipce(ipce)
# logger.info(f"ipce: {oyaml_dump(ipce)}")
# logger.info(f"c2: {debug_print(c2)}")
K2, V2 = get_DictLike_args(cast(Type[dict], type(c2)))
assert_equal((K, V), (K2, V2))
def test_dict_kv01():
x = get_DictLike_args(dict)
assert_equal(x, (Any, Any))
def test_dict_kv02():
x = get_DictLike_args(Dict)
assert_equal(x, (Any, Any))
def test_dict_kv03():
x = get_DictLike_args(Dict[int, str])
assert_equal(x, (int, str))
def test_dict_copy():
    """copy() on a custom dict subtype must return the same subtype."""
    T = make_dict(int, str)
    original = T({1: "a"})
    duplicate = original.copy()
    assert type(duplicate) is T
def test_set_copy():
    """copy() on a custom set subtype must return the same subtype."""
    T = make_set(int)
    original = T({1})
    duplicate = original.copy()
    assert type(duplicate) is T
def test_list_copy():
    """copy() on a custom list subtype must return the same subtype."""
    T = make_list(int)
    original = T([1])
    duplicate = original.copy()
    assert type(duplicate) is T
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_dict.py
|
test_dict.py
|
from typing import (
Any,
cast,
ClassVar,
Dict,
List,
NewType,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from nose.tools import assert_equal, raises
from zuper_ipce import (
IESO,
IPCE,
ipce_from_object,
ipce_from_typelike,
object_from_ipce,
typelike_from_ipce,
)
from zuper_ipce.ipce_spec import assert_sorted_dict_cbor_ord
from zuper_ipce.utils_text import oyaml_load
from zuper_ipce_tests.test_utils import assert_object_roundtrip, assert_type_roundtrip
from zuper_typing.get_patches_ import NotEquivalentException, assert_equivalent_types
from zuper_typing import dataclass, Generic
from zuper_typing.annotations_tricks import (
get_NewType_arg,
get_NewType_name,
get_NewType_repr,
is_Any,
is_NewType,
is_Type,
make_Tuple,
name_for_type_like,
)
from zuper_typing.logging_util import ztinfo
from zuper_typing.my_dict import get_CustomSet_arg, get_ListLike_arg, make_set
from zuper_typing.subcheck import can_be_used_as2
from zuper_typing_tests.test_utils import known_failure
def test_corner_cases01():
assert None is object_from_ipce(None, Optional[int])
def test_corner_cases02():
assert 2 == object_from_ipce(2, Optional[int])
def test_corner_cases03():
assert None is object_from_ipce(None)
def test_corner_cases04():
ipce_from_object({1: 2})
def test_corner_cases05():
ipce_from_object(12, Optional[int])
@raises(ValueError)
def test_corner_cases09():
# noinspection PyTypeChecker
ipce_from_typelike(None)
@raises(ValueError)
def test_property_error():
@dataclass
class MyClass32:
a: int
ok = can_be_used_as2(str, int)
assert not ok.result
# noinspection PyTypeChecker
ob = MyClass32("not an int")
# ipce_to_object(ob, {}, {}, expect_type=MyClass32)
_res = ipce_from_object(ob)
# print(yaml.dump(res))
@raises(NotImplementedError)
def test_not_know():
class C:
pass
ipce_from_object(C())
@raises(ValueError)
def test_corner_cases07():
can0 = can_be_used_as2(int, bool)
assert not can0, can0
T = Union[bool, str]
can = can_be_used_as2(int, T)
assert not can, can
object_from_ipce(12, T)
@raises(ValueError)
def test_corner_cases08():
T = Optional[bool]
assert not can_be_used_as2(int, T).result
object_from_ipce(12, T)
def test_newtype1():
T = NewType("a", int)
assert is_NewType(T)
assert_equal(get_NewType_arg(T), int)
assert_equal(get_NewType_name(T), "a")
assert_equal(get_NewType_repr(T), "NewType('a', int)")
assert_equal(name_for_type_like(T), "NewType('a', int)")
def test_newtype2():
T = NewType("a", object)
assert is_NewType(T)
A = get_NewType_arg(T)
assert A is object, A
r = "NewType('a')"
assert_equal(get_NewType_repr(T), r)
assert_equal(name_for_type_like(T), r)
def test_newtype2b():
T = NewType("a", Any)
assert is_NewType(T)
A = get_NewType_arg(T)
assert is_Any(A), A
r = "NewType('a')"
assert_equal(get_NewType_repr(T), r)
assert_equal(name_for_type_like(T), r)
def test_list0():
v = get_ListLike_arg(list)
assert is_Any(v)
def test_set0():
a = get_CustomSet_arg(make_set(int))
assert a is int
def test_default1():
@dataclass
class C:
a: bool = False
assert_type_roundtrip(C)
ipce1 = ipce_from_typelike(C)
C2 = typelike_from_ipce(ipce1)
# print(debug_print(C))
# print(debug_print(C2))
ipce2 = ipce_from_typelike(C2)
assert ipce1 == ipce2
def test_default2():
X = TypeVar("X")
@dataclass
class C(Generic[X]):
a: bool = False
assert_type_roundtrip(C)
ipce1 = ipce_from_typelike(C)
C2 = typelike_from_ipce(ipce1)
# print(debug_print(C))
# print(oyaml_dump(ipce1))
assert ipce1["properties"]["a"]["default"] == False
# print(debug_print(C2))
ipce2 = ipce_from_typelike(C2)
assert ipce1 == ipce2
def test_type1():
assert_type_roundtrip(type)
assert_object_roundtrip(type)
def test_parsing():
schema_bool = (
"""{$schema: 'http://json-schema.org/draft-07/schema#', type: boolean}"""
)
ipce = cast(IPCE, oyaml_load(schema_bool))
T0 = typelike_from_ipce(ipce)
assert T0 is bool, T0
T0 = object_from_ipce(ipce)
assert T0 is bool, T0
a = """\
$schema:
$id: http://invalid.json-schema.org/M#
$schema: http://json-schema.org/draft-07/schema#
__module__: zuper_ipce_tests.test_bool
description: 'M(a: bool)'
order: [a]
properties:
a: {$schema: 'http://json-schema.org/draft-07/schema#', type: boolean}
required: [a]
title: M
type: object
__qualname__: misc
a: true
"""
ipce = cast(dict, oyaml_load(a))
T = typelike_from_ipce(ipce["$schema"])
# print(T)
# print(T.__annotations__)
assert T.__annotations__["a"] is bool, T.__annotations__
_ob = object_from_ipce(ipce)
def test_Type1():
T = Type[int]
assert is_Type(T)
# noinspection PyMissingTypeHints
@raises(TypeError)
def test_error_list1():
a = [1, 2, 3]
S = int
ipce_from_object(a, S)
@raises(TypeError)
def test_error_list2():
a = [1, 2, 3]
S = Union[int, str]
ipce_from_object(a, S)
@raises(ValueError)
def test_error_list2b():
a = [1, 2, 3]
S = Union[int, str]
object_from_ipce(a, S)
@raises(TypeError)
def test_error_scalar1():
a = "s"
S = Union[int, bool]
_ipce = ipce_from_object(a, S)
@raises(ValueError)
def test_error_scalar2():
a = "s"
S = Union[int, bool]
object_from_ipce(a, S)
def test_corner_optional():
a = {}
S = Optional[Dict[str, int]]
object_from_ipce(a, S)
@raises(ValueError)
def test_corner_union():
a = {}
S = Union[str, int]
object_from_ipce(a, S)
@raises(ValueError)
def test_corner_noclass():
a = {}
class S:
pass
object_from_ipce(a, S)
def test_classvars():
@dataclass
class MyConstant:
a: object
@dataclass
class MyNominal:
op1: MyConstant
op2: MyConstant
nominal = True
assert_type_roundtrip(MyNominal)
a = MyNominal(MyConstant(1), MyConstant(2))
assert_object_roundtrip(a)
def test_corner_optional_with_default():
@dataclass
class MyCD:
a: Optional[bool] = True
assert_type_roundtrip(MyCD)
a = MyCD()
assert_object_roundtrip(a)
ipce = ipce_from_typelike(MyCD)
# logger.info("yaml:\n\n" + oyaml_dump(ipce))
assert ipce["properties"]["a"]["default"] == True
assert "required" not in ipce
def test_corner_optional_with_default2():
@dataclass
class MyCD2:
a: bool = True
assert_type_roundtrip(MyCD2)
a = MyCD2()
assert_object_roundtrip(a)
ipce = ipce_from_typelike(MyCD2)
# logger.info("yaml:\n\n" + oyaml_dump(ipce))
assert ipce["properties"]["a"]["default"] == True
assert "required" not in ipce
def test_sequence():
a = Sequence[int]
b = List[int]
ipce1 = ipce_from_typelike(a)
ipce2 = ipce_from_typelike(b)
assert ipce1 == ipce2
def test_union1():
@dataclass
class MyCD3:
a: Union[float, int]
assert_type_roundtrip(MyCD3)
assert_object_roundtrip(MyCD3)
def make_class(tl):
    """Build a fresh dataclass with one field ``a`` annotated as ``tl``."""

    @dataclass
    class MyCD4:
        a: tl

    return MyCD4
def make_class_default(tl, default):
    """Build a fresh dataclass whose field ``a: tl`` carries ``default``."""

    @dataclass
    class MyCD4:
        a: tl = default

    return MyCD4
@raises(NotEquivalentException)
def test_not_equal1():
T1 = Union[int, bool]
T2 = Union[int, str]
A = make_class(T1)
B = make_class(T2)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal2():
T1 = Dict[int, bool]
T2 = Dict[int, str]
A = make_class(T1)
B = make_class(T2)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal3():
T1 = Dict[str, bool]
T2 = Dict[int, bool]
A = make_class(T1)
B = make_class(T2)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal4():
T1 = Set[str]
T2 = Set[int]
A = make_class(T1)
B = make_class(T2)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal5():
T1 = List[str]
T2 = List[int]
A = make_class(T1)
B = make_class(T2)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal6():
T1 = ClassVar[str]
T2 = ClassVar[int]
A = make_class(T1)
B = make_class(T2)
# print(A.__annotations__)
# print(B.__annotations__)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal7():
T1 = ClassVar[str]
T2 = ClassVar[int]
assert_equivalent_types(T1, T2, set())
@raises(NotEquivalentException)
def test_not_equal8():
T = bool
A = make_class_default(T, True)
B = make_class_default(T, False)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal9():
T = Optional[bool]
A = make_class_default(T, True)
B = make_class_default(T, False)
assert_equivalent_types(A, B, set())
@raises(NotEquivalentException)
def test_not_equal10():
T = Optional[bool]
A = make_class_default(T, None)
B = make_class_default(T, False)
assert_equivalent_types(A, B, set())
def test_type():
X = TypeVar("X")
@dataclass
class MyClass(Generic[X]):
a: X
XT: ClassVar[Type[X]]
MyClassInt = MyClass[int]
# print(MyClassInt.__annotations__)
assert_equal(MyClassInt.XT, int)
assert_type_roundtrip(MyClass)
def test_corner_list1():
x = [1, 2, 3]
T = Optional[List[int]]
ieso = IESO(with_schema=True)
ipce_from_object(x, T, ieso=ieso)
@raises(TypeError)
def test_corner_list2():
x = [1, 2, 3]
T = Dict[str, str]
ipce_from_object(x, T)
@raises(ValueError)
def test_corner_list2b_a():
x = [1, 2, 3]
T = Dict[str, str]
object_from_ipce(x, T)
def test_corner_tuple1():
x = (1, 2, 3)
T = Optional[Tuple[int, ...]]
ipce_from_object(x, T)
def test_corner_tuple2():
x = (1, "a")
T = Optional[Tuple[int, str]]
ipce_from_object(x, T)
@raises(ValueError)
def test_corner_tuple3():
x = (1, "a")
T = Dict[str, str]
ipce_from_object(x, T)
def test_corner_none3():
x = None
T = object
ipce_from_object(x, T)
@known_failure
@raises(ValueError)
def test_corner_int3():
x = 1
T = Dict[str, str]
ipce_from_object(x, T)
@raises(ValueError)
def test_corner_int4():
x = 1
T = Dict[str, str]
object_from_ipce(x, T)
def test_corner_none():
x = None
T = object
object_from_ipce(x, T)
def test_corner_noneb():
x = None
T = Any
object_from_ipce(x, T)
def test_corner_none2():
x = None
T = object
ipce_from_object(x, T)
def test_corner_none2b():
x = None
T = Any
ipce_from_object(x, T)
def test_corner_list_Any():
x = [1, 2]
T = Any
ipce_from_object(x, T)
@raises(ValueError)
def test_corner_ipce():
res = {"aa": 1, "a": 2}
assert_sorted_dict_cbor_ord(res)
def test_corner_same_default_value():
    """Explicitly passing the field's own default value still round-trips."""

    @dataclass
    class SameDefault:
        a: int = 2

    assert_object_roundtrip(SameDefault(2))
def test_corner_classvar():
@dataclass
class Dog41:
x: ClassVar[Type[int]]
assert_type_roundtrip(Dog41)
def test_corner_classvar2():
@dataclass
class Dog42:
x: ClassVar[Type[int]] = int
ztinfo(Dog=Dog42)
assert_type_roundtrip(Dog42)
def test_corner_classvar3():
@dataclass
class Dog43:
x: ClassVar[Type[int]] = Union[float, int]
assert_type_roundtrip(Dog43)
def test_empty_tuple1():
@dataclass
class Container1:
ob: object
c = Container1(())
assert_object_roundtrip(c, works_without_schema=False)
def test_empty_tuple2():
@dataclass
class Container2:
ob: Tuple[str, ...]
c = Container2(())
assert_object_roundtrip(c)
def test_empty_tuple3():
@dataclass
class Container3:
ob: make_Tuple()
c = Container3(())
assert_object_roundtrip(c)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_corner_cases.py
|
test_corner_cases.py
|
from dataclasses import field
from typing import Any, cast, Dict, NewType, Optional, Set, TypeVar, Union

from nose.tools import assert_equal

from zuper_commons.logs import setup_logging
from zuper_ipce import ipce_from_typelike, object_from_ipce, typelike_from_ipce
from zuper_ipce.constants import JSONSchema, SCHEMA_ATT, SCHEMA_ID
from zuper_ipce.structures import CannotFindSchemaReference
from zuper_typing import Generic
from zuper_typing.annotations_tricks import make_ForwardRef
from zuper_typing.monkey_patching_typing import my_dataclass as dataclass
from zuper_typing.my_dict import make_dict
from zuper_typing_tests.test_utils import known_failure

from .test_utils import assert_object_roundtrip, assert_type_roundtrip
@dataclass
class Empty:
...
@dataclass
class Contents:
data: bytes
@dataclass
class Address:
""" An address with street and number """
street: str
number: int
@dataclass
class Person:
""" Describes a Person """
first: str
last: str
address: Address
@dataclass
class Office:
""" An Office contains people. """
people: Dict[str, Person] = field(default_factory=make_dict(str, Person))
def test_ser0():
    """ Make sure we can have a constructor with default """
    # class Office0:
    #     """ An Office contains people. """
    #     people: Dict[str, Person] = field(default_factory=make_dict(str, Person))
    #
    # print(Office0.__dict__)
    # Constructing with no arguments exercises the default_factory for `people`.
    Office()
def test_ser1():
# Address_schema = type_to_schema(Address, {})
# assert Address_schema['description'] == Address.__doc__
# Address2 = schema_to_type(Address_schema, {}, {})
# assert Address2.__doc__ == Address.__doc__
Person_schema = ipce_from_typelike(Person)
# print(oyaml_dump(Person_schema))
Address2 = typelike_from_ipce(Person_schema["properties"]["address"])
assert_equal(Address2.__doc__, Address.__doc__)
assert Person_schema["description"] == Person.__doc__
Person2 = typelike_from_ipce(Person_schema)
assert Person2.__doc__ == Person.__doc__
assert_equal(Person2.__annotations__["address"].__doc__, Address.__doc__)
assert_type_roundtrip(Address, expect_type_equal=False)
assert_type_roundtrip(Person, expect_type_equal=False)
assert_type_roundtrip(Office, expect_type_equal=False)
x1 = Office()
x1.people["andrea"] = Person("Andrea", "Censi", Address("Sonnegstrasse", 3))
assert_object_roundtrip(x1, use_globals=get_symbols())
def test_ser2():
    """An Office holding one Person round-trips with strict equality."""
    office = Office()
    office.people["andrea"] = Person("Andrea", "Censi", Address("Sonnegstrasse", 3))
    assert_object_roundtrip(office, expect_equality=True)
@dataclass
class Name:
    """ Describes a Name with optional middle name"""

    first: str
    last: str
    middle: Optional[str] = None


@dataclass
class Chain:
    """ Describes a Name with optional middle name"""

    # Self-referential type (string annotation): exercises recursive schemas.
    value: str
    down: Optional["Chain"] = None


def get_symbols():
    # Builds the globals dict the round-trip helpers use to resolve names.
    # FA/FB are defined locally, so each call returns fresh classes.
    @dataclass
    class FB:
        mine: int

    @dataclass
    class FA:
        """ Describes a Name with optional middle name"""

        value: str
        down: FB

    symbols = {
        "Office": Office,
        "Person": Person,
        "Address": Address,
        "Name": Name,
        "Contents": Contents,
        "Empty": Empty,
        "FA": FA,
        "FB": FB,
        "Chain": Chain,
    }
    return symbols
def test_optional_1():
    # Optional field explicitly provided.
    n1 = Name(first="H", middle="J", last="Wells")
    assert_object_roundtrip(n1, use_globals=get_symbols())


def test_optional_2():
    # Optional field left at its default (None).
    n1 = Name(first="H", last="Wells")
    assert_object_roundtrip(n1, use_globals=get_symbols())


def test_optional_3():
    n1 = Name(first="H", last="Wells")
    assert_object_roundtrip(n1, expect_equality=True)


def test_recursive():
    # Recursive type, with the recursion not exercised (down=None).
    n1 = Chain(value="12")
    assert_object_roundtrip(n1, use_globals={"Chain": Chain})


def test_ser_forward1():
    symbols = get_symbols()
    FA = symbols["FA"]
    FB = symbols["FB"]
    n1 = FA(value="a", down=FB(12))
    # with private_register('test_forward'):
    assert_object_roundtrip(n1, use_globals=get_symbols())


def test_ser_forward2():
    n1 = Empty()
    assert_object_roundtrip(n1, use_globals=get_symbols())
def test_ser_dict_object():
    # NOTE(review): the first M and the P/N/O classes below are never used;
    # they appear to only exercise the various dataclass() decorator
    # variants. The second definition of M shadows the first.
    @dataclass
    class M:
        x: int
        y: int

    @dataclass()
    class P:
        x: int
        y: int

    @dataclass(unsafe_hash=True)
    class N:
        x: int
        y: int

    @dataclass(frozen=True)
    class O:
        x: int
        y: int

    @dataclass(frozen=True, unsafe_hash=True, order=True)
    class L:
        x: int
        y: int

    @dataclass
    class M:
        a: Dict[L, str]

    d = {L(0, 0): "one", L(1, 1): "two"}
    m = M(d)
    symbols2 = {L.__qualname__: L}
    assert_object_roundtrip(m, use_globals=symbols2)


def test_mixed_dict():
    # Dict keyed by a hashable, orderable dataclass ...
    @dataclass(frozen=True, unsafe_hash=True, order=True)
    class L:
        x: int
        y: int

    @dataclass
    class M:
        a: Dict[L, str]

    symbols2 = {L.__qualname__: L}
    d = {L(0, 0): "one", L(1, 1): "two"}
    m = M(d)
    assert_object_roundtrip(m, use_globals=symbols2)

    # ... and a dict whose key type is a union of int and that dataclass.
    @dataclass
    class M2:
        a: Dict[Union[int, L], str]

    d2 = {L(0, 0): "one", 1: "two"}
    m2 = M2(d2)
    assert_object_roundtrip(m2, use_globals=symbols2)


def test_mixed_set():
    # Same as test_mixed_dict, but for sets.
    @dataclass(frozen=True, unsafe_hash=True, order=True)
    class L:
        x: int
        y: int

    @dataclass
    class M:
        a: Set[L]

    symbols2 = {L.__qualname__: L}
    d = {L(0, 0), L(1, 1)}
    m = M(d)
    assert_object_roundtrip(m, use_globals=symbols2)

    @dataclass
    class M2:
        a: Set[Union[int, L]]

    d2 = {L(0, 0), 1}
    m2 = M2(d2)
    assert_object_roundtrip(m2, use_globals=symbols2)
from nose.tools import raises, assert_equal
def test_bytes1():
    # bytes payload round trip
    n1 = Contents(b"1234")
    assert_object_roundtrip(n1, use_globals=get_symbols())


@raises(ValueError)
def test_abnormal_no_schema():
    # An empty dict carries no $schema and cannot be interpreted.
    res = object_from_ipce({})
    # logger.info(f"res = {res!r}")
    # logger.info(f"type = {type(res)}")
    #


def test_lists():
    object_from_ipce([])


def test_nulls():
    assert_object_roundtrip(None)


def test_lists_2():
    assert_object_roundtrip([1])


# @raises(ValueError)
# def test_the_tester_no_links2_in_snd_not():
#     h = 'myhash'
#     x = {LINKS: {h: {}}, "a": {"one": {"/": h}}}
#     assert_good_canonical(x)
@raises(ValueError)
def test_the_tester_no_links2_in_snd_not2():
    # A plain (non-dataclass) class cannot be turned into a schema.
    class NotDataClass:
        ...

    T = NotDataClass

    ipce_from_typelike(T, globals0=get_symbols())


# @raises(AssertionError)
def test_not_optional():
    T = Optional[int]
    ipce_from_typelike(T, globals0=get_symbols())


def test_not_union0():
    T = Union[int, str]
    ipce_from_typelike(T)


@raises(ValueError)
def test_not_str1():
    # A bare string is not a valid typelike.
    # noinspection PyTypeChecker
    ipce_from_typelike("T")


@raises(ValueError)
def test_not_fref2():
    # An unresolved forward reference cannot be serialized.
    # noinspection PyTypeChecker
    ipce_from_typelike(make_ForwardRef("one"))


def test_serialize_any():
    # Any serializes to a minimal schema with only a title.
    # noinspection PyTypeChecker
    s = ipce_from_typelike(Any)
    assert_equal(s, {SCHEMA_ATT: SCHEMA_ID, "title": "Any"})

    assert assert_type_roundtrip(Any)


def test_serialize_object():
    # noinspection PyTypeChecker
    s = ipce_from_typelike(object)
    assert_equal(s, {SCHEMA_ATT: SCHEMA_ID, "title": "object"})

    assert assert_type_roundtrip(object)


# @raises(NotImplementedError)
def test_any_instantiate():
    # noinspection PyTypeChecker
    schema = ipce_from_typelike(Name)
    object_from_ipce(schema)


@known_failure
def test_not_dict_naked():
    class A(dict):
        ...

    ipce_from_typelike(A)


def test_any1b():
    # An empty schema means "any object".
    res = cast(JSONSchema, {})
    t = typelike_from_ipce(res)
    assert t is object, t
def test_any2():
    # A field annotated Any holding an int must round-trip.
    @dataclass
    class C:
        a: Any

    instance = C(a=12)
    assert_object_roundtrip(instance)
@raises(CannotFindSchemaReference)
def test_invalid_schema():
    # A dangling $ref must raise a dedicated exception.
    schema = cast(JSONSchema, {"$ref": "not-existing"})
    typelike_from_ipce(schema)


# @raises(CannotFindSchemaReference)
def test_dict_only():
    T = Dict[str, str]
    _ = ipce_from_typelike(T)


@raises(ValueError)
def test_str1():
    # noinspection PyTypeChecker
    ipce_from_typelike("string-arg")


@raises(ValueError)
def test_forward_ref1():
    ipce_from_typelike(make_ForwardRef("AA"))


@raises(TypeError)
def test_forward_ref2():
    @dataclass
    class MyClass:
        # noinspection PyUnresolvedReferences
        f: make_ForwardRef("unknown")

    ipce_from_typelike(MyClass)


@raises(TypeError)
def test_forward_ref3():
    @dataclass
    class MyClass:
        # noinspection PyUnresolvedReferences
        f: Optional["unknown"]

    # do not put MyClass
    ipce_from_typelike(MyClass)


@raises(TypeError)
def test_forward_ref4():
    class Other:
        pass

    @dataclass
    class MyClass:
        f: Optional["Other"]
        # NOTE(review): indentation reconstructed — __depends__ assumed to be
        # a class attribute, mirroring the pattern used in test_2_ok below.
        __depends__ = (Other,)

    # do not put MyClass
    g = {"Other": Other}
    ipce_from_typelike(MyClass, globals0=g)
# @raises(NotImplementedError)
# def test_error1():
#     try:
#         def f():
#             raise NotImplementedError()
#
#         @dataclass
#         class MyClass:
#             f: Optional['f()']
#
#         # do not put MyClass
#         ipce_from_typelike(MyClass, {'f': f})
#     except (TypeError, NotImplementedError, NameError):
#         pass
#     else:
#         raise AssertionError()


def test_2_ok():
    # __depends__ lets a class declare the types its string annotations need.
    X = TypeVar("X")

    @dataclass
    class M(Generic[X]):
        x: X

    @dataclass
    class MyClass:
        f: "Optional[M[int]]"
        __depends__ = (M,)

    # do not put M
    # ipce_from_typelike(MyClass, {'M': M}) # <---- note
    ipce_from_typelike(MyClass)  # <---- note


@raises(Exception)
def test_2_error():
    # Same as test_2_ok but without __depends__: resolution must fail.
    X = TypeVar("X")

    @dataclass
    class M(Generic[X]):
        x: X

    @dataclass
    class MyClass:
        f: "Optional[M[int]]"

    # do not put M
    ipce_from_typelike(MyClass)  # <---- note


# # for completeness
# @raises(CannotResolveTypeVar)
# def test_cannot_resolve():
#     X = TypeVar('X')
#     eval_field(X, {}, {})


@raises(TypeError)
def test_random_json():
    """ Invalid because of $schema """
    data = {"$schema": {"title": "LogEntry"}, "topic": "next_episode", "data": None}
    object_from_ipce(data)
def test_newtype_1():
    # NewType field on a plain dataclass.
    A = NewType("A", str)

    @dataclass
    class M10:
        a: A

    assert_type_roundtrip(M10)


def test_newtype_2():
    # NewType field on a generic dataclass.
    X = TypeVar("X")
    A = NewType("A", str)

    @dataclass
    class M11(Generic[X]):
        a: A

    assert_type_roundtrip(M11)


#
# def __init__(self, cid):
#     self.cid = cid


def test_nonetype0():
    # NoneType is a real type and round-trips both as type and as object.
    T = type(None)
    assert_type_roundtrip(T)
    assert_object_roundtrip(T)


def test_none2():
    T = None
    assert_object_roundtrip(T)


@raises(ValueError)
def test_none3():
    # None is a value, not a type: the type round trip must fail.
    T = None
    assert_type_roundtrip(T)


def test_nonetype1():
    @dataclass
    class M12:
        a: type(None)

    assert_type_roundtrip(M12)
    assert_object_roundtrip(M12)


def test_optional0():
    T = Optional[int]
    assert_type_roundtrip(T)
    assert_object_roundtrip(T)
if __name__ == "__main__":
    # NOTE(review): setup_logging is not imported in the visible portion of
    # this module — confirm it is provided elsewhere before running as a script.
    setup_logging()
    test_ser0()
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_serialization1.py
|
test_serialization1.py
|
from typing import Any, Set
from nose.tools import raises
from zuper_ipce import ipce_from_object, typelike_from_ipce
from zuper_typing import dataclass
from zuper_typing.annotations_tricks import is_Set
from zuper_typing.exceptions import ZValueError
from zuper_typing.my_dict import make_set
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_not_implemented_set():
    @dataclass
    class MyClass:
        f: Set[int]

    e = MyClass({1, 2, 3})
    assert_object_roundtrip(e)  # pragma: no cover


def test_is_set01():
    # the plain builtin is not recognized as a typing Set
    assert not is_Set(set)


def test_is_set02():
    T = Set
    # print(f"the set is {T}")
    assert is_Set(T)


def test_is_set03():
    assert is_Set(Set[int])


def test_rt():
    # typing.Set round-trips, but as a different (custom) type.
    T = Set[int]
    assert_type_roundtrip(T, expect_type_equal=False)


def test_rt_yes():
    # make_set produces the canonical type, so exact equality is expected.
    T = make_set(int)
    assert_type_roundtrip(T, expect_type_equal=True)


def test_rt2():
    T = make_set(int)
    assert_type_roundtrip(T)
@raises(ZValueError)
def test_not_implemented_set_2():
    """ Cannot use as dict if not ordered """

    @dataclass
    class A:
        a: int

    @dataclass
    class MyClass:
        f: Set[A]

    e = MyClass({A(1), A(2)})
    assert_object_roundtrip(e)  # pragma: no cover


def test_not_implemented_set_2_fixed():
    # With order=True the set elements can be sorted, so this succeeds.
    @dataclass(order=True)
    class A:
        a: int

    @dataclass
    class MyClass:
        f: Set[A]

    e = MyClass({A(1), A(2)})
    assert_object_roundtrip(e)  # pragma: no cover
def test_set_any():
    # A plain builtin set stored in an Any field.
    @dataclass
    class A:
        v: Any

    v = {"a"}
    a = A(v)
    assert_object_roundtrip(a)


def test_set_any2():
    # Same, but with the canonical make_set type; also inspect the
    # generated $schema along the way.
    @dataclass
    class A:
        v: Any

    v = {"a"}
    v = make_set(str)(v)
    a = A(v)
    ipce_v = ipce_from_object(v)
    # print(oyaml_dump(ipce_v))
    schema = ipce_v["$schema"]
    T = typelike_from_ipce(schema)
    # print(T)
    ipce = ipce_from_object(a)
    # print(oyaml_dump(ipce))
    assert_object_roundtrip(a)


def test_set_any3():
    v = {"a"}
    v = make_set(str)(v)
    assert_object_roundtrip(v)


def test_set_any4():
    v = {"a"}
    assert_object_roundtrip(v)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_sets.py
|
test_sets.py
|
from typing import cast, TypeVar
from zuper_ipce import IEDO, ipce_from_object, object_from_ipce
from zuper_typing import dataclass, Generic
from zuper_typing.exceptions import ZAssertionError
from zuper_typing.logging_util import ztinfo
from zuper_typing.zeneric2 import StructuralTyping
def test1():
    # Structural typing: C2 has a superset of C1's fields, so a C2 instance
    # counts as a C1 and C2 is a (structural) subclass of C1.
    @dataclass
    class C1(metaclass=StructuralTyping):
        a: int
        b: float

    @dataclass
    class C2(metaclass=StructuralTyping):
        a: int
        b: float
        c: str

    c1 = C1(1, 2)
    c2 = C2(1, 2, "a")

    assert_isinstance(c1, C1)
    assert_isinstance(c2, C2)
    assert_isinstance(c2, C1)
    assert_issubclass(C2, C1)
def test2():
    # Round-trip both the class and an instance; isinstance must hold in
    # all four original/reconstructed combinations.
    @dataclass
    class C4:
        a: int
        b: float

    c1 = C4(1, 2.0)

    C4_ = object_from_ipce(ipce_from_object(C4))
    c1_ = object_from_ipce(ipce_from_object(c1))

    assert_isinstance(c1, C4)
    # noinspection PyTypeChecker
    assert_isinstance(c1_, C4_), (c1_, C4_)
    # noinspection PyTypeChecker
    assert_isinstance(c1, C4_), (c1, C4_)
    assert_isinstance(c1_, C4)
def test3():
    # Deserializing a subscripted generic yields a *different* class object,
    # but isinstance/issubclass must still hold across the two.
    X = TypeVar("X")

    @dataclass
    class CB(Generic[X]):
        a: X

    C5 = CB[int]
    c1 = C5(1)

    iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)
    C5_ipce = ipce_from_object(C5)
    C5_ = cast(type, object_from_ipce(C5_ipce, iedo=iedo))
    ztinfo("test3", C5=C5, C5_=C5_)
    c1_ipce = ipce_from_object(c1)
    c1_ = object_from_ipce(c1_ipce, iedo=iedo)

    # different class
    assert C5 is not C5_

    # however isinstance should always work
    # noinspection PyTypeHints
    assert_isinstance(c1, C5)
    assert_isinstance(c1_, C5_)
    assert_isinstance(c1, C5_)
    # noinspection PyTypeHints
    assert_isinstance(c1_, C5)

    assert_issubclass(C5, C5_)
    assert_issubclass(C5, CB)
    # logger.info(f"CB {id(CB)}")
    # logger.info(type(CB))
    # logger.info(CB.mro())
    # logger.info(f"C5_ {id(C5_)}")
    # logger.info(type(C5_))
    # logger.info(C5_.mro())
    assert_issubclass(C5_, CB)
def assert_isinstance(a, C):
    """Raise a detailed ZAssertionError unless isinstance(a, C) holds."""
    if isinstance(a, C):
        return
    raise ZAssertionError(
        "not isinstance",
        a=a,
        type_a=type(a),
        type_type_a=type(type(a)),
        C=C,
        type_C=type(C),
    )
def assert_issubclass(A, C):
    """Raise a detailed ZAssertionError unless issubclass(A, C) holds."""
    if issubclass(A, C):
        return
    raise ZAssertionError(
        "not issubclass", A=A, C=C, type_A=type(A), type_C=type(C)
    )
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_isinstance_check.py
|
test_isinstance_check.py
|
import json
from typing import Optional
import cbor2
from nose.tools import assert_equal
from zuper_commons.fs import write_bytes_to_file, write_ustring_to_utf8_file
from zuper_ipce import (
IEDO,
IESO,
ipce_from_object,
ipce_from_typelike,
logger,
typelike_from_ipce,
)
from zuper_ipce.constants import IEDS
from zuper_ipce.conv_object_from_ipce import object_from_ipce, object_from_ipce_
from zuper_ipce.json_utils import (
decode_bytes_before_json_deserialization,
encode_bytes_before_json_serialization,
)
from zuper_ipce.pretty import pretty_dict
from zuper_ipce.utils_text import oyaml_dump
from zuper_typing import dataclass
from zuper_typing.get_patches_ import NotEquivalentException, assert_equivalent_types
from zuper_typing.logging_util import ztinfo
def assert_type_roundtrip(
    T, *, use_globals: Optional[dict] = None, expect_type_equal: bool = True
):
    """
    Serializes T to its IPCE schema (twice, to check determinism),
    deserializes it back, and verifies the reconstructed type generates the
    same schema. Returns the reconstructed type.

    expect_type_equal: also require the reconstructed type to be
    structurally equivalent to T.
    """
    if use_globals is None:
        use_globals = {}
    schema0 = ipce_from_typelike(T, globals0=use_globals)
    # why 2?
    schema = ipce_from_typelike(T, globals0=use_globals)
    save_object(T, ipce=schema)

    # logger.info(debug_print('schema', schema=schema))
    iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)
    T2 = typelike_from_ipce(schema, iedo=iedo)

    # TODO: in 3.6 does not hold for Dict, Union, etc.
    # if hasattr(T, '__qualname__'):
    #     assert hasattr(T, '__qualname__')
    #     assert T2.__qualname__ == T.__qualname__, (T2.__qualname__, T.__qualname__)

    # if False:
    #     rl.pp('\n\nschema', schema=json.dumps(schema, indent=2))
    #     rl.pp(f"\n\nT ({T}) the original one", **getattr(T, '__dict__', {}))
    #     rl.pp(f"\n\nT2 ({T2}) - reconstructed from schema ", **getattr(T2, '__dict__', {}))

    # pprint("schema", schema=json.dumps(schema, indent=2))

    try:
        # serialization must be deterministic
        assert_equal(schema, schema0)
        if expect_type_equal:
            # assert_same_types(T, T)
            # assert_same_types(T2, T)
            assert_equivalent_types(T, T2, assume_yes=set())
    except:
        ztinfo("assert_type_roundtrip", T=T, schema=schema, T2=T2)
        raise
    schema2 = ipce_from_typelike(T2, globals0=use_globals)
    if schema != schema2:  # pragma: no cover
        msg = "Different schemas"
        d = {
            "T": T,
            "T.qual": T.__qualname__,
            "TAnn": T.__annotations__,
            "Td": T.__dict__,
            "schema": schema0,
            "T2": T2,
            "T2.qual": T2.__qualname__,
            "TAnn2": T2.__annotations__,
            "Td2": T2.__dict__,
            "schema2": schema2,
        }
        msg = pretty_dict(msg, d)
        # print(msg)
        # dump both schemas for offline diffing
        with open("tmp1.json", "w") as f:
            f.write(json.dumps(schema, indent=2))
        with open("tmp2.json", "w") as f:
            f.write(json.dumps(schema2, indent=2))
        # assert_equal(schema, schema2)
        raise AssertionError(msg)
    return T2
#
#
# def debug(s, **kwargs):
# ss = pretty_dict(s, kwargs)
# logger.debug(ss)
def save_object(x: object, ipce: object):
    """
    Best-effort: dump (x, ipce) under test_objects/, keyed by the CBOR DAG
    hash of the IPCE encoding. Silently does nothing if zuper_ipcl is not
    installed.
    """
    # noinspection PyBroadException
    try:
        import zuper_ipcl
    except:
        return
    # print(f"saving {x}")
    # smoke check: the IPCE must be deserializable before persisting
    _x2 = object_from_ipce(ipce)
    ipce_bytes = cbor2.dumps(ipce, canonical=True, value_sharing=True)
    from zuper_ipcl.cid2mh import get_cbor_dag_hash_bytes
    from zuper_ipcl.debug_print_ import debug_print

    digest = get_cbor_dag_hash_bytes(ipce_bytes)

    dn = "test_objects"
    # if not os.path.exists(dn):
    #     os.makedirs(dn)
    fn = os.path.join(dn, digest + ".ipce.cbor.gz")
    if os.path.exists(fn):
        # a compressed copy already exists: nothing to do
        pass
    else:
        # NOTE(review): indentation reconstructed — all writes assumed to be
        # inside this else branch (skip everything when already saved).
        fn = os.path.join(dn, digest + ".ipce.cbor")
        write_bytes_to_file(ipce_bytes, fn)
        # fn = os.path.join(dn, digest + '.ipce.yaml')
        # write_ustring_to_utf8_file(yaml.dump(y1), fn)
        fn = os.path.join(dn, digest + ".object.ansi")
        s = debug_print(x)  # '\n\n as ipce: \n\n' + debug_print(ipce)
        write_ustring_to_utf8_file(s, fn)
        fn = os.path.join(dn, digest + ".ipce.yaml")
        s = oyaml_dump(ipce)
        write_ustring_to_utf8_file(s, fn)
import os
def assert_object_roundtrip(
    x1,
    *,
    use_globals: Optional[dict] = None,
    expect_equality=True,
    works_without_schema=True,
):
    """
    expect_equality: if __eq__ is preserved
    Will not be preserved if use_globals = {}
    because a new Dataclass will be created
    and different Dataclasses with the same fields do not compare equal.
    """
    if use_globals is None:
        use_globals = {}
    ieds = IEDS(use_globals, {})
    iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)

    y1 = ipce_from_object(x1, globals_=use_globals)
    y1_cbor: bytes = cbor2.dumps(y1)

    save_object(x1, ipce=y1)

    # go through a CBOR round trip as well
    y1 = cbor2.loads(y1_cbor)

    # and a JSON round trip (bytes need an encode/decode step)
    y1e = encode_bytes_before_json_serialization(y1)
    y1es = json.dumps(y1e, indent=2)

    y1esl = decode_bytes_before_json_deserialization(json.loads(y1es))
    # exercises the JSON deserialization path (result not otherwise used)
    y1eslo = object_from_ipce_(y1esl, object, ieds=ieds, iedo=iedo)

    x1b = object_from_ipce_(y1, object, ieds=ieds, iedo=iedo)

    x1bj = ipce_from_object(x1b, globals_=use_globals)

    check_equality(x1, x1b, expect_equality)

    if y1 != x1bj:  # pragma: no cover
        msg = pretty_dict(
            "Round trip not obtained",
            dict(x1bj_json=oyaml_dump(x1bj), y1_json=oyaml_dump(y1)),
        )
        # assert_equal(y1, x1bj, msg=msg)
        if "propertyNames" in y1["$schema"]:
            assert_equal(
                y1["$schema"]["propertyNames"],
                x1bj["$schema"]["propertyNames"],
                msg=msg,
            )

        with open("y1.json", "w") as f:
            f.write(json.dumps(y1, indent=2))
        with open("x1bj.json", "w") as f:
            f.write(json.dumps(x1bj, indent=2))
        raise AssertionError(msg)

    # once again, without schema
    ieso_false = IESO(with_schema=False)
    if works_without_schema:
        z1 = ipce_from_object(x1, globals_=use_globals, ieso=ieso_false)
        z2 = cbor2.loads(cbor2.dumps(z1))
        u1 = object_from_ipce_(z2, type(x1), ieds=ieds, iedo=iedo)
        check_equality(x1, u1, expect_equality)

    return locals()
import numpy as np
def check_equality(x1: object, x1b: object, expect_equality: bool) -> None:
    """Compare the original object with its round-tripped copy, both ways.

    When expect_equality is True, raises AssertionError on any mismatch;
    otherwise it only logs if the two unexpectedly compare equal.
    Type-vs-type and numpy-array comparisons are skipped.
    """
    # Reconstructed classes rarely compare equal to the originals: skip.
    if isinstance(x1b, type) and isinstance(x1, type):
        # logger.warning("Skipping type equality check for %s and %s" % (x1b, x1))
        return
    # numpy's == is elementwise and not a meaningful scalar test here.
    if isinstance(x1, np.ndarray):
        return

    next_eq_orig = x1b == x1
    orig_eq_next = x1 == x1b
    if expect_equality:  # pragma: no cover
        if not next_eq_orig:
            details = dict(
                x1b=x1b,
                x1b_=type(x1b),
                x1=x1,
                x1_=type(x1),
                x1b_eq=x1b.__eq__,
            )
            raise AssertionError(
                pretty_dict("Object equality (next == orig) not preserved", details)
            )
        if not orig_eq_next:
            details = dict(
                x1b=x1b,
                x1b_=type(x1b),
                x1=x1,
                x1_=type(x1),
                x1_eq=x1.__eq__,
            )
            raise AssertionError(
                pretty_dict("Object equality (orig == next) not preserved", details)
            )
    elif next_eq_orig and orig_eq_next:  # pragma: no cover
        msg = "You did not expect equality but they actually are"
        logger.info(msg)
        # raise Exception(msg)
def test_testing1() -> None:
    # Types with different field sets must NOT be considered equivalent.
    def make_one_field() -> type:
        @dataclass
        class C1:
            a: int

        return C1

    def make_two_fields() -> type:
        @dataclass
        class C1:
            a: int
            b: float

        return C1

    try:
        assert_equivalent_types(make_one_field(), make_two_fields(), set())
    except NotEquivalentException:
        pass
    else:
        raise Exception()
def test_testing2() -> None:
    # Same field name but different field types: not equivalent either.
    def make_int_field() -> type:
        @dataclass
        class C1:
            A: int

        return C1

    def make_float_field() -> type:
        @dataclass
        class C2:
            A: float

        return C2

    try:
        assert_equivalent_types(make_int_field(), make_float_field(), set())
    except NotEquivalentException:
        pass
    else:
        raise Exception()
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_utils.py
|
test_utils.py
|
from typing import Union

from zuper_typing.annotations_tricks import is_Union

# Sanity check at package-import time: the helper must recognize Union.
x = Union[int, str]
assert is_Union(x)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/__init__.py
|
__init__.py
|
from nose.tools import raises
from zuper_typing import dataclass
from zuper_typing.constants import PYTHON_37
from zuper_typing.exceptions import ZValueError
from zuper_typing.logging import logger
logger.info("")
from typing import Type, NewType, Dict, Any, Tuple
from zuper_ipce import object_from_ipce
from zuper_ipce import typelike_from_ipce
from zuper_ipce import ipce_from_typelike
from zuper_typing_tests.test_utils import relies_on_missing_features
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
# globals dict shared by the round-trip helpers in this module
symbols = {}

if PYTHON_37:

    @relies_on_missing_features
    def test_type1():
        # bare typing.Type: only defined on Python 3.7
        T = Type
        assert_type_roundtrip(T, use_globals=symbols)


def test_type2():
    T = type
    assert_type_roundtrip(T, use_globals=symbols)


def test_newtype():
    T = NewType("T", str)
    assert_type_roundtrip(T, use_globals=symbols)
def test_dict1():
    c = {}
    assert_object_roundtrip(c, use_globals=symbols)


def test_dict2():
    T = Dict[str, Any]
    # <class 'zuper_json.my_dict.Dict[str,Any]'>
    assert_type_roundtrip(T, use_globals=symbols, expect_type_equal=False)


@raises(ValueError)
def test_dict4():
    # An empty dict carries no schema, so it cannot be deserialized.
    ob = {}
    object_from_ipce(ob, Any)


def test_type__any():
    T = Any
    assert_type_roundtrip(T, use_globals=symbols)


@raises(ZValueError)
def test_type_any2():
    # A bare dict in an Any field is rejected.
    @dataclass
    class C:
        a: Any

    c = C(a={})
    assert_object_roundtrip(c, use_globals=symbols)


def test_type__any3():
    @dataclass
    class C:
        a: Any

    c = C(a=1)
    assert_object_roundtrip(c, use_globals=symbols)


def test_type__any4():
    assert_object_roundtrip(Any, use_globals=symbols)


def test_defaults1():
    # Field defaults (including a tuple default) must survive the schema
    # round trip.
    @dataclass
    class DummyImageSourceConfig:
        shape: Tuple[int, int] = (480, 640)
        images_per_episode: int = 120
        num_episodes: int = 10

    mj = ipce_from_typelike(DummyImageSourceConfig)
    # print(json.dumps(mj, indent=2))
    T2: Type[dataclass] = typelike_from_ipce(mj)
    # print(dataclasses.fields(T2))
    assert_type_roundtrip(DummyImageSourceConfig)


def test_type_slice():
    assert_object_roundtrip(slice)


def test_type_slice2():
    s = slice(1, 2, 3)
    assert_object_roundtrip(s)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_type.py
|
test_type.py
|
import typing
from dataclasses import fields
from numbers import Number
from typing import cast, ClassVar, Type
import yaml
from nose.tools import assert_equal, raises
from zuper_ipce import ipce_from_typelike, typelike_from_ipce
from zuper_ipce.constants import JSONSchema
from zuper_ipce.utils_text import oyaml_load
from zuper_ipce_tests.test_utils import assert_object_roundtrip, assert_type_roundtrip
from zuper_typing import dataclass, Generic
from zuper_typing.annotations_tricks import (
get_ClassVar_arg,
get_Type_arg,
is_ClassVar,
is_ForwardRef,
is_Type,
make_ForwardRef,
)
from zuper_typing.constants import enable_type_checking
from zuper_typing.monkey_patching_typing import debug_print_str
from zuper_typing.subcheck import can_be_used_as2
from zuper_typing.zeneric2 import resolve_types
from zuper_typing_tests.test_utils import known_failure
def test_basic():
    # Zeneric generics get descriptive __name__s like "C[U]" / "C[int]".
    U = TypeVar("U")
    T = Generic[U]
    # print(T.mro())
    assert_equal(T.__name__, "Generic[U]")
    # print("inheriting C(T)")

    @dataclass
    class C(T):
        ...

    # print(C.mro())
    assert_equal(C.__name__, "C[U]")
    # print("subscribing C[int]")
    D = C[int]
    assert_equal(D.__name__, "C[int]")


@raises(TypeError)
def test_dataclass_can_preserve_init():
    # Instantiating the unsubscripted generic must fail.
    X = TypeVar("X")

    @dataclass
    class M(Generic[X]):
        x: int

    M(x=2)
def test_serialize_generic_typevar():
    X = typing.TypeVar("X", bound=Number)

    @dataclass
    class MN1(Generic[X]):
        """ A generic class """

        x: X

    assert_type_roundtrip(MN1)
    # noinspection PyDataclass
    f1 = fields(MN1)
    assert f1[0].type == X

    # there was a bug with modifying this
    MN1int = MN1[int]
    # subscripting must not mutate the original class's fields
    # noinspection PyDataclass
    f1b = fields(MN1)
    assert f1b[0].type == X
    assert f1 == f1b

    # M2 = assert_type_roundtrip(M1, {})
    assert_type_roundtrip(MN1int)


def test_serialize_generic():
    X = typing.TypeVar("X", bound=Number)

    @dataclass
    class MP1(Generic[X]):
        """ A generic class """

        x: X

    M1int = MP1[int]
    assert_type_roundtrip(MP1)
    assert_type_roundtrip(M1int)

    m1a = M1int(x=2)
    m1b = M1int(x=3)

    s = ipce_from_typelike(MP1)
    # print(json.dumps(s, indent=3))
    M2 = typelike_from_ipce(s)
    # noinspection PyUnresolvedReferences
    M2int = M2[int]
    assert_equal(MP1.__module__, M2.__module__)

    m2a = M2int(x=2)
    m2b = M2int(x=3)
    # print(m1a)
    # print(m2a)
    # print(type(m1a))
    # print(type(m2a))
    # print(type(m1a).__module__)
    # print(type(m2a).__module__)
    # equality must hold across original and reconstructed classes
    assert m1a == m2a
    assert m2a == m1a

    assert m2b == m1b
    assert m1b == m2b

    assert m1b != m1a
    assert m2b != m2a
    # assert_object_roundtrip(M, {'M': M})
def test_serialize_generic_optional():
    # @dataclass
    # class Animal:
    #     pass
    X = typing.TypeVar("X", bound=Number)

    @dataclass
    class MR1(Generic[X]):
        """ A generic class """

        x: X
        xo: Optional[X] = None

    M1int = MR1[int]

    m1a = M1int(x=2)
    m1b = M1int(x=3)

    s = ipce_from_typelike(MR1)
    # print("M1 schema: \n" + oyaml_dump(s))
    M2 = typelike_from_ipce(s)
    # the optional field must survive the round trip
    assert "xo" in MR1.__annotations__, MR1.__annotations__
    assert "xo" in M2.__annotations__, M2.__annotations__
    assert_equal(sorted(MR1.__annotations__), sorted(M2.__annotations__))
    # k = ("<class 'zuper_json.zeneric2.test_serialize_generic_optional.<locals>.M1'>", "{~X<Number: <class 'int'>}")
    # print(f'cached: {MakeTypeCache.cache}')
    # c = MakeTypeCache.cache[k]
    # print(f'cached: {c.__annotations__}')
    # MakeTypeCache.cache = {}
    # noinspection PyUnresolvedReferences
    M2int = M2[int]
    assert_equal(MR1.__module__, M2.__module__)
    assert_equal(sorted(M1int.__annotations__), sorted(M2int.__annotations__))

    # noinspection PyUnresolvedReferences
    M2int = M2[int]
    assert_equal(MR1.__module__, M2.__module__)
    assert_equal(sorted(M1int.__annotations__), sorted(M2int.__annotations__))

    assert_type_roundtrip(MR1)
    assert_type_roundtrip(M1int)

    m2a = M2int(x=2)
    m2b = M2int(x=3)
    assert_equal(sorted(m1a.__dict__), sorted(m2a.__dict__))

    assert m1a == m2a
    assert m2a == m1a
    assert m2b == m1b
    assert m1b == m2b
    assert m1b != m1a
    assert m2b != m2a
from typing import Optional, TypeVar
def test_more():
    # Self-referential generic via a string annotation.
    X = TypeVar("X")

    @dataclass
    class Entity0(Generic[X]):
        data0: X
        parent: "Optional[Entity0[X]]" = None

    resolve_types(Entity0)
    # print(Entity0.__annotations__["parent"].__repr__())
    # after resolve_types, the forward reference is a real type
    assert not isinstance(Entity0.__annotations__["parent"], str)
    # raise Exception()
    schema = ipce_from_typelike(Entity0)
    # print(oyaml_dump(schema))
    T = typelike_from_ipce(schema)
    # print(T.__annotations__)
    assert_type_roundtrip(Entity0)
    EI = Entity0[int]
    assert_equal(EI.__annotations__["parent"].__args__[0].__name__, "Entity0[int]")
    assert_type_roundtrip(EI)
    x = EI(data0=3, parent=EI(data0=4))
    assert_object_roundtrip(x)  # {'Entity': Entity, 'X': X})


@known_failure
def test_more_direct():
    """ parent should be declared as Optional[X] rather than X"""
    # language=yaml
    # NOTE(review): YAML literal indentation reconstructed; keys nested
    # under definitions/properties as the mappings require.
    schema = oyaml_load(
        """
$id: http://invalid.json-schema.org/Entity0[X]#
$schema: http://json-schema.org/draft-07/schema#
__module__: zuper_json.zeneric2
__qualname__: test_more.<locals>.Entity0
definitions:
  X: {$id: 'http://invalid.json-schema.org/Entity0[X]/X#', $schema: 'http://json-schema.org/draft-07/schema#'}
description: 'Entity0[X](data0: ~X, parent: ''Optional[Entity0[X]]'' = None)'
properties:
  data0: {$ref: 'http://invalid.json-schema.org/Entity0[X]/X#'}
  parent: {$ref: 'http://invalid.json-schema.org/Entity0[X]#', default: null}
required: [data0]
title: Entity0[X]
type: object
""",
        Loader=yaml.SafeLoader,
    )
    schema = cast(JSONSchema, schema)
    _T = typelike_from_ipce(schema)
def test_more2():
    # One generic nested inside another, both self/cross-referential.
    X = TypeVar("X")
    Y = TypeVar("Y")

    @dataclass
    class Entity11(Generic[X]):
        data0: X
        parent: "Optional[Entity11[X]]" = None

    ipce_from_typelike(Entity11)
    EI = Entity11[int]
    assert_type_roundtrip(Entity11)
    assert_type_roundtrip(EI)

    @dataclass
    class Entity42(Generic[Y]):
        parent: Optional[Entity11[Y]] = None

    ipce_from_typelike(Entity42)
    assert_type_roundtrip(Entity42)  # boom
    E2I = Entity42[int]
    assert_type_roundtrip(E2I)
    x = E2I(parent=EI(data0=4))
    # print(json.dumps(type_to_schema(type(x), {}), indent=2))
    assert_object_roundtrip(
        x,
        use_globals={"Entity11": Entity11, "Entity42": Entity42},
        works_without_schema=False,
    )


def test_more2b():
    # Same structure as test_more2; also checks __doc__ stays None.
    X = TypeVar("X")
    Y = TypeVar("Y")

    class E0(Generic[X]):
        pass

    assert_equal(E0.__doc__, None)

    @dataclass
    class Entity12(Generic[X]):
        data0: X
        parent: "Optional[Entity12[X]]" = None

    assert_equal(Entity12.__doc__, None)

    @dataclass
    class Entity13(Generic[Y]):
        parent: Optional[Entity12[Y]] = None

    assert_equal(Entity13.__doc__, None)
    assert_type_roundtrip(Entity12)
    assert_type_roundtrip(Entity13)
    EI = Entity12[int]
    # print(EI.__annotations__['parent'])
    E2I = Entity13[int]
    assert_type_roundtrip(EI)
    assert_type_roundtrip(E2I)
    parent2 = E2I.__annotations__["parent"]
    # print(parent2)
    x = E2I(parent=EI(data0=4))
    # print(json.dumps(type_to_schema(type(x), {}), indent=2))
    # print(type(x).__name__)
    assert_object_roundtrip(
        x,
        use_globals={"Entity12": Entity12, "Entity13": Entity13},
        works_without_schema=False,
    )


def test_isClassVar():
    X = TypeVar("X")
    A = ClassVar[Type[X]]
    assert is_ClassVar(A)
    assert get_ClassVar_arg(A) == Type[X]


def test_isType():
    X = TypeVar("X")
    A = Type[X]
    # print(type(A))
    # print(A.__dict__)
    assert is_Type(A)
    assert get_Type_arg(A) == X
def test_more3_simpler():
    # ClassVar[Type[X]] alongside a regular field.
    X = TypeVar("X")

    @dataclass
    class MyClass(Generic[X]):
        XT: ClassVar[Type[X]]
        a: int

    ipce = ipce_from_typelike(MyClass)
    # print(oyaml_dump(ipce))
    assert_type_roundtrip(MyClass)
    #
    # # type_to_schema(MyClass, {})
    C = MyClass[int]
    assert_type_roundtrip(C)


def test_more3b_simpler():
    # ClassVar[Type[X]] as the only member.
    X = TypeVar("X")

    @dataclass
    class MyClass(Generic[X]):
        XT: ClassVar[Type[X]]

    ipce = ipce_from_typelike(MyClass)
    # print(oyaml_dump(ipce))
    assert_type_roundtrip(MyClass)
    #
    # # type_to_schema(MyClass, {})
    C = MyClass[int]
    assert_type_roundtrip(C)


def test_more3():
    # class Base:
    #     pass
    # Subscripting fills in the ClassVar type arguments (XT=int, YT=str),
    # and methods can use them at runtime.
    X = TypeVar("X")
    Y = TypeVar("Y")

    @dataclass
    class MyClass(Generic[X, Y]):
        a: X
        XT: ClassVar[Type[X]]
        YT: ClassVar[Type[Y]]

        def method(self, x: X) -> Y:
            return type(self).YT(x)

    assert_type_roundtrip(MyClass)
    # type_to_schema(MyClass, {})
    C = MyClass[int, str]
    assert_type_roundtrip(C)
    # print(f'Annotations for C: {C.__annotations__}')
    assert_equal(C.__annotations__["XT"], ClassVar[type])
    assert_equal(C.XT, int)
    assert_equal(C.__annotations__["YT"], ClassVar[type])
    assert_equal(C.YT, str)
    schema = ipce_from_typelike(C)
    # print(json.dumps(schema, indent=2))
    typelike_from_ipce(schema)
    # print(f'Annotations for C2: {C2.__annotations__}')
    e = C(2)
    r = e.method(1)
    assert r == "1"
    assert_object_roundtrip(e)
def test_entity():
    # A richer self-referential generic with two recursive fields; also
    # checks the generated __name__/__qualname__ of the subscripted class.
    X = TypeVar("X")
    # SchemaCache.key2schema = {}

    @dataclass
    class SecurityModel2:
        # guid: Any
        owner: str
        arbiter: str

    @dataclass
    class Entity43(Generic[X]):
        data0: X
        guid: str
        security_model: SecurityModel2
        parent: "Optional[Entity43[X]]" = None
        forked: "Optional[Entity43[X]]" = None

    # noinspection PyDataclass
    fs = fields(Entity43)
    f0 = fs[3]
    assert f0.name == "parent"
    # print(f0)
    assert f0.default is None
    assert_equal(Entity43.__name__, "Entity43[X]")
    qn = Entity43.__qualname__
    assert "Entity43[X]" in qn, qn
    T = ipce_from_typelike(Entity43)
    C = typelike_from_ipce(T)
    # print(oyaml_dump(T))
    # print(C.__annotations__)
    # logger.info(f'SchemaCache: {pretty_dict("", SchemaCache.key2schema)}')
    # resolve_types(Entity2, locals())
    # assert_type_roundtrip(Entity2, locals())
    assert_type_roundtrip(Entity43)
    Entity43_int = Entity43[int]
    assert_equal(Entity43_int.__name__, "Entity43[int]")
    qn = Entity43_int.__qualname__
    assert "Entity43[int]" in qn, qn

    # logger.info("\n\nIgnore above\n\n")
    assert_type_roundtrip(Entity43_int)


@known_failure
def test_entity0():
    """ Wrong type as in test_entity. parent should be defined as Optional[Entity2[X]]"""
    # language=yaml
    # NOTE(review): YAML literal indentation reconstructed.
    schema = oyaml_load(
        """
$id: http://invalid.json-schema.org/Entity2[X]#
$schema: http://json-schema.org/draft-07/schema#
definitions:
  X: {$id: 'http://invalid.json-schema.org/Entity2[X]/X#', $schema: 'http://json-schema.org/draft-07/schema#'}
description:
properties:
  parent: {$ref: 'http://invalid.json-schema.org/Entity2[X]#', default: null}
required: [data0, guid, security_model]
__qualname__: QUAL
__module__: module
title: Entity2[X]
type: object
""",
        Loader=yaml.SafeLoader,
    )
    schema = cast(JSONSchema, schema)
    _C = typelike_from_ipce(schema)
    # print(C.__annotations__)
    #
    # assert not is_ForwardRef(C.__annotations__["parent"].__args__[0])
def test_classvar1():
    @dataclass
    class C:
        v: ClassVar[int] = 1

    assert_type_roundtrip(C)
    # schema = type_to_schema(C, {})
    # C2: C = schema_to_type(schema, {}, {})
    #
    # assert_equal(C.v, C2.v)


def test_classvar2():
    # ClassVar typed by a TypeVar: the value must survive the round trip.
    X = TypeVar("X", bound=int)

    @dataclass
    class CG(Generic[X]):
        v: ClassVar[X] = 1

    C = CG[int]
    schema = ipce_from_typelike(C)
    C2 = cast(Type[CG[int]], typelike_from_ipce(schema))
    assert_type_roundtrip(C)
    assert_type_roundtrip(CG)

    assert_equal(C.v, C2.v)


@raises(TypeError)
def test_check_bound1():
    # Subscripting with a type that violates the TypeVar bound must fail.
    @dataclass
    class Animal:
        a: int

    assert not can_be_used_as2(int, Animal).result
    assert not issubclass(int, Animal)

    X = TypeVar("X", bound=Animal)

    @dataclass
    class CG(Generic[X]):
        a: X

    _ = CG[int]  # boom, int !< Animal


@raises(TypeError)
def test_check_bound2():
    @dataclass
    class Animal:
        a: int

    class Not:
        b: int

    assert not can_be_used_as2(Not, Animal).result

    X = TypeVar("X", bound=Animal)

    @dataclass
    class CG(Generic[X]):
        a: X

    _ = CG[Not]  # boom, Not !< Animal
    # assert_type_roundtrip(CG, {})
    # assert_type_roundtrip(CG[int], {})
    #


if enable_type_checking:

    @raises(ValueError, TypeError)  # typerror in 3.6
    def test_check_value():
        # NOTE(review): Generic[()] looks like extraction garbling (possibly
        # Generic[X]); kept verbatim — either way the decorator expects an error.
        @dataclass
        class CG(Generic[()]):
            a: int

        CG[int](a="a")
def test_signing():
    """Round-trip a generic 'signed message' type and a concrete instance of it."""
    X = TypeVar("X")
    @dataclass
    class PublicKey1:
        key: bytes
    @dataclass
    class Signed1(Generic[X]):
        key: PublicKey1
        signature_data: bytes
        data: X
    s = Signed1[str](key=PublicKey1(key=b""), signature_data=b"xxx", data="message")
    assert_type_roundtrip(Signed1)
    assert_type_roundtrip(Signed1[str])
    assert_object_roundtrip(s)
def test_derived1():
    """Subclassing a concrete generic: subclass docstring kept, parent has none."""
    X = TypeVar("X")
    @dataclass
    class Signed3(Generic[X]):
        data: X
    S = Signed3[int]
    # logger.info(fields(S))
    class Y(S):
        """hello"""
        pass
    # assert S.__doc__ in ['Signed3[int](data:int)', 'Signed3[int](data: int)']
    assert S.__doc__ in [None], S.__doc__
    assert_equal(Y.__doc__, """hello""")
    assert_type_roundtrip(Y)
    assert_type_roundtrip(Signed3)
def test_derived2_no_doc():
    """Same as test_derived1 but the subclass has no docstring."""
    X = TypeVar("X")
    @dataclass
    class Signed3(Generic[X]):
        data: X
    S = Signed3[int]
    class Z(S):
        pass
    assert_type_roundtrip(Z)
    assert_type_roundtrip(S)
def test_derived2_subst():
    """Self-referencing generic: after substitution and an IPCE round-trip the
    'parent' annotation must be a concrete type, not a ForwardRef."""
    X = TypeVar("X")
    # print(dir(Generic))
    # print(dir(typing.GenericMeta))
    # print(Generic.__getitem__)
    @dataclass
    class Signed3(Generic[X]):
        data: X
        parent: Optional["Signed3[X]"] = None
    _ = Signed3[int]
    # resolve_types(Signed3, locals())
    S = Signed3[int]
    # pprint(**S.__annotations__)
    # Now we actually have it
    # assert 'X' not in str(S.__annotations__), S.__annotations__
    # assert_type_roundtrip(S, {})
    @dataclass
    class Y(S):
        pass
    # pprint(**Y.__annotations__)
    schema = ipce_from_typelike(Y)
    # print(oyaml_dump(schema))
    TY = typelike_from_ipce(schema)
    # pprint("annotations", **TY.__annotations__)
    P = TY.__annotations__["parent"]
    assert not is_ForwardRef(P)
    # raise Exception()
    # raise Exception()
    assert_type_roundtrip(Y)
def test_derived3_subst():
    """Optional[X] field: type and instance round-trip after substitution."""
    X = TypeVar("X")
    @dataclass
    class Signed3(Generic[X]):
        data: Optional[X]
    # noinspection PyDataclass
    # print(fields(Signed3))
    assert_type_roundtrip(Signed3)
    S = Signed3[int]
    assert_type_roundtrip(S)
    x = S(data=2)
    assert_object_roundtrip(x)
def test_entity_field():
    """Self-referencing Optional field with a default."""
    @dataclass
    class Entity44:
        parent: "Optional[Entity44]" = None
    assert_type_roundtrip(Entity44)
def test_entity_field2():
    """Self-referencing Optional field without a default."""
    @dataclass
    class Entity45:
        parent: "Optional[Entity45]"
    assert_type_roundtrip(Entity45)
def test_entity_field3():
    """Self-referencing field on a generic class."""
    X = TypeVar("X")
    @dataclass
    class Entity46(Generic[X]):
        parent: "Optional[Entity46[X]]"
    assert_type_roundtrip(Entity46)
def test_classvar_not_type1():
    """ClassVar of a plain value, with default."""
    @dataclass
    class Entity47:
        parent: ClassVar[int] = 2
    assert_type_roundtrip(Entity47)
def test_classvar_not_type2():
    """ClassVar of a plain value, no default."""
    @dataclass
    class Entity48:
        parent: ClassVar[int]
    assert_type_roundtrip(Entity48)
def test_classvar_type_not_typvar():
    """ClassVar holding a Type[...] that is not a TypeVar."""
    @dataclass
    class Entity49:
        parent: ClassVar[Type[int]]
    assert_type_roundtrip(Entity49)
# XXX: __post_init__ only used for make_type(cls, bindings), rather than for dataclass
# def test_post_init_preserved():
# C = 42
#
# @dataclass
# class Entity60:
# x: int
#
# def __post_init__(self):
# self.x = C
#
# a = Entity60('a')
# print(Entity60.__post_init__)
# a = Entity60(1)
# assert a.x == C
def test_post_init_preserved():
    """__post_init__ defined on a generic must still run on the concretized class."""
    C = 42
    X = TypeVar("X")
    @dataclass
    class Entity60(Generic[X]):
        x: int
        def __post_init__(self):
            self.x = C
    Concrete = Entity60[int]
    a = Concrete(1)
    assert a.x == C
def test_post_init_preserved2():
    """Constructor type checking applies to concretized generics (when enabled)."""
    X = TypeVar("X")
    @dataclass
    class Entity61(Generic[X]):
        x: int
    Concrete = Entity61[int]
    if enable_type_checking:  # pragma: no cover
        try:
            Concrete("a")
        except ValueError:
            pass
        else:  # pragma: no cover
            raise Exception()
    else:  # pragma: no cover
        Concrete("a")
# When runtime type checking is enabled this test is marked as a known failure.
f = known_failure if enable_type_checking else (lambda x: x)
@f
def test_type_checking():
    """Constructor type checking on a plain (non-generic) dataclass."""
    @dataclass
    class Entity62:
        x: int
    if enable_type_checking:  # pragma: no cover
        try:
            Entity62("a")
        except ValueError:
            pass
        else:  # pragma: no cover
            raise Exception()
    else:  # pragma: no cover
        Entity62("a")
def test_same_forward():
    """make_ForwardRef interns by name: same name -> identical object."""
    assert make_ForwardRef("one") is make_ForwardRef("one")
def test_debug_print_str_multiple_lines():
    """Smoke test: debug_print_str copes with embedded newlines."""
    debug_print_str("a\nb", prefix="prefix")
# Allow quick manual runs of the entity tests.
if __name__ == "__main__":
    test_entity_field()
    test_entity_field2()
    test_entity_field3()
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_zeneric2.py
|
test_zeneric2.py
|
from datetime import datetime
from nose.tools import raises
from zuper_ipce_tests.test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_datetime01():
    """The datetime type itself round-trips through IPCE."""
    assert_type_roundtrip(datetime)
@raises(ValueError)
def test_datetime02():
    """Naive (timezone-less) datetimes are rejected by serialization."""
    d = datetime.now()
    assert_object_roundtrip(d)
import pytz
def test_datetime03():
    """Timezone-aware datetimes round-trip."""
    d = datetime(2010, 1, 1, 12, 12, 12)
    timezone = pytz.timezone("America/Los_Angeles")
    d_aware = timezone.localize(d)
    assert_object_roundtrip(d_aware)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_datetime.py
|
test_datetime.py
|
from nose.tools import raises
from zuper_ipce.ipce_spec import assert_canonical_ipce
@raises(ValueError)
def test_spec1():
    """Canonical IPCE forbids the IPLD link key '/'."""
    x = {"/": ""}
    assert_canonical_ipce(x)
@raises(ValueError)
def test_spec2():
    """Canonical IPCE forbids a leftover '$links' section."""
    x = {"$links": {}}
    assert_canonical_ipce(x)
@raises(ValueError)
def test_spec3():
    """Canonical IPCE forbids a '$self' section."""
    x = {"$self": {}}
    assert_canonical_ipce(x)
@raises(ValueError)
def test_spec4():
    """Tuples are not valid IPCE."""
    x = (1, 2)
    assert_canonical_ipce(x)
def test_spec4ok():
    """Lists are valid IPCE."""
    x = [1, 2]
    assert_canonical_ipce(x)
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_spec.py
|
test_spec.py
|
from typing import Dict, List, Optional, Set, Tuple, TypeVar, Union
from nose.tools import assert_equal
from zuper_ipce import ipce_from_typelike, typelike_from_ipce
from zuper_typing import dataclass, Generic
from zuper_typing.zeneric2 import resolve_types
from .test_utils import assert_object_roundtrip, assert_type_roundtrip
def test_forward1_ok_no_locals_if_using_name():
    """Self-referencing dataclass via string annotation round-trips without
    passing globals explicitly."""
    # """
    # *USED TO* Fail because there is no "C" in the context
    # if we don't evaluate locals().
    # l
    # """
    @dataclass
    class C:
        a: int
        b: Optional["C"] = None
    e = C(12, C(1))
    assert_object_roundtrip(e)
def test_forward1():
    """Self-reference inside Optional[...], with C supplied via use_globals."""
    @dataclass
    class C:
        a: int
        b: Optional["C"] = None
    e = C(12, C(1))
    assert_object_roundtrip(e, use_globals={"C": C})
def test_forward2():
    """Entire annotation is a string ("Optional[C]")."""
    @dataclass
    class C:
        a: int
        b: "Optional[C]" = None
    # noinspection PyTypeChecker
    e = C(12, C(1))
    assert_object_roundtrip(e, use_globals={"C": C})
def test_forward3():
    """String annotation without a default value."""
    @dataclass
    class C:
        a: int
        b: "Optional[C]"
    e = C(12, C(1, None))
    assert_object_roundtrip(e, use_globals={"C": C})
def test_forward04():
    """Forward reference as Dict value type."""
    @dataclass
    class C:
        a: int
        b: "Dict[str, C]"
    assert_type_roundtrip(C, expect_type_equal=False)
def test_forward05():
    """Forward reference as List element type."""
    @dataclass
    class C:
        a: int
        b: "List[C]"
    assert_type_roundtrip(C, expect_type_equal=False)
def test_forward05b():
    """Forward reference as Set element type."""
    @dataclass
    class C:
        a: int
        b: "Set[C]"
    assert_type_roundtrip(C, expect_type_equal=False)
def test_forward06():
    """Forward reference inside a Union."""
    @dataclass
    class C:
        a: int
        b: "Union[int, C]"
    assert_type_roundtrip(C, expect_type_equal=False)
def test_forward07():
    """Forward reference inside a fixed-length Tuple."""
    @dataclass
    class C:
        a: int
        b: "Tuple[C, int]"
    assert_type_roundtrip(C, expect_type_equal=False)
def test_forward08():
    """Forward reference inside a variadic Tuple."""
    @dataclass
    class C:
        a: int
        b: "Tuple[C, ...]"
    assert_type_roundtrip(C, expect_type_equal=False)
# if USE_REMEMBERED_CLASSES:
# f = lambda x: x
# else:
# f = known_failure
#
#
# @f
def test_forward09():
    """Generic B[X] instantiated with a mutually-referencing class C; the
    qualnames (test_forward09.<locals>.*) must survive the IPCE round-trip."""
    X = TypeVar("X")
    @dataclass
    class B(Generic[X]):
        # b: Optional[X]
        b: X
    @dataclass
    class A:
        pass
    BA = B[A]
    assert_equal(BA.__doc__, None)
    s = ipce_from_typelike(BA)
    # print(oyaml_dump(s))
    @dataclass
    class C:
        a: int
        b: "B[C]"
        __depends__ = (B,)
    resolve_types(C, refs=(B,))
    # print("\n\n\n\n")
    Cb = C.__annotations__["b"]
    # print("Cb: " + Cb.__qualname__)
    assert "forward09" in C.__qualname__
    assert "forward09" in C.__annotations__["b"].__qualname__
    ipce_Cb = ipce_from_typelike(Cb)
    # logger.info("ipce_CB: \n" + oyaml_dump(ipce_Cb))
    assert ipce_Cb["__qualname__"] == "test_forward09.<locals>.B[C]"
    assert ipce_Cb["properties"]["b"]["__qualname__"] == "test_forward09.<locals>.C"
    Cb2 = typelike_from_ipce(ipce_Cb)
    Cb2_C = Cb2.__annotations__["b"]
    # print(Cb2_C)
    assert_equal(Cb2_C.__qualname__, "test_forward09.<locals>.C")
    # assert_type_roundtrip(Cb, {}, expect_type_equal=False)
    # assert_type_roundtrip(C, {}, expect_type_equal=False)
    assert_type_roundtrip(B)
    assert_type_roundtrip(BA)
# Allow quick manual runs.
if __name__ == "__main__":
    test_forward05b()
|
zuper-ipce-z5
|
/zuper-ipce-z5-5.3.0.tar.gz/zuper-ipce-z5-5.3.0/src/zuper_ipce_tests/test_forward.py
|
test_forward.py
|
from setuptools import find_packages, setup
def get_version(filename):
    """Extract ``__version__`` from a Python source file without importing it.

    :param filename: path to the file declaring ``__version__ = "<literal>"``.
    :returns: the version string.
    :raises ValueError: if no ``__version__`` line is found, or its value is None.
    """
    import ast

    with open(filename) as f:
        for line in f:
            if line.startswith("__version__"):
                # Evaluate only the literal on the right-hand side.
                # (Replaces the deprecated ast.Constant.s alias, slated for
                # removal in modern Python versions.)
                node = ast.parse(line).body[0]
                version = ast.literal_eval(node.value)
                break
        else:
            raise ValueError("No version found in %r." % filename)
    if version is None:
        raise ValueError(filename)
    return version
# Runtime dependencies. NOTE(review): "numpy" is listed twice; harmless but redundant.
install_requires = [
    "oyaml",
    "pybase64",
    "PyYAML",
    "validate-email",
    "mypy-extensions",
    "typing-extensions",
    "nose",
    "coverage>=1.4.33",
    "jsonschema",
    "cbor2>=5,<6",
    "numpy",
    "base58<2.0,>=1.0.2",
    "zuper-commons-z6",
    "zuper-typing-z6",
    "frozendict",
    "pytz",
    "termcolor",
    "numpy",
    "py-multihash",
    "py-cid",
]
import sys
# dataclasses is in the stdlib from Python 3.7; backport needed below that.
system_version = tuple(sys.version_info)[:3]
if system_version < (3, 7):
    install_requires.append("dataclasses")
version = get_version(filename="src/zuper_ipce/__init__.py")
# Distribution-name suffix, e.g. zuper-ipce-z6.
line = "z6"
setup(
    name=f"zuper-ipce-{line}",
    package_dir={"": "src"},
    packages=find_packages("src"),
    version=version,
    zip_safe=False,
    # Command-line converters between JSON/CBOR/YAML streams.
    entry_points={
        "console_scripts": [
            # 'zj = zuper_ipce.zj:zj_main',
            "json2cbor = zuper_ipce.json2cbor:json2cbor_main",
            "cbor2json = zuper_ipce.json2cbor:cbor2json_main",
            "cbor2yaml = zuper_ipce.json2cbor:cbor2yaml_main",
        ]
    },
    install_requires=install_requires,
)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/setup.py
|
setup.py
|
import io
import json
import select
import time
import traceback
from io import BufferedReader
from json import JSONDecodeError
from typing import Iterator
import base58
import cbor2
from cbor2 import CBORDecodeEOF
from . import logger
from .json_utils import (
decode_bytes_before_json_deserialization,
encode_bytes_before_json_serialization,
)
from .utils_text import oyaml_dump
__all__ = [
"read_cbor_or_json_objects",
"json2cbor_main",
"cbor2json_main",
"cbor2yaml_main",
"read_next_cbor",
"read_next_either_json_or_cbor",
"tag_hook",
]
def json2cbor_main() -> None:
    """CLI entry point: read JSON/CBOR objects from stdin, emit CBOR on stdout."""
    out = open("/dev/stdout", "wb", buffering=0)
    raw = open("/dev/stdin", "rb", buffering=0)
    # noinspection PyTypeChecker
    stream = BufferedReader(raw, buffer_size=1)
    for ob in read_cbor_or_json_objects(stream):
        out.write(cbor2.dumps(ob))
        out.flush()
def cbor2json_main() -> None:
    """CLI entry point: read CBOR objects from stdin, emit newline-separated
    JSON on stdout (bytes/datetime/Decimal leaves string-encoded first)."""
    out = open("/dev/stdout", "wb", buffering=0)
    source = open("/dev/stdin", "rb", buffering=0)
    for ob in read_cbor_objects(source):
        encodable = encode_bytes_before_json_serialization(ob)
        out.write(json.dumps(encodable).encode("utf-8"))
        out.write(b"\n")
        out.flush()
def cbor2yaml_main() -> None:
    """CLI entry point: read CBOR objects from stdin, emit YAML on stdout."""
    out = open("/dev/stdout", "wb")
    source = open("/dev/stdin", "rb")
    for ob in read_cbor_objects(source):
        out.write(oyaml_dump(ob).encode("utf-8"))
        out.write(b"\n")
        out.flush()
def read_cbor_or_json_objects(f, timeout=None) -> Iterator:
    """Yield objects from binary file f, each either CBOR or a line of JSON,
    stopping cleanly at EOF. A TimeoutError from the reader propagates."""
    while True:
        try:
            yield read_next_either_json_or_cbor(f, timeout=timeout)
        except StopIteration:
            return
        except TimeoutError:
            raise
def read_cbor_objects(f, timeout=None) -> Iterator:
    """ Reads CBOR objects from the binary file f until EOF.

    (Docstring fixed: unlike read_cbor_or_json_objects, this reads CBOR only.)
    """
    while True:
        try:
            ob = read_next_cbor(f, timeout=timeout)
            yield ob
        except StopIteration:
            break
        except TimeoutError:
            raise
def read_next_either_json_or_cbor(f, timeout=None, waiting_for: str = None) -> dict:
    """ Read the next object from binary file f, auto-detecting the format:
    a leading space, newline or '{' means a line of JSON; anything else is
    decoded as CBOR (tag 42 translated by tag_hook).

    Raises StopIteration if it is EOF.
    Raises TimeoutError if over timeout. `waiting_for` is appended to messages."""
    fs = [f]
    t0 = time.time()
    intermediate_timeout = 3.0
    while True:
        try:
            readyr, readyw, readyx = select.select(fs, [], fs, intermediate_timeout)
        except io.UnsupportedOperation:
            # f has no fileno() (e.g. an in-memory buffer): assume data is ready.
            break
        if readyr:
            break
        elif readyx:
            logger.warning("Exceptional condition on input channel %s" % readyx)
        else:
            delta = time.time() - t0
            if (timeout is not None) and (delta > timeout):
                msg = "Timeout after %.1f s." % delta
                logger.error(msg)
                raise TimeoutError(msg)
            else:
                msg = "I have been waiting %.1f s." % delta
                if timeout is None:
                    msg += " I will wait indefinitely."
                else:
                    msg += " Timeout will occurr at %.1f s." % timeout
                if waiting_for:
                    msg += " " + waiting_for
                logger.warning(msg)
    first = f.peek(1)[:1]
    if len(first) == 0:
        msg = "Detected EOF on %s." % f
        if waiting_for:
            msg += " " + waiting_for
        raise StopIteration(msg)
    # logger.debug(f'first char is {first}')
    if first in [b" ", b"\n", b"{"]:
        line = f.readline()
        line = line.strip()
        if not line:
            msg = "Read empty line. Re-trying."
            logger.warning(msg)
            # Bug fix: propagate timeout/waiting_for on retry (previously dropped,
            # so a retry silently reset the caller's timeout contract).
            return read_next_either_json_or_cbor(f, timeout=timeout, waiting_for=waiting_for)
        # logger.debug(f'line is {line!r}')
        try:
            j = json.loads(line)
        except JSONDecodeError:
            msg = f"Could not decode line {line!r}: {traceback.format_exc()}"
            logger.error(msg)
            # Bug fix: same as above -- keep the caller's timeout on retry.
            return read_next_either_json_or_cbor(f, timeout=timeout, waiting_for=waiting_for)
        j = decode_bytes_before_json_deserialization(j)
        return j
    else:
        j = cbor2.load(f, tag_hook=tag_hook)
        return j
def tag_hook(decoder, tag, shareable_index=None) -> dict:
    """CBOR tag 42 (IPLD content identifier) -> {'/': 'z...'} link dict.

    Any other tag is returned unchanged. The payload is base58-encoded and
    its first character is replaced with 'z'.
    """
    if tag.tag != 42:
        return tag
    encoded = base58.b58encode(tag.value).decode("ascii")
    return {"/": "z" + encoded[1:]}
def wait_for_data(f, timeout=None, waiting_for: str = None):
    """ Block until f is readable (via select), logging progress periodically.

    Raises TimeoutError if over timeout. Despite the XXX below, EOF is not
    detected here; callers discover it when the subsequent read fails."""
    # XXX: StopIteration not implemented
    fs = [f]
    t0 = time.time()
    intermediate_timeout = 3.0
    while True:
        try:
            readyr, readyw, readyx = select.select(fs, [], fs, intermediate_timeout)
        except io.UnsupportedOperation:
            # f has no fileno() (e.g. an in-memory buffer): nothing to wait on.
            break
        if readyr:
            break
        elif readyx:
            logger.warning(f"Exceptional condition on input channel {readyx}")
        else:
            delta = time.time() - t0
            if (timeout is not None) and (delta > timeout):
                msg = f"Timeout after {delta:.1f} s."
                logger.error(msg)
                raise TimeoutError(msg)
            else:
                # Not timed out yet: log a heartbeat and keep selecting.
                msg = f"I have been waiting {delta:.1f} s."
                if timeout is None:
                    msg += " I will wait indefinitely."
                else:
                    msg += f" Timeout will occurr at {timeout:.1f} s."
                if waiting_for:
                    msg += " " + waiting_for
                logger.warning(msg)
def read_next_cbor(f, timeout=None, waiting_for: str = None) -> dict:
    """ Read the next CBOR object from binary file f.

    Raises StopIteration if it is EOF.
    Raises TimeoutError if over timeout (see wait_for_data)."""
    wait_for_data(f, timeout, waiting_for)
    try:
        j = cbor2.load(f, tag_hook=tag_hook)
        return j
    except CBORDecodeEOF:
        raise StopIteration from None
    except OSError as e:
        # errno 29 is ESPIPE ("illegal seek"); presumably how an exhausted
        # pipe surfaces here -- treated as end-of-stream. TODO confirm.
        if e.errno == 29:
            raise StopIteration from None
        raise
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/json2cbor.py
|
json2cbor.py
|
from collections import UserString
from typing import Callable, Dict, NewType
from zuper_commons.types import ZValueError
def valid_email(s: str) -> None:
    """Raise ZValueError unless s is a syntactically valid email address."""
    import validate_email

    if not validate_email.validate_email(s):
        raise ZValueError("Invalid email address.", s=s)
# Validators for JSON-schema "format" values; None means the value is
# accepted without any checking.
json_formats: Dict[str, Callable[[str], None]] = {
    "date-time": None,
    "email": valid_email,
    "ipv4": None,
    "ipv6": None,
    "uri": None,
    "uri-reference": None,
    "json-pointer": None,
    "uri-template": None,
    # others:
    "domain": None,
    "multihash": None,
}
def make_special(name: str, sformat: str) -> type:
    """Create a string-like class (UserString subclass) named `name` whose
    constructor runs the validator registered for `sformat` (if any)."""
    validator = json_formats[sformat]
    class Special(UserString):
        data: str
        def __init__(self, seq: object):
            UserString.__init__(self, seq)
            # Validate the underlying string eagerly at construction time.
            if validator is not None:
                validator(self.data)
    return type(name, (Special,), {})
__all__ = [
    "URL",
    "DateTimeString",
    "Email",
    "IP4",
    "IP6",
    "URI",
    "URIReference",
    "JSONPointer",
    "URITemplate",
    "Domain",
    "Multihash",
    # 'IPDELink',
]
# Concrete string types, one per supported JSON-schema "format".
URL = make_special("URL", "uri")
DateTimeString = make_special("DateTimeString", "date-time")
Email = make_special("Email", "email")
IP4 = make_special("IP4", "ipv4")
IP6 = make_special("IP6", "ipv6")
URI = make_special("URI", "uri")
# NOTE(review): URIReference uses format "uri", not "uri-reference" (which
# exists in json_formats). Both validators are None today -- confirm intended.
URIReference = make_special("URIReference", "uri")
JSONPointer = make_special("JSONPointer", "json-pointer")
URITemplate = make_special("URITemplate", "uri-template")
Domain = make_special("Domain", "domain")
Multihash = make_special("Multihash", "multihash")
IPDELink = NewType("IPDELink", str)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/special_strings.py
|
special_strings.py
|
from datetime import datetime
from typing import Dict, List, Union
from zuper_typing import get_NewType_arg, is_Any, is_NewType, TypeLike
__all__ = ["IPCE", "TypeLike", "ModuleName", "QualName", "is_unconstrained"]
IPCE = Union[int, str, float, bytes, datetime, List["IPCE"], Dict[str, "IPCE"], type(None)]
ModuleName = QualName = str
_ = TypeLike
def is_unconstrained(t: TypeLike):
    """True when the annotation places no constraint at all: Any or object."""
    assert t is not None
    return (t is object) or is_Any(t)
def get_effective_type(K):
    """Unwrap nested NewType aliases down to the underlying type."""
    while is_NewType(K):
        K = get_NewType_arg(K)
    return K
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/types.py
|
types.py
|
import numpy as np
from zuper_commons.types import check_isinstance
from .types import IPCE
def ipce_from_numpy_array(x: np.ndarray) -> IPCE:
    """Serialize an ndarray as {'shape', 'dtype', 'data'} with raw bytes.

    NOTE(review): dtype.name does not record byte order, so arrays with
    non-native endianness may not round-trip exactly -- confirm.
    """
    res = {"shape": list(x.shape), "dtype": x.dtype.name, "data": x.tobytes()}
    from .ipce_spec import sorted_dict_cbor_ord
    # Keys must be in canonical CBOR order for the IPCE spec.
    res = sorted_dict_cbor_ord(res)
    return res
def numpy_array_from_ipce(d: IPCE) -> np.ndarray:
    """Rebuild an ndarray from the {'shape','dtype','data'} dict produced by
    ipce_from_numpy_array."""
    data: bytes = d["data"]
    check_isinstance(data, bytes)
    flat = np.frombuffer(data, dtype=d["dtype"])
    return flat.reshape(tuple(d["shape"]))
#
#
# def bytes_from_numpy(a: np.ndarray) -> bytes:
# import h5py
# io = BytesIO()
# with h5py.File(io) as f:
# # f.setdefault("compression", "lzo")
# f['value'] = a
# uncompressed = io.getvalue()
#
# compressed_data = zlib.compress(uncompressed)
# return compressed_data
#
#
# def numpy_from_bytes(b: bytes) -> np.ndarray:
# b = zlib.decompress(b)
# import h5py
# io = BytesIO(b)
# with h5py.File(io) as f:
# # f.setdefault("compression", "lzw")
# a = f['value']
# res = np.array(a)
# return res
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/numpy_encoding.py
|
numpy_encoding.py
|
from typing import Any
import oyaml
#
# __all__ = ["get_sha256_base58"]
#
#
# def get_sha256_base58(contents: bytes) -> bytes:
# m = hashlib.sha256()
# m.update(contents)
# s = m.digest()
# return base58.b58encode(s)
def oyaml_dump(x: object) -> str:
    """Serialize x to YAML, preserving dict insertion order (via oyaml)."""
    return oyaml.dump(x)
def oyaml_load(x: str, **kwargs: Any) -> object:
    """Parse a YAML string, preserving mapping order (via oyaml).

    Bug fix: kwargs were previously accepted but silently discarded, so a
    caller passing e.g. Loader=yaml.SafeLoader still got the full (unsafe)
    oyaml.Loader. They are now honored, with oyaml.Loader as the default.
    """
    kwargs.setdefault("Loader", oyaml.Loader)
    return oyaml.load(x, **kwargs)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/utils_text.py
|
utils_text.py
|
from dataclasses import dataclass, field
from datetime import datetime
from decimal import Decimal
from typing import cast, Dict, NewType, Tuple
from zuper_typing import DictStrType, MyBytes
from .types import ModuleName, QualName
# A JSON-schema document; structurally just a dict, tagged for type checking.
JSONSchema = NewType("JSONSchema", dict)
GlobalsDict = Dict[str, object]
ProcessingDict = Dict[str, str]
# EncounteredDict = Dict[str, object]
# Draft-07 JSON-Schema metaschema URL and the attribute names used throughout.
SCHEMA_ID = "http://json-schema.org/draft-07/schema#"
SCHEMA_ATT = "$schema"
HINTS_ATT = "$hints"
ANY_OF = "anyOf"
ALL_OF = "allOf"
ID_ATT = "$id"
REF_ATT = "$ref"
# Zuper extensions stored alongside the standard JSON-schema keys.
X_CLASSVARS = "classvars"
X_CLASSATTS = "classatts"
X_ORDER = "order"
# Standard JSON-schema keywords.
JSC_FORMAT = "format"
JSC_REQUIRED = "required"
JSC_TYPE = "type"
JSC_ITEMS = "items"
JSC_DEFAULT = "default"
JSC_ENUM = "enum"
JSC_TITLE = "title"
JSC_NUMBER = "number"
JSC_INTEGER = "integer"
JSC_ARRAY = "array"
JSC_OBJECT = "object"
JSC_ADDITIONAL_PROPERTIES = "additionalProperties"
JSC_PROPERTY_NAMES = "propertyNames"
JSC_DESCRIPTION = "description"
JSC_STRING = "string"
JSC_NULL = "null"
JSC_BOOL = "boolean"
JSC_PROPERTIES = "properties"
JSC_DEFINITIONS = "definitions"
JSC_ALLOF = "allOf"
JSC_ANYOF = "anyOf"
# Z_ATT_LSIZE = "lsize"
# Z_ATT_TSIZE = "tsize"
# Python-identity attributes embedded in schemas so types can be reconstructed.
X_ORIG = "__orig__"
# X_EXTRA = "__extra__"
# X_BINDINGS = "__bindings__"
X_PYTHON_MODULE_ATT = "__module__"
ATT_PYTHON_NAME = "__qualname__"
# Values used for the schema "title" of special (non-JSON-native) types.
JSC_TITLE_NUMPY = "numpy"
JSC_TITLE_SLICE = "slice"
JSC_TITLE_BYTES = "bytes"
JSC_TITLE_DECIMAL = "decimal"
JSC_TITLE_FLOAT = "float"
JSC_TITLE_DATETIME = "datetime"
JSC_TITLE_CALLABLE = "Callable"
JSC_TITLE_TYPE = "type"
JSC_TITLE_CID = "cid"
# JSC_TITLE_TUPLE = 'Tuple'
# JSC_TITLE_LIST = 'List'
JSC_FORMAT_CID = "cid"
# Schema for raw bytes (serialized as a string).
SCHEMA_BYTES = cast(
    JSONSchema, {JSC_TYPE: JSC_STRING, JSC_TITLE: JSC_TITLE_BYTES, SCHEMA_ATT: SCHEMA_ID},
)
# Schema for content-addressed identifiers (CIDs).
SCHEMA_CID = cast(
    JSONSchema,
    {
        JSC_TYPE: JSC_STRING,
        JSC_TITLE: JSC_TITLE_CID,
        JSC_FORMAT: JSC_FORMAT_CID,
        SCHEMA_ATT: SCHEMA_ID,
    },
)
# IPCE_SCALARS = (bool, int, str, float, bytes, datetime, Decimal, type(None))
# Types serialized directly as JSON scalars/strings.
IPCE_TRIVIAL = (bool, int, str, float, bytes, datetime, Decimal, MyBytes)
IPCE_TRIVIAL_NONE = IPCE_TRIVIAL + (type(None),)
# check_types = False
# Keys used when serializing Callable signatures.
CALLABLE_ORDERING = "ordering"
CALLABLE_RETURN = "return"
@dataclass
class IEDO:
    """IPCE deserialization options."""
    # Reuse classes already registered with zuper_typing instead of re-creating them.
    use_remembered_classes: bool
    # Register newly deserialized classes for later reuse.
    remember_deserialized_classes: bool
@dataclass
class IEDS:
    """Mutable IPCE deserialization state, threaded through recursive calls."""
    # Symbols available for resolving names found in schemas.
    global_symbols: Dict[str, type]
    # Map of schema $id / $ref -> resolved type (or placeholder/name).
    encountered: Dict
    # Cache of reconstructed classes keyed by (module name, qualified name).
    klasses: Dict[Tuple[ModuleName, QualName], type] = field(default_factory=DictStrType)
@dataclass
class IESO:
    """IPCE serialization options."""
    use_ipce_from_typelike_cache: bool = True
    # If False, omit embedded schema information from the output.
    with_schema: bool = True
# Exception types that the (de)serializers should let propagate unchanged
# (name suggests pass-through; confirm at use sites).
# Fixed: AttributeError was accidentally listed twice.
IPCE_PASS_THROUGH = (
    NotImplementedError,
    KeyboardInterrupt,
    MemoryError,
    AttributeError,
    NameError,
    # TypeError,
    RecursionError,
    RuntimeError,
)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/constants.py
|
constants.py
|
from dataclasses import is_dataclass
from typing import Dict, List, overload, Tuple, TypeVar
from zuper_commons.types import ZAssertionError, ZValueError
from .constants import JSONSchema
from .types import IPCE
D = TypeVar("D")
_V = TypeVar("_V")
__all__ = ["assert_canonical_ipce"]
# Overloads: a JSONSchema comes back as a JSONSchema; any other str-keyed
# dict keeps its value type.
@overload
def sorted_dict_cbor_ord(x: JSONSchema) -> JSONSchema:
    ...
@overload
def sorted_dict_cbor_ord(x: Dict[str, _V]) -> Dict[str, _V]:
    ...
def sorted_dict_cbor_ord(x: Dict[str, _V]) -> Dict[str, _V]:
    """Return a copy of x with keys in canonical CBOR order
    (shortest key first, then lexicographic)."""
    if not isinstance(x, dict):
        raise ZAssertionError(x=x)
    ordered = sorted(x.items(), key=key_dict)
    # TODO
    # assert_sorted_dict_cbor_ord(res)
    return dict(ordered)


def key_dict(item: Tuple[str, object]) -> Tuple[int, str]:
    """Canonical CBOR ordering key for a dict item, computed on its key string."""
    k, _unused = item
    return len(k), k
def key_list(k: str) -> Tuple[int, str]:
    """Canonical CBOR ordering key: length first, then lexicographic."""
    return len(k), k


def sorted_list_cbor_ord(x: List[str]) -> List[str]:
    """Sort strings into canonical CBOR key order."""
    return sorted(x, key=key_list)
def assert_sorted_dict_cbor_ord(x: dict):
    """Raise ZValueError unless x's keys are already in canonical CBOR order."""
    keys = list(x.keys())
    expected = sorted_list_cbor_ord(keys)
    if keys != expected:
        raise ZValueError("x not sorted", keys=keys, keys2=expected)
def assert_canonical_ipce(ob_ipce: IPCE, max_rec=2) -> None:
    """Check that ob_ipce is in canonical IPCE form, recursing up to max_rec
    levels into dict values.

    Canonical form requires: no '/' key (IPLD links), dict keys in canonical
    CBOR order, no leftover '$links'/'$self' sections, no dataclass values,
    and no tuples (only lists). Raises ZValueError on the first violation.
    """
    IPCL_LINKS = "$links"
    IPCL_SELF = "$self"
    if isinstance(ob_ipce, dict):
        if "/" in ob_ipce:
            msg = 'Cannot have "/" in here '
            raise ZValueError(msg, ob_ipce=ob_ipce)
        assert_sorted_dict_cbor_ord(ob_ipce)
        if IPCL_LINKS in ob_ipce:
            msg = f"Should have dropped the {IPCL_LINKS} part."
            raise ZValueError(msg, ob_ipce=ob_ipce)
        if IPCL_SELF in ob_ipce:
            # Bug fix: this message used to interpolate IPCL_LINKS ($links)
            # even though the check is about $self.
            msg = f"Re-processing the {IPCL_SELF}."
            raise ZValueError(msg, ob_ipce=ob_ipce)
        for k, v in ob_ipce.items():
            assert not is_dataclass(v), ob_ipce
            if max_rec > 0:
                assert_canonical_ipce(v, max_rec=max_rec - 1)
    elif isinstance(ob_ipce, list):
        # NOTE(review): list elements are not checked, so tuples or unsorted
        # dicts nested inside lists pass -- confirm whether intentional.
        pass
    elif isinstance(ob_ipce, tuple):
        msg = "Tuple is not valid."
        raise ZValueError(msg, ob_ipce=ob_ipce)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/ipce_spec.py
|
ipce_spec.py
|
from datetime import datetime
from .base64_utils import (
decode_bytes_base64,
encode_bytes_base64,
is_encoded_bytes_base64,
)
def transform_leaf(x, transform):
    """Recursively map `transform` over the leaves of nested dicts and lists.

    Dicts and lists are rebuilt (keys untouched); every other value is
    considered a leaf and passed through `transform`.
    """
    if isinstance(x, dict):
        return {key: transform_leaf(value, transform) for key, value in x.items()}
    if isinstance(x, list):
        return [transform_leaf(item, transform) for item in x]
    return transform(x)
from decimal import Decimal
DECIMAL_PREFIX = "decimal:"
def encode_bytes_before_json_serialization(x0):
    """Return a copy of x0 with JSON-unfriendly leaves string-encoded:
    bytes -> base64 data-URI, datetime -> ISO 8601, Decimal -> 'decimal:...'."""
    def encode(leaf):
        if isinstance(leaf, bytes):
            return encode_bytes_base64(leaf)
        if isinstance(leaf, datetime):
            return leaf.isoformat()
        if isinstance(leaf, Decimal):
            return DECIMAL_PREFIX + str(leaf)
        return leaf
    return transform_leaf(x0, encode)
def decode_bytes_before_json_deserialization(x0):
    """Inverse of encode_bytes_before_json_serialization for bytes and Decimal
    leaves (ISO datetimes are left as strings)."""
    def decode(leaf):
        if isinstance(leaf, str):
            if is_encoded_bytes_base64(leaf):
                return decode_bytes_base64(leaf)
            if leaf.startswith(DECIMAL_PREFIX):
                return Decimal(leaf.replace(DECIMAL_PREFIX, ""))
        return leaf
    return transform_leaf(x0, decode)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/json_utils.py
|
json_utils.py
|
from typing import cast
from .constants import JSONSchema, REF_ATT
# def schema_hash(k):
# ob_cbor = cbor2.dumps(k)
# ob_cbor_hash = hashlib.sha256(ob_cbor).digest()
# return ob_cbor_hash
#
# def get_all_refs(schema):
# if isinstance(schema, dict):
# if '$ref' in schema:
# yield schema['$ref']
# for _, v in schema.items():
# yield from get_all_refs(v)
# if isinstance(schema, list):
# for v in schema:
# yield from get_all_refs(v)
def make_url(x: str):
    """Build the (reserved, intentionally invalid) JSON-schema $id URL for a name."""
    assert isinstance(x, str), x
    return "http://invalid.json-schema.org/" + x + "#"
def make_ref(x: str) -> JSONSchema:
    """Wrap a schema URL (as produced by make_url) into a {$ref: url} schema."""
    assert len(x) > 1, x
    assert isinstance(x, str), x
    return cast(JSONSchema, {REF_ATT: x})
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/schema_utils.py
|
schema_utils.py
|
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, Tuple
from zuper_commons.types import ZValueError
from .constants import JSONSchema, REF_ATT, SCHEMA_ATT, SCHEMA_ID
from .ipce_attr import make_key
from .ipce_spec import assert_canonical_ipce
def assert_canonical_schema(x: JSONSchema):
    """A canonical schema is a dict that carries either the known $schema
    value or a $ref, and is itself canonical IPCE."""
    assert isinstance(x, dict)
    has_schema = SCHEMA_ATT in x
    has_ref = REF_ATT in x
    if has_schema:
        assert x[SCHEMA_ATT] in [SCHEMA_ID]
    elif not has_ref:
        raise ZValueError(f"No {SCHEMA_ATT} or {REF_ATT}", x=x)
    assert_canonical_ipce(x)
    # json.dumps(x)  # try no bytes
@dataclass
class TRE:
    """A schema together with the context references it relies on.

    The schema is validated eagerly so malformed results fail at the
    construction site rather than later.
    """
    schema: JSONSchema
    # Map of reference name -> value used while building the schema.
    used: Dict[str, str] = field(default_factory=dict)
    def __post_init__(self) -> None:
        try:
            assert_canonical_schema(self.schema)
        except ValueError as e:  # pragma: no cover
            msg = f"Invalid schema"
            raise ZValueError(msg, schema=self.schema) from e
class IPCETypelikeCache:
    """Process-wide schema cache: type key -> {context (sorted tuple) -> schema}."""
    # Class-level mutable attribute: deliberately shared by all users in the process.
    c: Dict[Tuple, Dict[Tuple, JSONSchema]] = defaultdict(dict)
# def get_cached():
#     return {k[1]: [x for x, _ in v.items()] for k, v in IPCETypelikeCache.c.items()}
def get_ipce_from_typelike_cache(T, context: Dict[str, str]) -> TRE:
    """Look up a cached schema for T that is compatible with `context`.

    Raises KeyError on a miss.
    """
    k = make_key(T)
    if k not in IPCETypelikeCache.c:
        raise KeyError()
    items = list(IPCETypelikeCache.c[k].items())
    # actually first look for the ones with more context
    # NOTE(review): the sort key uses len(x[1]) -- the *schema* -- while the
    # comment above suggests sorting by the amount of context (len(x[0]));
    # confirm which is intended.
    items.sort(key=lambda x: len(x[1]), reverse=True)
    for context0, schema in items:
        if compatible(context0, context):
            # if context0:
            #     logger.debug(f'Returning cached {T} with context {context0}')
            return TRE(schema, dict(context0))
    raise KeyError()
def compatible(c0: Tuple[Tuple[str, str]], context: Dict[str, str]) -> bool:
    """True iff every (key, value) pair of c0 is present in context."""
    return all(k in context and context[k] == v for k, v in c0)
def set_ipce_from_typelike_cache(T, context: Dict[str, str], schema: JSONSchema):
    """Memoize the schema computed for T under the given context (stored as a
    sorted, hashable tuple of items)."""
    key = make_key(T)
    frozen_context = tuple(sorted(context.items()))
    IPCETypelikeCache.c[key][frozen_context] = schema
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/schema_caching.py
|
schema_caching.py
|
import base64
def encode_bytes_base64(data: bytes, mime=None) -> str:
    """Encode bytes as an RFC 2397 'data:' URI with a base64 payload."""
    if mime is None:
        mime = "binary/octet-stream"
    payload = base64.b64encode(data).decode("ascii")
    return "data:%s;base64,%s" % (mime, payload)


def is_encoded_bytes_base64(s: str):
    """True if s looks like a 'data:' URI containing a base64 section."""
    return s.startswith("data:") and "base64," in s


def decode_bytes_base64(s: str) -> bytes:
    """Inverse of encode_bytes_base64: extract and decode the base64 payload."""
    assert is_encoded_bytes_base64(s)
    marker = "base64,"
    start = s.index(marker) + len(marker)
    return base64.b64decode(s[start:])
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/base64_utils.py
|
base64_utils.py
|
import dataclasses
import datetime
from dataclasses import dataclass, field, Field, make_dataclass, MISSING
from decimal import Decimal
from numbers import Number
from typing import (
Any,
Callable,
cast,
ClassVar,
Dict,
List,
NewType,
Optional,
Tuple,
Type,
TypeVar,
)
import numpy as np
from zuper_commons.types import (
check_isinstance,
ZException,
ZNotImplementedError,
ZTypeError,
ZValueError,
)
from zuper_typing import (
DataclassInfo,
get_remembered_class,
is_ClassVar,
is_ForwardRef,
is_placeholder,
make_dict,
make_Intersection,
make_list,
make_Literal,
make_set,
make_Tuple,
make_Union,
make_VarTuple,
MyABC,
MyNamedArg,
PYTHON_36,
recursive_type_subst,
remember_created_class,
set_dataclass_info,
)
from . import logger
from .constants import (
ATT_PYTHON_NAME,
CALLABLE_ORDERING,
CALLABLE_RETURN,
ID_ATT,
IEDO,
IEDS,
JSC_ADDITIONAL_PROPERTIES,
JSC_ALLOF,
JSC_ANYOF,
JSC_ARRAY,
JSC_BOOL,
JSC_DEFAULT,
JSC_DEFINITIONS,
JSC_DESCRIPTION,
JSC_ENUM,
JSC_INTEGER,
JSC_NULL,
JSC_NUMBER,
JSC_OBJECT,
JSC_PROPERTIES,
JSC_REQUIRED,
JSC_STRING,
JSC_TITLE,
JSC_TITLE_BYTES,
JSC_TITLE_CALLABLE,
JSC_TITLE_DATETIME,
JSC_TITLE_DECIMAL,
JSC_TITLE_FLOAT,
JSC_TITLE_NUMPY,
JSC_TITLE_SLICE,
JSC_TYPE,
JSONSchema,
REF_ATT,
SCHEMA_ATT,
SCHEMA_ID,
X_CLASSATTS,
X_CLASSVARS,
X_ORDER,
X_ORIG,
X_PYTHON_MODULE_ATT,
)
from .structures import CannotFindSchemaReference
from .types import IPCE, is_unconstrained, TypeLike
_X = TypeVar("_X")
@dataclass
class SRE:
    """Result of a schema->type conversion: the type plus the $refs it used."""
    res: TypeLike
    # Map of $ref -> placeholder for references not yet fully resolved.
    used: Dict[str, object] = dataclasses.field(default_factory=dict)
# XXX: same class?
@dataclass
class SRO:
    """Result of an ipce->object conversion: the object plus the $refs it used."""
    res: object
    used: Dict[str, object] = dataclasses.field(default_factory=dict)
def typelike_from_ipce(schema0: JSONSchema, *, iedo: Optional[IEDO] = None) -> TypeLike:
    """Reconstruct a Python type from its IPCE/JSON-schema representation.

    Public entry point: builds fresh deserialization state and discards the
    bookkeeping of which $refs were used.
    """
    if iedo is None:
        iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)
    ieds = IEDS({}, {})
    sre = typelike_from_ipce_sr(schema0, ieds=ieds, iedo=iedo)
    #
    # try:
    #     assert_equivalent_types(sre.res, sre.res)
    # except Exception as e:
    #     raise ZValueError(sre=sre) from e
    return sre.res
def typelike_from_ipce_sr(schema0: JSONSchema, *, ieds: IEDS, iedo: IEDO) -> SRE:
    """Wrapper around typelike_from_ipce_sr_: uniform error reporting plus
    registration of the result under its $id (if any)."""
    try:
        sre = typelike_from_ipce_sr_(schema0, ieds=ieds, iedo=iedo)
        assert isinstance(sre, SRE), (schema0, sre)
        res = sre.res
    except (TypeError, ValueError) as e:  # pragma: no cover
        msg = "Cannot interpret schema as a type."
        raise ZTypeError(msg, schema0=schema0) from e
    if ID_ATT in schema0:
        # Register the result under its $id so later $refs can resolve to it.
        schema_id = schema0[ID_ATT]
        ieds.encountered[schema_id] = res
    return sre
def typelike_from_ipce_sr_(schema0: JSONSchema, *, ieds: IEDS, iedo: IEDO) -> SRE:
    """Dispatch on the schema's shape ($ref, enum, anyOf/allOf, "type" and
    "title") and rebuild the corresponding Python type."""
    # pprint('schema_to_type_', schema0=schema0)
    # encountered = encountered or {}
    check_isinstance(schema0, dict)
    schema = cast(JSONSchema, dict(schema0))
    # noinspection PyUnusedLocal
    metaschema = schema.pop(SCHEMA_ATT, None)
    schema_id = schema.pop(ID_ATT, None)
    if schema_id:
        if not JSC_TITLE in schema:
            pass
        else:
            # Pre-register the title so recursive references can find the name.
            cls_name = schema[JSC_TITLE]
            ieds.encountered[schema_id] = cls_name
    if JSC_ENUM in schema0:
        schema1 = dict(schema0)
        enum = schema1.pop(JSC_ENUM)
        schema1 = cast(JSONSchema, schema1)
        return typelike_from_ipce_enum(schema1, enum, ieds=ieds, iedo=iedo)
    # Degenerate schemas that denote Any / object.
    if schema == {JSC_TITLE: "Any"}:
        return SRE(Any)
    if schema == {}:
        return SRE(object)
    if schema == {JSC_TITLE: "object"}:
        return SRE(object)
    if REF_ATT in schema:
        r = schema[REF_ATT]
        if r == SCHEMA_ID:
            # A $ref to the metaschema itself denotes `type`.
            if schema.get(JSC_TITLE, "") == "type":
                return SRE(type)
            else:  # pragma: no cover
                raise ZNotImplementedError(schema=schema)
            # return SRE(Type)
        if r in ieds.encountered:
            res = ieds.encountered[r]
            # Only placeholders count as "used": they must be patched later.
            if is_placeholder(res):
                used = {r: res}
            else:
                used = {}
            return SRE(res, used=used)
        else:
            msg = f"Cannot evaluate reference {r!r}"
            raise CannotFindSchemaReference(msg, ieds=ieds)
    if JSC_ANYOF in schema:
        return typelike_from_ipce_Union(schema, ieds=ieds, iedo=iedo)
    if JSC_ALLOF in schema:
        return typelike_from_ipce_Intersection(schema, ieds=ieds, iedo=iedo)
    jsc_type = schema.get(JSC_TYPE, None)
    jsc_title = schema.get(JSC_TITLE, "-not-provided-")
    if jsc_title == JSC_TITLE_NUMPY:
        res = np.ndarray
        return SRE(res)
    if jsc_type == "NewType":
        kt = KeepTrackDes(ieds, iedo)
        if "newtype" not in schema:
            original = object
        else:
            nt = schema["newtype"]
            tre = typelike_from_ipce_sr(nt, ieds=ieds, iedo=iedo)
            original = tre.res
        res = NewType(jsc_title, original)
        return kt.sre(res)
    if jsc_type == JSC_STRING:
        # "string" encodes several Python types, disambiguated by title.
        if jsc_title == JSC_TITLE_BYTES:
            return SRE(bytes)
        elif jsc_title == JSC_TITLE_DATETIME:
            return SRE(datetime.datetime)
        elif jsc_title == JSC_TITLE_DECIMAL:
            return SRE(Decimal)
        else:
            return SRE(str)
    elif jsc_type == JSC_NULL:
        return SRE(type(None))
    elif jsc_type == JSC_BOOL:
        return SRE(bool)
    elif jsc_type == JSC_NUMBER:
        if jsc_title == JSC_TITLE_FLOAT:
            return SRE(float)
        else:
            return SRE(Number)
    elif jsc_type == JSC_INTEGER:
        return SRE(int)
    elif jsc_type == "subtype":
        # Type[T]: the schema of T is nested under "subtype".
        s = schema["subtype"]
        r = typelike_from_ipce_sr(s, ieds=ieds, iedo=iedo)
        T = Type[r.res]
        return SRE(T, r.used)
    elif jsc_type == JSC_OBJECT:
        # "object" encodes Callable, Dict, Set, slice or dataclasses, by title.
        if jsc_title == JSC_TITLE_CALLABLE:
            return typelike_from_ipce_Callable(schema, ieds=ieds, iedo=iedo)
        elif jsc_title.startswith("Dict["):
            return typelike_from_ipce_DictType(schema, ieds=ieds, iedo=iedo)
        elif jsc_title.startswith("Set["):
            return typelike_from_ipce_SetType(schema, ieds=ieds, iedo=iedo)
        elif jsc_title == JSC_TITLE_SLICE:
            return SRE(slice)
        else:
            return typelike_from_ipce_dataclass(schema, schema_id=schema_id, ieds=ieds, iedo=iedo)
    elif jsc_type == JSC_ARRAY:
        return typelike_from_ipce_array(schema, ieds=ieds, iedo=iedo)
    msg = "Cannot recover schema"
    raise ZValueError(msg, schema=schema)
    # assert False, schema  # pragma: no cover
def typelike_from_ipce_enum(schema: JSONSchema, enum: List[object], *, ieds: IEDS, iedo: IEDO):
    """Reconstruct a Literal type from a schema plus its "enum" list of values."""
    tracker = KeepTrackDes(ieds, iedo)
    base = tracker.typelike_from_ipce(schema)
    decoded = tuple(tracker.object_from_ipce(item, base) for item in enum)
    return tracker.sre(make_Literal(*decoded))
def typelike_from_ipce_Union(schema, *, ieds: IEDS, iedo: IEDO) -> SRE:
    """Reconstruct a Union (or Optional) type from the "anyOf" clause of a schema.

    A trailing NoneType option is interpreted as Optional[...]. All remaining
    options are preserved: the previous code kept only args[0], so a
    serialized Union[int, str, None] came back as Optional[int], silently
    dropping `str`.
    """
    options = schema[JSC_ANYOF]
    kt = KeepTrackDes(ieds, iedo)
    args = [kt.typelike_from_ipce(_) for _ in options]
    if args and args[-1] is type(None):
        rest = args[:-1]
        # Optional[X] for a single member; Optional[Union[...]] otherwise.
        inner = rest[0] if len(rest) == 1 else make_Union(*rest)
        res = Optional[inner]
    else:
        res = make_Union(*args)
    return kt.sre(res)
def typelike_from_ipce_Intersection(schema, *, ieds: IEDS, iedo: IEDO) -> SRE:
    """Reconstruct an Intersection type from the "allOf" clause of a schema."""
    tracker = KeepTrackDes(ieds, iedo)
    members = tuple(tracker.typelike_from_ipce(part) for part in schema[JSC_ALLOF])
    return tracker.sre(make_Intersection(members))
class KeepTrackDes:
    """Helper that records which schema references were used while deserializing."""

    def __init__(self, ieds: IEDS, iedo: IEDO):
        self.ieds = ieds
        self.iedo = iedo
        # references encountered so far
        self.used = {}

    def typelike_from_ipce(self, x: IPCE):
        """Convert a schema to a type, accumulating the references it used."""
        result = typelike_from_ipce_sr(x, ieds=self.ieds, iedo=self.iedo)
        self.used.update(result.used)
        return result.res

    def object_from_ipce(self, x: IPCE, st: Type[_X] = object) -> _X:
        """Deserialize an object with the tracked context."""
        from .conv_object_from_ipce import object_from_ipce_

        return object_from_ipce_(x, st, ieds=self.ieds, iedo=self.iedo)

    def sre(self, x: IPCE) -> SRE:
        """Wrap a result together with everything used so far."""
        return SRE(x, self.used)
def typelike_from_ipce_array(schema, *, ieds: IEDS, iedo: IEDO) -> SRE:
    """Reconstruct a tuple/list type from a schema of JSON type "array"."""
    assert schema[JSC_TYPE] == JSC_ARRAY
    tracker = KeepTrackDes(ieds, iedo)
    items = schema["items"]
    if isinstance(items, list):
        # A list of item schemas describes a fixed-length tuple.
        res = make_Tuple(*(tracker.typelike_from_ipce(i) for i in items))
    elif schema[JSC_TITLE].startswith("Tuple["):
        # Single item schema + a Tuple[...] title means a variadic tuple.
        res = make_VarTuple(tracker.typelike_from_ipce(items))
    else:
        res = make_list(tracker.typelike_from_ipce(items))
    return tracker.sre(res)
def typelike_from_ipce_DictType(schema, *, ieds: IEDS, iedo: IEDO) -> SRE:
    """Reconstruct a Dict[K, V] type from its schema."""
    tracker = KeepTrackDes(ieds, iedo)
    K = str
    V = tracker.typelike_from_ipce(schema[JSC_ADDITIONAL_PROPERTIES])
    # Non-string keys were serialized through a FakeValues wrapper; unwrap it
    # to recover the real key and value types.
    if isinstance(V, type) and V.__name__.startswith("FakeValues"):
        K = V.__annotations__["real_key"]
        V = V.__annotations__["value"]
    try:
        D = make_dict(K, V)
    except (TypeError, ValueError) as e:  # pragma: no cover
        msg = f"Cannot reconstruct dict type."
        raise ZTypeError(msg, K=K, V=V, ieds=ieds) from e
    return tracker.sre(D)
def typelike_from_ipce_SetType(schema, *, ieds: IEDS, iedo: IEDO):
    """Reconstruct a Set[V] type from its schema."""
    if not JSC_ADDITIONAL_PROPERTIES in schema:  # pragma: no cover
        msg = f"Expected {JSC_ADDITIONAL_PROPERTIES!r} in @schema."
        raise ZValueError(msg, schema=schema)
    tracker = KeepTrackDes(ieds, iedo)
    element = tracker.typelike_from_ipce(schema[JSC_ADDITIONAL_PROPERTIES])
    return tracker.sre(make_set(element))
def typelike_from_ipce_Callable(schema: JSONSchema, *, ieds: IEDS, iedo: IEDO):
    """Reconstruct a Callable[...] type from its schema definitions."""
    tracker = KeepTrackDes(ieds, iedo)
    schema = dict(schema)
    definitions = dict(schema[JSC_DEFINITIONS])
    ret = tracker.typelike_from_ipce(definitions.pop(CALLABLE_RETURN))
    params = []
    for key in schema[CALLABLE_ORDERING]:
        param = tracker.typelike_from_ipce(definitions[key])
        if not looks_like_int(key):
            # A non-numeric key is the name of a named argument.
            param = MyNamedArg(param, key)
        params.append(param)
    # noinspection PyTypeHints
    res = Callable[params, ret]
    return tracker.sre(res)
def looks_like_int(k: str) -> bool:
    """Return True if *k* parses as a base-10 integer.

    Used to distinguish positional Callable arguments (numeric keys)
    from named ones.
    """
    # The previous bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # int() on a string raises ValueError (TypeError kept for safety if a
    # non-string sneaks in).
    try:
        int(k)
    except (TypeError, ValueError):
        return False
    return True
def typelike_from_ipce_dataclass(
    res: JSONSchema, schema_id: Optional[str], *, ieds: IEDS, iedo: IEDO
) -> SRE:
    """Reconstruct a dataclass type from a JSON-Schema "object" description.

    Recreates the class with make_dataclass() from @properties/@classvars/
    @classatts, rebuilds its TypeVars from @definitions, restores class
    attributes, patches self-references, and optionally caches/remembers
    the resulting class. Returns the class plus the unresolved references
    it still uses.
    """
    kt = KeepTrackDes(ieds, iedo)
    assert res[JSC_TYPE] == JSC_OBJECT
    cls_name = res[JSC_TITLE]
    definitions = res.get(JSC_DEFINITIONS, {})
    # bindings_ipce = res.get(X_BINDINGS, {})
    required = res.get(JSC_REQUIRED, [])
    properties = res.get(JSC_PROPERTIES, {})
    classvars = res.get(X_CLASSVARS, {})
    classatts = res.get(X_CLASSATTS, {})
    if (not X_PYTHON_MODULE_ATT in res) or (not ATT_PYTHON_NAME in res):  # pragma: no cover
        msg = f"Cannot find attributes for {cls_name!r}."
        raise ZValueError(msg, res=res)
    module_name = res[X_PYTHON_MODULE_ATT]
    qual_name = res[ATT_PYTHON_NAME]
    key = (module_name, qual_name)
    # Short-circuit: reuse a class that was already deserialized, if allowed.
    if iedo.use_remembered_classes:
        try:
            res = get_remembered_class(module_name, qual_name)
            return SRE(res)
        except KeyError:
            pass
    if key in ieds.klasses:
        return SRE(ieds.klasses[key], {})
    # Rebuild the TypeVars declared in @definitions (with their bounds).
    typevars: List[TypeVar] = []
    typevars_dict: Dict[str, type] = {}
    for tname, t in definitions.items():
        bound = kt.typelike_from_ipce(t)
        # noinspection PyTypeHints
        if is_unconstrained(bound):
            bound = None
        # noinspection PyTypeHints
        tv = TypeVar(tname, bound=bound)
        typevars_dict[tname] = tv
        typevars.append(tv)
        if ID_ATT in t:
            ieds.encountered[t[ID_ATT]] = tv
    if typevars:
        typevars2: Tuple[TypeVar, ...] = tuple(typevars)
        from zuper_typing import Generic

        # TODO: typevars
        if PYTHON_36:  # pragma: no cover
            # noinspection PyUnresolvedReferences
            # base = Generic.__getitem__(typevars2)
            base = Generic.__class_getitem__(typevars2)
        else:
            # noinspection PyUnresolvedReferences
            base = Generic.__class_getitem__(typevars2)
        # ztinfo("", base=base, type_base=type(base))
        bases = (base,)
    else:

        class B(metaclass=MyABC):
            pass

        bases = (B,)
    # Placeholder stands in for the class while its own fields refer to it.
    Placeholder = type(f"PlaceholderFor{cls_name}", (), {})
    ieds.encountered[schema_id] = Placeholder
    fields_triples: List[Tuple[str, TypeLike, Field]] = []  # (name, type, Field)
    if X_ORDER in res:
        ordered = res[X_ORDER]
    else:
        ordered = list(properties) + list(classvars) + list(classatts)
    # assert_equal(set(names), set(properties), msg=yaml.dump(res))
    # else:
    # names = list(properties)
    #
    # logger.info(f'reading {cls_name} names {names}')
    # other_set_attr = {}
    for pname in ordered:
        if pname in properties:
            v = properties[pname]
            ptype = kt.typelike_from_ipce(v)
            _Field = field()
            _Field.name = pname
            has_default = JSC_DEFAULT in v
            if has_default:
                default_value = kt.object_from_ipce(v[JSC_DEFAULT], ptype)
                if isinstance(default_value, (list, dict, set)):
                    # Mutable default: wrap in a factory so it is not shared.
                    _Field.default_factory = MyDefaultFactory(default_value)
                else:
                    _Field.default = default_value
                    assert not isinstance(default_value, dataclasses.Field)
                # other_set_attr[pname] = default_value
            else:
                if not pname in required:
                    msg = f"Field {pname!r} is not required but I did not find a default"
                    raise ZException(msg, res=res)
            fields_triples.append((pname, ptype, _Field))
        elif pname in classvars:
            v = classvars[pname]
            ptype = kt.typelike_from_ipce(v)
            # logger.info(f'ipce classvar: {pname} {ptype}')
            f = field()
            if pname in classatts:
                f.default = kt.object_from_ipce(classatts[pname], object)
            fields_triples.append((pname, ClassVar[ptype], f))
        elif pname in classatts:  # pragma: no cover
            msg = f"Found {pname!r} in @classatts but not in @classvars"
            raise ZValueError(msg, res=res, classatts=classatts, classvars=classvars)
        else:  # pragma: no cover
            msg = f"Cannot find {pname!r} either in @properties or @classvars or @classatts."
            raise ZValueError(msg, properties=properties, classvars=classvars, classatts=classatts)
    check_fields_order(fields_triples)
    # ztinfo('fields', fields_triples=fields_triples)
    unsafe_hash = True
    try:
        T = make_dataclass(
            cls_name,
            fields_triples,
            bases=bases,
            namespace=None,
            init=True,
            repr=True,
            eq=True,
            order=True,
            unsafe_hash=unsafe_hash,
            frozen=False,
        )
    except TypeError:  # pragma: no cover
        #
        # msg = "Cannot make dataclass with fields:"
        # for f in fields:
        # msg += f"\n {f}"
        # logger.error(msg)
        raise
    setattr(T, "__name__", cls_name)
    # logger.info("before fix_anno\n" + debug_print(dict(T=T, ieds=ieds)))
    # Replace Placeholder occurrences in the annotations with T itself.
    fix_annotations_with_self_reference(T, cls_name, Placeholder)
    # Restore class attributes; a schema-shaped value becomes a type.
    for pname, v in classatts.items():
        if isinstance(v, dict) and SCHEMA_ATT in v and v[SCHEMA_ATT] == SCHEMA_ID:
            interpreted = kt.typelike_from_ipce(cast(JSONSchema, v))
        else:
            interpreted = kt.object_from_ipce(v, object)
        assert not isinstance(interpreted, dataclasses.Field)
        # logger.info("setting class att", pname, interpreted)
        setattr(T, pname, interpreted)
    if JSC_DESCRIPTION in res:
        setattr(T, "__doc__", res[JSC_DESCRIPTION])
    else:
        # we don't want the stock dataclass
        setattr(T, "__doc__", None)
    # logger.info(f"Did find __doc__ for {T.__name__} {id(T)}")
    # else:
    # logger.error(f"Did not find __doc__ for {T.__name__} {id(T)}", res=res)
    # # the original one did not have it
    # # setattr(T, "__doc__", None)
    # raise ValueError()
    setattr(T, "__module__", module_name)
    setattr(T, "__qualname__", qual_name)
    used = kt.used
    if schema_id in used:
        used.pop(schema_id)
    if not used:
        # Fully resolved: safe to cache and (optionally) remember the class.
        if iedo.remember_deserialized_classes:
            remember_created_class(T, "typelike_from_ipce")
        ieds.klasses[key] = T
    else:
        msg = f"Cannot remember {key} because used = {used}"
        logger.warning(msg)
    # logger.info(f"Estimated class {key} used = {used} ")
    # assert not "varargs" in T.__dict__, T
    # ztinfo("typelike_from_ipce", T=T, type_T=type(T), bases=bases)
    # bindings = {}
    # for tname, t in bindings_ipce.items():
    # u = make_TypeVar(tname)
    # bindings[u] = kt.typelike_from_ipce(t)
    orig = res.get(X_ORIG, [])
    # extra = res.get(X_EXTRA, [])
    #
    # def make_tv(tname: str) -> type:
    # if tname in typevars_dict:
    # return typevars_dict[tname]
    # else:
    # return make_TypeVar(tname)
    clsi_orig = []
    for o in orig:
        oT = kt.typelike_from_ipce(o)
        clsi_orig.append(oT)
    # clsi_orig = tuple(make_tv(_) for _ in orig)
    clsi_orig = tuple(clsi_orig)
    # clsi_extra = tuple(make_tv(_) for _ in extra)
    clsi = DataclassInfo(
        name=cls_name,
        # bindings=bindings,
        orig=clsi_orig,
    )
    # extra=clsi_extra)
    set_dataclass_info(T, clsi)
    # try:
    # assert_equivalent_types(T,T)
    # except Exception as e:
    # msg = 'I did not create a well-formed class'
    # raise ZAssertionError(msg, T=T, used=used, ieds=ieds) from e
    return SRE(T, used)
def field_has_default(f: Field) -> bool:
    """Return True if the dataclass field declares a default value or factory."""
    # Use identity comparison: MISSING is a sentinel object, and a user
    # default with a custom __eq__ could make `!=` misbehave (or raise).
    return f.default is not MISSING or f.default_factory is not MISSING
def check_fields_order(fields_triples: List[Tuple[str, TypeLike, Field]]):
    """Raise ZValueError if a field without a default follows one with a default."""
    last_defaulted = None
    for name, annotation, fld in fields_triples:
        # ClassVars are not constructor parameters, so order is irrelevant.
        if is_ClassVar(annotation):
            continue
        if field_has_default(fld):
            last_defaulted = name
        elif last_defaulted:
            msg = (
                f"Found out of order fields. Field {name!r} without default found after "
                f"{last_defaulted!r}."
            )
            raise ZValueError(msg, fields_triples=fields_triples)
def fix_annotations_with_self_reference(
    T: Type[dataclass], cls_name: str, Placeholder: type
) -> None:
    """Replace Placeholder occurrences in T's annotations with T itself.

    During deserialization, self-referencing fields were annotated with a
    temporary Placeholder class; once T exists, rewrite the annotations
    (recursively, via recursive_type_subst) so they point to T, and keep
    dataclasses.fields() in sync.
    """
    # print('fix_annotations_with_self_reference')
    # logger.info(f'fix_annotations_with_self_reference {cls_name}, placeholder: {Placeholder}')
    # logger.info(f'encountered: {encountered}')
    # logger.info(f'global_symbols: {global_symbols}')

    def f(M: TypeLike) -> TypeLike:
        # Substitution callback passed to recursive_type_subst.
        assert not is_ForwardRef(M)
        if M is Placeholder:
            return T
        if hasattr(M, "__name__") and "Placeholder" in M.__name__:
            pass
            # logger.debug('Found this placeholder', M=M, T=T, Placeholder=Placeholder)
        # Also match a placeholder from a different deserialization pass by name.
        if hasattr(M, "__name__") and M.__name__ == Placeholder.__name__:
            # logger.debug('Placeholder of different class')
            return T
        # elif hasattr(M, '__name__') and M.__name__ == Placeholder.__name__:
        # return T
        else:
            return M

    f.__name__ = f"replacer_for_{cls_name}"
    anns2 = {}
    anns: dict = T.__annotations__
    items = list(anns.items())
    for k, v0 in items:
        anns2[k] = recursive_type_subst(v0, f)
        del k, v0
    T.__annotations__ = anns2
    # fields() caches the annotation in Field.type: update it too.
    for fi in dataclasses.fields(T):
        fi.type = T.__annotations__[fi.name]
    # if T.__name__ == 'FailedResult':
    #
    # logger.info('substituted', anns=anns, anns2=anns2)
class MyDefaultFactory:
    """Callable returning a fresh shallow copy of a stored default value.

    Used as a dataclass default_factory so that mutable defaults
    (list, dict, set) are not shared between instances.
    """

    def __init__(self, value: object):
        self.value = value

    def __call__(self) -> object:
        original = self.value
        # Rebuild through the value's own type: list -> new list, etc.
        return type(original)(original)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/conv_typelike_from_ipce.py
|
conv_typelike_from_ipce.py
|
def make_key(x: object) -> tuple:
    """Build a cache key identifying object *x* by id, type id, and name attributes.

    Besides id(x), the key includes the type id, several name attributes and
    the hash, so that distinct objects that happen to reuse an id after
    garbage collection are very unlikely to collide.
    """
    k0 = id(type(x))
    k1 = getattr(x, "__qualname__", None)
    k2 = getattr(x, "__name__", None)
    k2b = getattr(x, "__dict_type__", None)
    k2c = getattr(x, "__set_type__", None)
    k2d = getattr(x, "__list_type__", None)
    k3 = id(x)
    # Unhashable objects (or ones whose __hash__ raises) fall back to 0.
    # The previous bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    try:
        h = x.__hash__()
    except Exception:
        h = 0
    return (k3, k0, k1, k2, k2b, k2c, k2d, h)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/ipce_attr.py
|
ipce_attr.py
|
from typing import cast, Dict, List, Set, Tuple, Type, TypeVar
from zuper_commons.types import ZTypeError, ZValueError
from zuper_typing import (
CustomDict,
CustomList,
CustomSet,
CustomTuple,
get_CustomDict_args,
get_CustomList_arg,
get_CustomSet_arg,
get_CustomTuple_args,
get_DictLike_args,
get_FixedTupleLike_args,
get_ListLike_arg,
get_Optional_arg,
get_SetLike_arg,
get_Union_args,
get_VarTuple_arg,
is_CustomDict,
is_CustomList,
is_CustomSet,
is_CustomTuple,
is_DictLike,
is_FixedTupleLike,
is_ListLike,
is_Optional,
is_SetLike,
is_Union,
is_VarTuple,
TypeLike,
)
from .types import is_unconstrained
X_ = TypeVar("X_")
def get_set_type_suggestion(x: set, st: TypeLike) -> TypeLike:
    """Decide which element type to use when serializing the set *x*."""
    actual = type(x)
    if is_CustomSet(actual):
        # The set itself carries its element type: trust it.
        actual = cast(Type[CustomSet], actual)
        return get_CustomSet_arg(actual)
    if is_SetLike(st):
        st = cast(Type[Set], st)
        return get_SetLike_arg(st)
    if is_unconstrained(st):
        return object
    # pragma: no cover
    msg = "suggest_type does not make sense for a list"
    raise ZTypeError(msg, suggest_type=st)
def get_list_type_suggestion(x: list, st: TypeLike) -> TypeLike:
    """Decide which element type to use when serializing the list *x*."""
    actual = type(x)
    if is_CustomList(actual):
        # The list itself carries its element type: trust it.
        actual = cast(Type[CustomList], actual)
        return get_CustomList_arg(actual)
    # TODO: if it is custom dict
    if is_unconstrained(st):
        return object
    if is_ListLike(st):
        like = cast(Type[List], st)
        return get_ListLike_arg(like)
    msg = "suggest_type does not make sense for a list"
    raise ZTypeError(msg, suggest_type=st, x=type(st))
def get_dict_type_suggestion(ob: dict, st: TypeLike) -> Tuple[type, type]:
    """Get the key/value types to use to serialize a dict.

    Returns the pair (K, V).
    """
    T = type(ob)
    if is_CustomDict(T):
        # if it has the type information, then go for it
        T = cast(Type[CustomDict], T)
        K, V = get_CustomDict_args(T)
        return K, V
    if is_DictLike(st):
        # There was a suggestion of Dict-like
        st = cast(Type[Dict], st)
        K, V = get_DictLike_args(st)
        return K, V
    elif is_unconstrained(st):
        # Guess from the dictionary itself
        K, V = guess_type_for_naked_dict(ob)
        return K, V
    else:  # pragma: no cover
        msg = f"@suggest_type does not make sense for a dict"
        raise ZValueError(msg, ob=ob, suggest_type=st)
def is_UnionLike(x: TypeLike) -> bool:
    """True for both Union[...] and Optional[...] types."""
    if is_Union(x):
        return True
    return is_Optional(x)
def get_UnionLike_args(x: TypeLike) -> Tuple[TypeLike, ...]:
    """Return the member types of a Union or Optional.

    For Optional[...], a nested union-like is flattened recursively and
    NoneType is appended at the end; a plain Union is returned as-is.
    Asserts if *x* is neither.
    """
    if is_Union(x):
        return get_Union_args(x)
    elif is_Optional(x):
        y = get_Optional_arg(x)
        if is_UnionLike(y):
            return get_UnionLike_args(y) + (type(None),)
        return (y,)
    else:
        assert False
def get_tuple_type_suggestion(x: tuple, st: TypeLike) -> Tuple[TypeLike, ...]:
    """Decide the per-element types to use when serializing the tuple *x*."""
    if is_UnionLike(st):
        raise ZValueError("Does not support unions here", x=x, st=st)
    # if isinstance(x, CustomTuple):
    # return type(x).__tuple_types__
    if is_CustomTuple(st):
        st = cast(Type[CustomTuple], st)
        return get_CustomTuple_args(st)
    n = len(x)
    if is_VarTuple(st):
        vt = cast(Type[Tuple[X_, ...]], st)
        element = get_VarTuple_arg(vt)
        # Variadic tuple: repeat the element type once per item.
        return (element,) * n
    if is_FixedTupleLike(st):
        ft = cast(Type[Tuple], st)
        return get_FixedTupleLike_args(ft)
    if is_unconstrained(st):
        return (object,) * n
    msg = f"@suggest_type does not make sense for a tuple"
    raise ZValueError(msg, suggest_type=st)
def guess_type_for_naked_dict(ob: dict) -> Tuple[type, type]:
    """Infer (K, V) for a dict with no type information.

    If all keys (resp. values) share a single concrete type, that type is
    used; otherwise `object`. An empty dict yields (object, object).
    """
    if not ob:
        return object, object

    def common(types: tuple) -> type:
        distinct = set(types)
        return next(iter(distinct)) if len(distinct) == 1 else object

    K = common(tuple(type(k) for k in ob.keys()))
    V = common(tuple(type(v) for v in ob.values()))
    return K, V
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/guesses.py
|
guesses.py
|
import inspect
import traceback
from dataclasses import Field, fields, is_dataclass, MISSING, replace
from typing import cast, Dict, Optional, Set, Tuple, Type, TypeVar
import numpy as np
import yaml
from zuper_commons.fs import write_ustring_to_utf8_file
from zuper_commons.types import ZTypeError, ZValueError
from zuper_ipce.types import get_effective_type
from zuper_typing import (
get_DictLike_args,
get_FixedTupleLike_args,
get_Intersection_args,
get_ListLike_arg,
get_Literal_args,
get_NewType_arg,
get_Optional_arg,
get_SetLike_arg,
get_Union_args,
get_VarTuple_arg,
is_ClassVar,
is_DictLike,
is_FixedTupleLike,
is_Intersection,
is_ListLike,
is_Literal,
is_NewType,
is_Optional,
is_SetLike,
is_TupleLike,
is_TypeVar,
is_Union,
is_VarTuple,
lift_to_customtuple_type,
make_CustomTuple,
make_dict,
make_list,
make_set,
)
from .constants import (
HINTS_ATT,
IEDO,
IEDS,
IPCE_PASS_THROUGH,
IPCE_TRIVIAL,
JSC_TITLE,
JSC_TITLE_TYPE,
JSONSchema,
REF_ATT,
SCHEMA_ATT,
SCHEMA_ID,
)
from .conv_typelike_from_ipce import typelike_from_ipce_sr
from .exceptions import ZDeserializationErrorSchema
from .numpy_encoding import numpy_array_from_ipce
from .structures import FakeValues
from .types import IPCE, is_unconstrained, TypeLike
DEBUGGING = False
_X = TypeVar("_X")
def object_from_ipce(
    mj: IPCE, expect_type: Type[_X] = object, *, iedo: Optional[IEDO] = None
) -> _X:
    """Deserialize an IPCE structure into an object of (optionally) a given type.

    Public entry point: builds a default deserialization context/options,
    delegates to object_from_ipce_, and wraps failures into one ZValueError
    (optionally dumping data/schema to files when DEBUGGING is set).
    """
    assert expect_type is not None
    if iedo is None:
        iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)
    ieds = IEDS({}, {})
    try:
        res = object_from_ipce_(mj, expect_type, ieds=ieds, iedo=iedo)
        return res
    except IPCE_PASS_THROUGH:  # pragma: no cover
        raise
    except ZValueError as e:
        msg = f"Cannot deserialize object"
        if isinstance(mj, dict) and "$schema" in mj:
            schema = mj["$schema"]
        else:
            schema = None
        if DEBUGGING:  # pragma: no cover
            # Dump the offending data/schema to files to ease debugging.
            prefix = f"object_{id(mj)}"
            fn = write_out_yaml(prefix + "_data", mj)
            msg += f"\n object data in {fn}"
            if schema:
                fn = write_out_yaml(prefix + "_schema", schema)
                msg += f"\n object schema in {fn}"
        raise ZValueError(msg, expect_type=expect_type, mj=mj) from e
def object_from_ipce_(mj: IPCE, st: Type[_X], *, ieds: IEDS, iedo: IEDO) -> _X:
    """Core recursive IPCE deserializer.

    Dispatches on the suggested type `st` and on the shape of `mj`:
    union-like suggestions first, then trivial values, lists, None, and
    finally dicts (which may carry their own embedded "$schema").
    """
    # ztinfo('object_from_ipce_', mj=mj, st=st)
    # if mj == {'ob': []}:
    # raise ZException(mj=mj, st=st)
    if is_NewType(st):
        # Deserialize as the underlying type of the NewType.
        st = get_NewType_arg(st)
    if is_Optional(st):
        return object_from_ipce_optional(mj, st, ieds=ieds, iedo=iedo)
    if is_Union(st):
        return object_from_ipce_union(mj, st, ieds=ieds, iedo=iedo)
    if is_Intersection(st):
        return object_from_ipce_intersection(mj, st, ieds=ieds, iedo=iedo)
    if st in IPCE_TRIVIAL:
        if not isinstance(mj, st):
            msg = "Type mismatch for a simple type."
            raise ZValueError(msg, expected=st, given_object=mj)
        else:
            return mj
    if isinstance(mj, IPCE_TRIVIAL):
        # T = type(mj)
        if is_Literal(st):  # TODO: put in IPCL as well
            values = get_Literal_args(st)
            if not mj in values:
                msg = "mismatch"
                raise ZValueError(msg, expected=st, given=mj)
        else:
            if not is_unconstrained(st) and not is_TypeVar(st):
                msg = f"Type mismatch"
                raise ZValueError(msg, expected=st, given_object=mj)
        return mj
    if isinstance(mj, list):
        return object_from_ipce_list(mj, st, ieds=ieds, iedo=iedo)
    if mj is None:
        if st is type(None):
            return None
        elif is_unconstrained(st):
            return None
        else:
            msg = f"The value is None but the expected type is @expect_type."
            raise ZValueError(msg, st=st)
    assert isinstance(mj, dict), type(mj)
    from .conv_typelike_from_ipce import typelike_from_ipce_sr

    # A dict that *is* a schema deserializes to a type, not an instance.
    if mj.get(SCHEMA_ATT, "") == SCHEMA_ID or REF_ATT in mj:
        schema = cast(JSONSchema, mj)
        sr = typelike_from_ipce_sr(schema, ieds=ieds, iedo=iedo)
        return sr.res
    if mj.get(JSC_TITLE, None) == JSC_TITLE_TYPE:
        schema = cast(JSONSchema, mj)
        sr = typelike_from_ipce_sr(schema, ieds=ieds, iedo=iedo)
        return sr.res
    if SCHEMA_ATT in mj:
        # The object embeds its own schema: prefer it over the suggestion.
        sa = mj[SCHEMA_ATT]
        R = typelike_from_ipce_sr(sa, ieds=ieds, iedo=iedo)
        K = R.res
        if R.used:
            msg = "An open type - not good."
            raise ZValueError(msg, sre=R)
        # logger.debug(f' loaded K = {K} from {mj}')
    else:
        K = st
    if K is np.ndarray:
        return numpy_array_from_ipce(mj)
    if is_DictLike(K):
        K = cast(Type[Dict], K)
        return object_from_ipce_dict(mj, K, ieds=ieds, iedo=iedo)
    if is_SetLike(K):
        K = cast(Type[Set], K)
        res = object_from_ipce_SetLike(mj, K, ieds=ieds, iedo=iedo)
        return res
    if is_dataclass(K):
        return object_from_ipce_dataclass_instance(mj, K, ieds=ieds, iedo=iedo)
    if K is slice:
        return object_from_ipce_slice(mj)
    if is_unconstrained(K):
        if looks_like_set(mj):
            # Heuristic: "set:"-prefixed keys indicate a serialized set.
            st = Set[object]
            res = object_from_ipce_SetLike(mj, st, ieds=ieds, iedo=iedo)
            return res
        else:
            msg = "No schema found and very ambiguous."
            raise ZDeserializationErrorSchema(msg=msg, mj=mj, ieds=ieds, st=st)
        # st = Dict[str, object]
        #
        # return object_from_ipce_dict(mj, st, ieds=ieds, opt=opt)
    msg = f"Invalid type or type suggestion."
    raise ZValueError(msg, K=K)
def looks_like_set(d: dict):
    """Heuristic: a non-empty dict whose keys all start with "set:" encodes a set."""
    if not d:
        return False
    return all(key.startswith("set:") for key in d)
def object_from_ipce_slice(mj) -> slice:
    """Rebuild a slice from its {"start", "stop", "step"} encoding."""
    return slice(mj["start"], mj["stop"], mj["step"])
def object_from_ipce_list(mj: IPCE, expect_type, *, ieds: IEDS, iedo: IEDO) -> IPCE:
    """Deserialize a JSON list into a list or tuple, guided by expect_type."""

    def rec(x, TT: TypeLike) -> object:
        return object_from_ipce_(x, TT, ieds=ieds, iedo=iedo)

    from .conv_ipce_from_object import is_unconstrained

    if is_unconstrained(expect_type):
        # No constraint: deserialize each element as plain `object`.
        T = make_list(object)
        # noinspection PyArgumentList
        return T([rec(item, object) for item in mj])
    if is_TupleLike(expect_type):
        return object_from_ipce_tuple(mj, expect_type, ieds=ieds, iedo=iedo)
    if is_ListLike(expect_type):
        element = get_ListLike_arg(expect_type)
        T = make_list(element)
        # noinspection PyArgumentList
        return T([rec(item, element) for item in mj])
    msg = f"The object is a list, but expected different"
    raise ZValueError(msg, expect_type=expect_type, mj=mj)
def object_from_ipce_optional(mj: IPCE, expect_type: TypeLike, *, ieds: IEDS, iedo: IEDO) -> IPCE:
    """Deserialize for Optional[K]: None stays None, otherwise recurse with K."""
    if mj is None:
        return None
    inner = get_Optional_arg(expect_type)
    return object_from_ipce_(mj, inner, ieds=ieds, iedo=iedo)
def object_from_ipce_union(mj: IPCE, expect_type: TypeLike, *, ieds: IEDS, iedo: IEDO) -> IPCE:
    """Try each member of the Union in order; the first success wins."""
    ts = get_Union_args(expect_type)
    errors = []
    for candidate in ts:
        try:
            return object_from_ipce_(mj, candidate, ieds=ieds, iedo=iedo)
        except IPCE_PASS_THROUGH:  # pragma: no cover
            raise
        except BaseException:
            errors.append(dict(T=candidate, e=traceback.format_exc()))
    msg = f"Cannot deserialize with any type."
    fn = write_out_yaml(f"object{id(mj)}", mj)
    msg += f"\n ipce in {fn}"
    raise ZValueError(msg, ts=ts, errors=errors)
def object_from_ipce_intersection(
    mj: IPCE, expect_type: TypeLike, *, ieds: IEDS, iedo: IEDO
) -> IPCE:
    """Try each member of the Intersection in order; the first success wins."""
    ts = get_Intersection_args(expect_type)
    errors = {}
    for candidate in ts:
        try:
            return object_from_ipce_(mj, candidate, ieds=ieds, iedo=iedo)
        except IPCE_PASS_THROUGH:  # pragma: no cover
            raise
        except BaseException:
            errors[str(candidate)] = traceback.format_exc()
    # pragma: no cover
    msg = f"Cannot deserialize with any of @ts"
    fn = write_out_yaml(f"object{id(mj)}", mj)
    msg += f"\n ipce in {fn}"
    raise ZValueError(msg, errors=errors, ts=ts)
def object_from_ipce_tuple(mj: IPCE, st: TypeLike, *, ieds: IEDS, iedo: IEDO) -> Tuple:
    """Deserialize a JSON list into a (fixed or variadic) tuple of type st."""
    if is_FixedTupleLike(st):
        st = cast(Type[Tuple], st)
        ts = get_FixedTupleLike_args(st)
        converted = []
        for expected_i, ob in zip(ts, mj):
            expected_i = cast(Type[_X], expected_i)  # XXX should not be necessary
            converted.append(object_from_ipce_(ob, expected_i, ieds=ieds, iedo=iedo))
        T = make_CustomTuple(ts)
        # noinspection PyArgumentList
        return T(converted)
    if is_VarTuple(st):
        st = cast(Type[Tuple], st)
        element = get_VarTuple_arg(st)
        converted = tuple(object_from_ipce_(ob, element, ieds=ieds, iedo=iedo) for ob in mj)
        try:
            return lift_to_customtuple_type(converted, element)
        except BaseException as e:
            raise ZValueError(mj=mj, st=st) from e
    assert False
def get_class_fields(K) -> Dict[str, Field]:
    """Map a dataclass's field names to their Field descriptors."""
    return {f.name: f for f in fields(K)}
def add_to_globals(ieds: IEDS, name: str, val: object) -> IEDS:
    """Return a copy of the context with one extra global symbol bound."""
    symbols = {**ieds.global_symbols, name: val}
    return replace(ieds, global_symbols=symbols)
def object_from_ipce_dataclass_instance(mj: IPCE, K: TypeLike, *, ieds: IEDS, iedo: IEDO):
    """Deserialize a dict into an instance of the dataclass K.

    Per-field type hints may be overridden via the @hints entry of `mj`;
    fields missing from the data fall back to the dataclass
    defaults/factories. Raises if neither is available.
    """
    # Make the class visible by name to nested deserializations.
    ieds = add_to_globals(ieds, K.__name__, K)
    anns = getattr(K, "__annotations__", {})
    attrs = {}
    hints = mj.get(HINTS_ATT, {})
    # ztinfo('hints', mj=mj, h=hints)
    # logger.info(f'hints for {K.__name__} = {hints}')
    for k, v in mj.items():
        if k not in anns:
            # Ignore keys that are not fields (e.g. $schema, hints).
            continue
        et_k = anns[k]
        if inspect.isabstract(et_k):  # pragma: no cover
            msg = f"Trying to instantiate abstract class for field {k!r} of class {K.__name__}."
            raise ZValueError(msg, K=K, expect_type=et_k, mj=mj, annotation=anns[k])
        if k in hints:
            # A hint overrides the static annotation for this field.
            R = typelike_from_ipce_sr(hints[k], ieds=ieds, iedo=iedo)
            hint = R.res
            et_k = hint
        #
        # else:
        # hint = None
        try:
            attrs[k] = object_from_ipce_(v, et_k, ieds=ieds, iedo=iedo)
        except IPCE_PASS_THROUGH:  # pragma: no cover
            raise
        except ZValueError as e:  # pragma: no cover
            msg = f"Cannot deserialize attribute {k!r} of {K.__name__}."
            raise ZValueError(
                msg,
                K_annotations=K.__annotations__,
                expect_type=et_k,
                ann_K=anns[k],
                K_name=K.__name__,
            ) from e
        # ztinfo(f'result for {k}', raw=v, hint = hint, et_k=et_k, attrs_k=attrs[k])
    # Fill in any field missing from the data with its declared default.
    class_fields = get_class_fields(K)
    for k, T in anns.items():
        if is_ClassVar(T):
            continue
        if not k in mj:
            f = class_fields[k]
            if f.default != MISSING:
                attrs[k] = f.default
            elif f.default_factory != MISSING:
                attrs[k] = f.default_factory()
            else:
                msg = (
                    f"Cannot find field {k!r} in data for class {K.__name__} "
                    f"and no default available"
                )
                raise ZValueError(msg, anns=anns, T=T, known=sorted(mj), f=f)
    for k, v in attrs.items():
        assert not isinstance(v, Field), (k, v)
    try:
        return K(**attrs)
    except TypeError as e:  # pragma: no cover
        msg = f"Cannot instantiate type {K.__name__}."
        raise ZTypeError(msg, K=K, attrs=attrs, bases=K.__bases__, fields=anns) from e
def ignore_aliases(self, data) -> bool:
    """YAML dumper hook: True for values that should never get an anchor/alias."""
    _ = self
    if data is None:
        return True
    if isinstance(data, tuple) and data == ():
        return True
    if isinstance(data, list) and len(data) == 0:
        return True
    if isinstance(data, (bool, int, float)):
        return True
    if isinstance(data, str):
        # Short strings and a few common schema keys are cheap to repeat.
        safe = ["additionalProperties", "properties", "__module__"]
        return len(data) < 10 or data in safe
    return False
def write_out_yaml(prefix: str, v: object, no_aliases: bool = False, ansi=False) -> str:
    """Dump *v* as YAML into errors/<prefix>.{yaml,ansi} and return the filename."""
    # Monkey-patch the dumper's alias policy for this dump.
    if no_aliases:
        yaml.Dumper.ignore_aliases = lambda _, data: True
    else:
        yaml.Dumper.ignore_aliases = ignore_aliases
    # d = oyaml_dump(v)
    text = v if isinstance(v, str) else yaml.dump(v)
    if ansi:
        fn = f"errors/{prefix}.ansi"
    else:
        fn = f"errors/{prefix}.yaml"
    write_ustring_to_utf8_file(text, fn)
    return fn
def object_from_ipce_dict(mj: IPCE, D: Type[Dict], *, ieds: IEDS, iedo: IEDO):
    """Deserialize a JSON object into a Dict[K, V].

    str/int keys are stored directly (JSON keys are strings, so int keys
    are converted back); any other key type was serialized through the
    FakeValues wrapper (real_key/value pairs), which is unwrapped here.
    """
    assert is_DictLike(D), D
    K, V = get_DictLike_args(D)
    D = make_dict(K, V)
    ob = D()
    attrs = {}
    # TODO: reflect in ipcl
    if is_NewType(K):
        K = get_NewType_arg(K)
    FV = FakeValues[K, V]
    if isinstance(K, type) and (issubclass(K, str) or issubclass(K, int)):
        # Simple keys: the JSON values are the dict values themselves.
        et_V = V
    else:
        # Complex keys: each JSON value is a FakeValues(real_key, value).
        et_V = FV
    for k, v in mj.items():
        if k == SCHEMA_ATT:
            continue
        try:
            attrs[k] = object_from_ipce_(v, et_V, ieds=ieds, iedo=iedo)
        except (TypeError, NotImplementedError) as e:  # pragma: no cover
            msg = f'Cannot deserialize element at index "{k}".'
            raise ZTypeError(msg, expect_type_V=et_V, v=v, D=D, mj_yaml=mj) from e
    K = get_effective_type(K)
    if isinstance(K, type) and issubclass(K, str):
        ob.update(attrs)
        return ob
    elif isinstance(K, type) and issubclass(K, int):
        # JSON object keys are always strings: convert them back to int.
        attrs = {int(k): v for k, v in attrs.items()}
        ob.update(attrs)
        return ob
    else:
        for k, v in attrs.items():
            # noinspection PyUnresolvedReferences
            ob[v.real_key] = v.value
        return ob
def object_from_ipce_SetLike(mj: IPCE, D: Type[Set], *, ieds: IEDS, iedo: IEDO):
    """Deserialize a {"set:<hash>": value} mapping into a set of type D."""
    V = get_SetLike_arg(D)
    elements = set()
    # logger.info(f'loading SetLike wiht V = {V}')
    for key, encoded in mj.items():
        if key == SCHEMA_ATT:
            continue
        decoded = object_from_ipce_(encoded, V, ieds=ieds, iedo=iedo)
        # logger.info(f'loaded k = {k} vob = {vob}')
        elements.add(decoded)
    T = make_set(V)
    return T(elements)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/conv_object_from_ipce.py
|
conv_object_from_ipce.py
|
import hashlib
import json
import os
from typing import Optional
import cbor2
import multihash
import numpy as np
from cid import make_cid
from nose.tools import assert_equal
from zuper_commons.fs import write_bytes_to_file, write_ustring_to_utf8_file
from zuper_commons.text import pretty_dict
from zuper_commons.types import ZAssertionError, ZValueError
from zuper_typing import assert_equivalent_types, debug_print, get_patches
from . import logger
from .constants import IEDO, IEDS, IESO
from .conv_ipce_from_object import ipce_from_object
from .conv_ipce_from_typelike import ipce_from_typelike
from .conv_object_from_ipce import object_from_ipce, object_from_ipce_
from .conv_typelike_from_ipce import typelike_from_ipce
from .json_utils import (
decode_bytes_before_json_deserialization,
encode_bytes_before_json_serialization,
)
# from .pretty import pretty_dict
from .utils_text import oyaml_dump
def get_cbor_dag_hash_bytes__(ob_cbor: bytes) -> str:
    """Compute the CIDv1 (dag-cbor, sha2-256 multihash) of a CBOR byte string."""
    if not isinstance(ob_cbor, bytes):
        msg = "Expected bytes."
        raise ZValueError(msg, ob_cbor=ob_cbor)
    digest = hashlib.sha256(ob_cbor).digest()
    # multihash.encode returns a bytearray; make_cid wants bytes.
    mh = bytes(multihash.encode(digest=digest, code=18))
    cid = make_cid(1, "dag-cbor", mh)
    return cid.encode().decode("ascii")
def save_object(x: object, ipce: object):
    """Persist a test object and its IPCE encoding under test_objects/.

    Files are named after the CID of the canonical CBOR encoding, so the
    same object is saved only once. Also round-trips the ipce through
    object_from_ipce as a sanity check before writing.
    """
    # noinspection PyBroadException
    # try:
    # import zuper_ipcl
    # except:
    # return
    # print(f"saving {x}")
    _x2 = object_from_ipce(ipce)
    ipce_bytes = cbor2.dumps(ipce, canonical=True, value_sharing=True)
    # from zuper_ipcl.debug_print_ import debug_print
    digest = get_cbor_dag_hash_bytes__(ipce_bytes)
    dn = "test_objects"
    # if not os.path.exists(dn):
    # os.makedirs(dn)
    fn = os.path.join(dn, digest + ".ipce.cbor.gz")
    if os.path.exists(fn):
        # Already saved under this content hash: nothing to do.
        pass
    else:  # pragma: no cover
        fn = os.path.join(dn, digest + ".ipce.cbor")
        write_bytes_to_file(ipce_bytes, fn)
        # fn = os.path.join(dn, digest + '.ipce.yaml')
        # write_ustring_to_utf8_file(yaml.dump(y1), fn)
        fn = os.path.join(dn, digest + ".object.ansi")
        s = debug_print(x)  # '\n\n as ipce: \n\n' + debug_print(ipce) \
        write_ustring_to_utf8_file(s, fn)
        fn = os.path.join(dn, digest + ".ipce.yaml")
        s = oyaml_dump(ipce)
        write_ustring_to_utf8_file(s, fn)
def assert_type_roundtrip(T, *, use_globals: Optional[dict] = None, expect_type_equal: bool = True):
    """Check that type T survives ipce_from_typelike -> typelike_from_ipce.

    Serializes T to a schema twice (determinism check), deserializes it
    back, verifies the types are equivalent, and checks that re-serializing
    the recovered type yields the identical schema. Returns the recovered
    type T2.
    """
    assert_equivalent_types(T, T)
    if use_globals is None:
        use_globals = {}
    schema0 = ipce_from_typelike(T, globals0=use_globals)
    # why 2?
    schema = ipce_from_typelike(T, globals0=use_globals)
    save_object(T, ipce=schema)
    # logger.info(debug_print('schema', schema=schema))
    iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)
    T2 = typelike_from_ipce(schema, iedo=iedo)
    # TODO: in 3.6 does not hold for Dict, Union, etc.
    # if hasattr(T, '__qualname__'):
    # assert hasattr(T, '__qualname__')
    # assert T2.__qualname__ == T.__qualname__, (T2.__qualname__, T.__qualname__)
    # if False:
    # rl.pp('\n\nschema', schema=json.dumps(schema, indent=2))
    # rl.pp(f"\n\nT ({T}) the original one", **getattr(T, '__dict__', {}))
    # rl.pp(f"\n\nT2 ({T2}) - reconstructed from schema ", **getattr(T2, '__dict__', {}))
    # pprint("schema", schema=json.dumps(schema, indent=2))
    try:
        assert_equal(schema, schema0)
        if expect_type_equal:
            # assert_same_types(T, T)
            # assert_same_types(T2, T)
            assert_equivalent_types(T, T2, assume_yes=set())
    except:  # pragma: no cover
        logger.info("assert_type_roundtrip", T=T, schema=schema, T2=T2)
        raise
    # Round-trip again: the recovered type must produce the same schema.
    schema2 = ipce_from_typelike(T2, globals0=use_globals)
    if schema != schema2:  # pragma: no cover
        msg = "Different schemas"
        d = {
            "T": T,
            "T.qual": T.__qualname__,
            "TAnn": T.__annotations__,
            "Td": T.__dict__,
            "schema": schema0,
            "T2": T2,
            "T2.qual": T2.__qualname__,
            "TAnn2": T2.__annotations__,
            "Td2": T2.__dict__,
            "schema2": schema2,
            "patches": get_patches(schema, schema2),
        }
        # msg = pretty_dict(msg, d)
        # print(msg)
        # with open("tmp1.json", "w") as f:
        # f.write(json.dumps(schema, indent=2))
        # with open("tmp2.json", "w") as f:
        # f.write(json.dumps(schema2, indent=2))
        # assert_equal(schema, schema2)
        raise ZAssertionError(msg, **d)
    return T2
def assert_object_roundtrip(
    x1, *, use_globals: Optional[dict] = None, expect_equality=True, works_without_schema=True,
):
    """Check that an object survives serialization round trips.

    The object goes through IPCE -> CBOR -> IPCE and IPCE -> JSON -> IPCE,
    is deserialized again, and is compared with the original.

    expect_equality: if __eq__ is preserved
    Will not be preserved if use_globals = {}
    because a new Dataclass will be created
    and different Dataclasses with the same fields do not compare equal.
    """
    if use_globals is None:
        use_globals = {}
    ieds = IEDS(use_globals, {})
    iedo = IEDO(use_remembered_classes=False, remember_deserialized_classes=False)
    # serialize, then round-trip the IPCE itself through CBOR bytes
    y1 = ipce_from_object(x1, globals_=use_globals)
    y1_cbor: bytes = cbor2.dumps(y1)
    save_object(x1, ipce=y1)
    y1 = cbor2.loads(y1_cbor)
    # also round-trip through JSON; bytes need explicit encode/decode there
    y1e = encode_bytes_before_json_serialization(y1)
    y1es = json.dumps(y1e, indent=2)
    y1esl = decode_bytes_before_json_deserialization(json.loads(y1es))
    # deserializes the JSON path only to check it does not raise;
    # the resulting value is intentionally unused
    y1eslo = object_from_ipce_(y1esl, object, ieds=ieds, iedo=iedo)
    x1b = object_from_ipce_(y1, object, ieds=ieds, iedo=iedo)
    x1bj = ipce_from_object(x1b, globals_=use_globals)
    check_equality(x1, x1b, expect_equality)
    patches = get_patches(y1, x1bj)
    if not patches and y1 != x1bj:
        pass  # something weird
    if patches:  # pragma: no cover
        msg = "Round trip not obtained"
        # narrow the failure to propertyNames first, if present
        if "propertyNames" in y1["$schema"]:
            assert_equal(
                y1["$schema"]["propertyNames"], x1bj["$schema"]["propertyNames"], msg=msg,
            )
        raise ZAssertionError(msg, y1=y1, x1bj=x1bj, patches=patches)
    # once again, without schema
    ieso_false = IESO(with_schema=False)
    if works_without_schema:
        z1 = ipce_from_object(x1, globals_=use_globals, ieso=ieso_false)
        z2 = cbor2.loads(cbor2.dumps(z1))
        u1 = object_from_ipce_(z2, type(x1), ieds=ieds, iedo=iedo)
        check_equality(x1, u1, expect_equality)
    return locals()
def check_equality(x1: object, x1b: object, expect_equality: bool) -> None:
    """Compare original and round-tripped objects in both directions.

    Skips the check entirely when both operands are types, or when the
    original is a numpy array. Raises AssertionError if equality was
    expected and does not hold; merely logs if equality was not expected
    but holds anyway.
    """
    if isinstance(x1b, type) and isinstance(x1, type):
        # type equality after a round trip is not guaranteed; skip
        return
    if isinstance(x1, np.ndarray):  # pragma: no cover
        return
    forward = x1b == x1
    backward = x1 == x1b
    if expect_equality:  # pragma: no cover
        if not forward:
            m = "Object equality (next == orig) not preserved"
            details = dict(x1b=x1b, x1b_=type(x1b), x1=x1, x1_=type(x1), x1b_eq=x1b.__eq__,)
            raise AssertionError(pretty_dict(m, details))
        if not backward:
            m = "Object equality (orig == next) not preserved"
            details = dict(x1b=x1b, x1b_=type(x1b), x1=x1, x1_=type(x1), x1_eq=x1.__eq__,)
            raise AssertionError(pretty_dict(m, details))
    elif forward and backward:  # pragma: no cover
        msg = "You did not expect equality but they actually are"
        logger.info(msg)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/utils_others.py
|
utils_others.py
|
import copy
import dataclasses
import datetime
import warnings
from dataclasses import Field, is_dataclass, replace
from decimal import Decimal
from numbers import Number
from typing import (
cast,
Dict,
Iterator,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
)
import numpy as np
from zuper_commons.types import (
ZAssertionError,
ZTypeError,
ZValueError,
)
from zuper_ipce.types import get_effective_type
from zuper_typing import (
dataclass,
get_Callable_info,
get_ClassVar_arg,
get_dataclass_info,
get_Dict_name_K_V,
get_DictLike_args,
get_fields_including_static,
get_FixedTupleLike_args,
get_FixedTupleLike_name,
get_ForwardRef_arg,
get_Intersection_args,
get_ListLike_arg,
get_ListLike_name,
get_Literal_args,
get_name_without_brackets,
get_NewType_arg,
get_NewType_name,
get_Optional_arg,
get_Sequence_arg,
get_Set_name_V,
get_SetLike_arg,
get_Tuple_name,
get_Type_arg,
get_TypeVar_bound,
get_TypeVar_name,
get_Union_args,
get_VarTuple_arg,
is_Any,
is_Callable,
is_ClassVar,
is_FixedTupleLike,
is_ForwardRef,
is_DictLike,
is_Intersection,
is_ListLike,
is_Literal,
is_NewType,
is_Optional,
is_Sequence,
is_SetLike,
is_TupleLike,
is_Type,
is_TypeLike,
is_TypeVar,
is_Union,
is_VarTuple,
key_for_sorting_types,
MyBytes,
MyStr,
TypeLike,
)
from .constants import (
ALL_OF,
ANY_OF,
ATT_PYTHON_NAME,
CALLABLE_ORDERING,
CALLABLE_RETURN,
ID_ATT,
IESO,
IPCE_PASS_THROUGH,
JSC_ADDITIONAL_PROPERTIES,
JSC_ARRAY,
JSC_BOOL,
JSC_DEFINITIONS,
JSC_DESCRIPTION,
JSC_INTEGER,
JSC_ITEMS,
JSC_NULL,
JSC_NUMBER,
JSC_OBJECT,
JSC_PROPERTIES,
JSC_PROPERTY_NAMES,
JSC_REQUIRED,
JSC_STRING,
JSC_TITLE,
JSC_TITLE_CALLABLE,
JSC_TITLE_DATETIME,
JSC_TITLE_DECIMAL,
JSC_TITLE_FLOAT,
JSC_TITLE_NUMPY,
JSC_TITLE_SLICE,
JSC_TITLE_TYPE,
JSC_TYPE,
JSONSchema,
ProcessingDict,
REF_ATT,
SCHEMA_ATT,
SCHEMA_BYTES,
SCHEMA_CID,
SCHEMA_ID,
X_CLASSATTS,
X_CLASSVARS,
X_ORDER,
X_ORIG,
X_PYTHON_MODULE_ATT,
)
from .ipce_spec import assert_canonical_ipce, sorted_dict_cbor_ord
from .schema_caching import (
get_ipce_from_typelike_cache,
set_ipce_from_typelike_cache,
TRE,
)
from .schema_utils import make_ref, make_url
from .structures import FakeValues
from .types import IPCE
def is_placeholder(x):
    """Return True if x is a named object whose name marks it as a placeholder."""
    if not hasattr(x, "__name__"):
        return False
    return "Placeholder" in x.__name__
def ipce_from_typelike(
    T: TypeLike,
    *,
    globals0: Optional[dict] = None,
    processing: Optional[ProcessingDict] = None,
    ieso: Optional[IESO] = None,
) -> JSONSchema:
    """Public entry point: compute the IPCE (JSON-schema-like) form of a type.

    Missing options are filled with defaults; the result is checked to be
    canonical before being returned.
    """
    if ieso is None:
        ieso = IESO(with_schema=True)
    if processing is None:
        processing = {}
    if globals0 is None:
        globals0 = {}
    ctx = IFTContext(globals0, processing, ())
    result = ipce_from_typelike_tr(T, ctx, ieso=ieso)
    assert_canonical_ipce(result.schema)
    return result.schema
@dataclass
class IFTContext:
    """State threaded through the ipce_from_typelike_* recursion."""

    # names available for resolving type references
    globals_: dict
    # type name -> URL ref, for the types currently being converted
    # (used to emit $ref for recursive types)
    processing: ProcessingDict
    # names of the dataclasses enclosing the current conversion
    context: Tuple[str, ...]
def ipce_from_typelike_tr(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Convert a typelike to a TRE (schema plus the set of references used).

    Types that are currently being processed (recursive references) are
    emitted as $ref entries. Wraps conversion errors with context.
    """
    if is_placeholder(T):
        raise ZValueError(T=T)
    if not is_TypeLike(T):
        raise ValueError(T)
    if hasattr(T, "__name__"):
        # recursion: the type is already being converted -> emit a reference
        if T.__name__ in c.processing:
            ref = c.processing[T.__name__]
            res = make_ref(ref)
            return TRE(res, {T.__name__: ref})
        if ieso.use_ipce_from_typelike_cache:
            try:
                return get_ipce_from_typelike_cache(T, c.processing)
            except KeyError:
                pass
    try:
        if T is type:
            res = cast(
                JSONSchema,
                {
                    REF_ATT: SCHEMA_ID,
                    JSC_TITLE: JSC_TITLE_TYPE
                    # JSC_DESCRIPTION: T.__doc__
                },
            )
            res = sorted_dict_cbor_ord(res)
            return TRE(res)
        if T is type(None):
            res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, JSC_TYPE: JSC_NULL})
            res = sorted_dict_cbor_ord(res)
            return TRE(res)
        if isinstance(T, type):
            # make the class and its (non-Generic, non-object) ancestors
            # resolvable by name while converting
            for klass in T.mro():
                if klass.__name__.startswith("Generic"):
                    continue
                if klass is object:
                    continue
                globals2 = dict(c.globals_)
                globals2[get_name_without_brackets(klass.__name__)] = klass
                # clsi = get_dataclass_info(klass)
                # bindings = getattr(klass, BINDINGS_ATT, {})
                # for k, v in clsi.bindings.items():
                #     if hasattr(v, "__name__") and v.__name__ not in globals2:
                #         globals2[v.__name__] = v
                #     globals2[k.__name__] = v
                c = dataclasses.replace(c, globals_=globals2)
        tr: TRE = ipce_from_typelike_tr_(T, c=c, ieso=ieso)
        if ieso.use_ipce_from_typelike_cache:
            set_ipce_from_typelike_cache(T, tr.used, tr.schema)
        return tr
    except IPCE_PASS_THROUGH:  # pragma: no cover
        raise
    except ValueError as e:
        msg = "Cannot get schema for type @T"
        raise ZValueError(msg, T=T, T_type=type(T), c=c) from e
    except AssertionError as e:
        msg = "Cannot get schema for type @T"
        raise ZAssertionError(msg, T=T, T_type=type(T), c=c) from e
    except BaseException as e:
        msg = "Cannot get schema for @T"
        raise ZTypeError(msg, T=T, c=c) from e
def ipce_from_typelike_DictLike(T: Type[Dict], c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a dict-like type, as a JSON object.

    String keys map directly to object properties; any other key type is
    wrapped: each entry becomes a FakeValues[K, V] (so arbitrary keys can
    be represented under synthetic string keys).

    Note: the original duplicated the entire result-building tail in both
    branches; only the values type differs, so it is factored out here.
    """
    assert is_DictLike(T), T
    K, V = get_DictLike_args(T)
    res = cast(JSONSchema, {JSC_TYPE: JSC_OBJECT})
    res[JSC_TITLE] = get_Dict_name_K_V(K, V)
    K = get_effective_type(K)
    if isinstance(K, type) and issubclass(K, str):
        values_type = V
    else:
        values_type = FakeValues[K, V]
    res[JSC_PROPERTIES] = {SCHEMA_ATT: {}}  # XXX
    tr = ipce_from_typelike_tr(values_type, c=c, ieso=ieso)
    res[JSC_ADDITIONAL_PROPERTIES] = tr.schema
    res[SCHEMA_ATT] = SCHEMA_ID
    res = sorted_dict_cbor_ord(res)
    return TRE(res, tr.used)
def ipce_from_typelike_SetLike(T: Type[Set], c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a set-like type: a JSON object whose keys follow SCHEMA_CID."""
    assert is_SetLike(T), T
    element = get_SetLike_arg(T)
    inner = ipce_from_typelike_tr(element, c=c, ieso=ieso)
    schema = cast(
        JSONSchema,
        {
            JSC_TYPE: JSC_OBJECT,
            JSC_TITLE: get_Set_name_V(element),
            JSC_PROPERTY_NAMES: SCHEMA_CID,
            JSC_ADDITIONAL_PROPERTIES: inner.schema,
            SCHEMA_ATT: SCHEMA_ID,
        },
    )
    return TRE(sorted_dict_cbor_ord(schema), inner.used)
def ipce_from_typelike_TupleLike(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for tuples, encoded as JSON arrays.

    Variable-length tuples get a single `items` schema; fixed tuples get
    one schema per position.
    """
    assert is_TupleLike(T), T
    used = {}

    def convert(x: TypeLike) -> JSONSchema:
        # convert a component, accumulating the references it used
        inner = ipce_from_typelike_tr(x, c=c, ieso=ieso)
        used.update(inner.used)
        return inner.schema

    if is_VarTuple(T):
        T = cast(Type[Tuple], T)
        res = cast(JSONSchema, {})
        res[SCHEMA_ATT] = SCHEMA_ID
        res[JSC_TYPE] = JSC_ARRAY
        res[JSC_ITEMS] = convert(get_VarTuple_arg(T))
        res[JSC_TITLE] = get_Tuple_name(T)
        return TRE(sorted_dict_cbor_ord(res), used)
    if is_FixedTupleLike(T):
        T = cast(Type[Tuple], T)
        res = cast(JSONSchema, {})
        res[SCHEMA_ATT] = SCHEMA_ID
        res[JSC_TYPE] = JSC_ARRAY
        res[JSC_ITEMS] = [convert(a) for a in get_FixedTupleLike_args(T)]
        res[JSC_TITLE] = get_FixedTupleLike_name(T)
        return TRE(sorted_dict_cbor_ord(res), used)
    assert False
class KeepTrackSer:
    """Accumulates the `used` references across multiple type conversions."""

    def __init__(self, c: IFTContext, ieso: IESO):
        self.c = c
        self.ieso = ieso
        # type name -> ref, for every reference used by the conversions so far
        self.used = {}

    def ipce_from_typelike(self, T: TypeLike) -> JSONSchema:
        """Convert T, recording the references the conversion used."""
        tre = ipce_from_typelike_tr(T, c=self.c, ieso=self.ieso)
        self.used.update(tre.used)
        return tre.schema

    # def ipce_from_object(self, x: IPCE, st: TypeLike) -> IPCE:
    #     from .conv_ipce_from_object import ipce_from_object_
    #     res = object_from_ipce_(x, st, ieds=self.ieds, iedo=self.iedo)
    #     return res

    def tre(self, x: IPCE) -> TRE:
        """Wrap a finished schema together with the accumulated references."""
        return TRE(x, self.used)
def ipce_from_typelike_NewType(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a NewType: records its name and the wrapped type's schema.

    (Removed the vestigial `_ = c, ieso` unused-parameter marker: both
    parameters are in fact used via KeepTrackSer.)
    """
    name = get_NewType_name(T)
    T0 = get_NewType_arg(T)
    kt = KeepTrackSer(c, ieso)
    res = cast(JSONSchema, {})
    res[SCHEMA_ATT] = SCHEMA_ID
    res[JSC_TYPE] = "NewType"
    res["newtype"] = kt.ipce_from_typelike(T0)
    res[JSC_TITLE] = name
    res = sorted_dict_cbor_ord(res)
    return kt.tre(res)
def ipce_from_typelike_ListLike(T: Type[List], c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a list-like type: a JSON array of homogeneous items."""
    assert is_ListLike(T), T
    kt = KeepTrackSer(c, ieso)
    schema = cast(
        JSONSchema,
        {
            SCHEMA_ATT: SCHEMA_ID,
            JSC_TYPE: JSC_ARRAY,
            JSC_ITEMS: kt.ipce_from_typelike(get_ListLike_arg(T)),
            JSC_TITLE: get_ListLike_name(T),
        },
    )
    return kt.tre(sorted_dict_cbor_ord(schema))
def ipce_from_typelike_Callable(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a Callable: parameter and return schemas go into definitions."""
    assert is_Callable(T), T
    cinfo = get_Callable_info(T)
    kt = KeepTrackSer(c, ieso)
    res = cast(
        JSONSchema,
        {
            JSC_TYPE: JSC_OBJECT,
            SCHEMA_ATT: SCHEMA_ID,
            JSC_TITLE: JSC_TITLE_CALLABLE,
            "special": "callable",
        },
    )
    definitions = {
        pname: kt.ipce_from_typelike(ptype)
        for pname, ptype in cinfo.parameters_by_name.items()
    }
    # the return type is stored under a reserved key alongside the parameters
    definitions[CALLABLE_RETURN] = kt.ipce_from_typelike(cinfo.returns)
    res[JSC_DEFINITIONS] = definitions
    res[CALLABLE_ORDERING] = list(cinfo.ordering)
    res = sorted_dict_cbor_ord(res)
    return kt.tre(res)
def ipce_from_typelike_tr_(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Dispatch on the kind of typelike and build the corresponding schema.

    Handles primitives inline; delegates to the specialized
    ipce_from_typelike_* helpers for containers, unions, callables, etc.
    """
    if T is None:
        msg = "None is not a type!"
        raise ZValueError(msg)
    # This can actually happen inside a Tuple (or Dict, etc.) even though
    # we have a special case for dataclass
    if is_ForwardRef(T):  # pragma: no cover
        msg = "It is not supported to have an ForwardRef here yet."
        raise ZValueError(msg, T=T)
    if isinstance(T, str):  # pragma: no cover
        msg = "It is not supported to have a string here."
        raise ZValueError(msg, T=T)
    # --- primitives ---
    if T is str or T is MyStr:
        res = cast(JSONSchema, {JSC_TYPE: JSC_STRING, SCHEMA_ATT: SCHEMA_ID})
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is bool:
        res = cast(JSONSchema, {JSC_TYPE: JSC_BOOL, SCHEMA_ATT: SCHEMA_ID})
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is Number:
        res = cast(JSONSchema, {JSC_TYPE: JSC_NUMBER, SCHEMA_ATT: SCHEMA_ID})
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is float:
        res = {JSC_TYPE: JSC_NUMBER, SCHEMA_ATT: SCHEMA_ID, JSC_TITLE: JSC_TITLE_FLOAT}
        res = cast(JSONSchema, res,)
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is int:
        res = cast(JSONSchema, {JSC_TYPE: JSC_INTEGER, SCHEMA_ATT: SCHEMA_ID})
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is slice:
        return ipce_from_typelike_slice(ieso=ieso)
    # Decimal and datetime are encoded as tagged strings
    if T is Decimal:
        res = {JSC_TYPE: JSC_STRING, JSC_TITLE: JSC_TITLE_DECIMAL, SCHEMA_ATT: SCHEMA_ID}
        res = cast(JSONSchema, res,)
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is datetime.datetime:
        res = {
            JSC_TYPE: JSC_STRING,
            JSC_TITLE: JSC_TITLE_DATETIME,
            SCHEMA_ATT: SCHEMA_ID,
        }
        res = cast(JSONSchema, res)
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is bytes or T is MyBytes:
        res = SCHEMA_BYTES
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    if T is object:
        res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, JSC_TITLE: "object"})
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    # we cannot use isinstance on typing.Any
    if is_Any(T):  # XXX not possible...
        res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, JSC_TITLE: "Any"})
        res = sorted_dict_cbor_ord(res)
        return TRE(res)
    # --- composite types: delegate ---
    if is_Union(T):
        return ipce_from_typelike_Union(T, c=c, ieso=ieso)
    if is_Optional(T):
        return ipce_from_typelike_Optional(T, c=c, ieso=ieso)
    if is_DictLike(T):
        T = cast(Type[Dict], T)
        return ipce_from_typelike_DictLike(T, c=c, ieso=ieso)
    if is_SetLike(T):
        T = cast(Type[Set], T)
        return ipce_from_typelike_SetLike(T, c=c, ieso=ieso)
    if is_Intersection(T):
        return ipce_from_typelike_Intersection(T, c=c, ieso=ieso)
    if is_Callable(T):
        return ipce_from_typelike_Callable(T, c=c, ieso=ieso)
    if is_NewType(T):
        return ipce_from_typelike_NewType(T, c=c, ieso=ieso)
    if is_Sequence(T):
        msg = "Translating Sequence into List"
        warnings.warn(msg)
        T = cast(Type[Sequence], T)
        # raise ValueError(msg)
        V = get_Sequence_arg(T)
        T = List[V]
        return ipce_from_typelike_ListLike(T, c=c, ieso=ieso)
    if is_ListLike(T):
        T = cast(Type[List], T)
        return ipce_from_typelike_ListLike(T, c=c, ieso=ieso)
    if is_TupleLike(T):
        # noinspection PyTypeChecker
        return ipce_from_typelike_TupleLike(T, c=c, ieso=ieso)
    if is_Type(T):
        # Type[X] is encoded as a "subtype" wrapper around X's schema
        TT = get_Type_arg(T)
        r = ipce_from_typelike_tr(TT, c, ieso=ieso)
        res = {SCHEMA_ATT: SCHEMA_ID, JSC_TYPE: "subtype", "subtype": r.schema}
        res = cast(JSONSchema, res)
        res = sorted_dict_cbor_ord(res)
        return TRE(res, r.used)
        # raise NotImplementedError(T)
    if is_Literal(T):
        # the schema of the values' type, restricted with an "enum" list
        values = get_Literal_args(T)
        T0 = type(values[0])
        r = ipce_from_typelike_tr(T0, c, ieso=ieso)
        from .conv_ipce_from_object import ipce_from_object  # ok-ish

        enum = [ipce_from_object(_, T0) for _ in values]
        res = cast(JSONSchema, dict(r.schema))
        res["enum"] = enum
        res = sorted_dict_cbor_ord(res)
        return TRE(res, r.used)
    assert isinstance(T, type), (T, type(T), is_Optional(T), is_Union(T), is_Literal(T))
    if is_dataclass(T):
        return ipce_from_typelike_dataclass(T, c=c, ieso=ieso)
    if T is np.ndarray:
        return ipce_from_typelike_ndarray()
    msg = "Cannot interpret the type @T"
    raise ZValueError(msg, T=T, c=c)
def ipce_from_typelike_ndarray() -> TRE:
    """Schema for numpy arrays: object carrying shape, dtype, and raw bytes."""
    properties = sorted_dict_cbor_ord(
        {"shape": {}, "dtype": {}, "data": SCHEMA_BYTES}  # TODO  # TODO
    )
    res = cast(
        JSONSchema,
        {
            SCHEMA_ATT: SCHEMA_ID,
            JSC_TYPE: JSC_OBJECT,
            JSC_TITLE: JSC_TITLE_NUMPY,
            JSC_PROPERTIES: properties,
        },
    )
    return TRE(sorted_dict_cbor_ord(res))
def ipce_from_typelike_slice(ieso: IESO) -> TRE:
    """Schema for the builtin slice: start/stop/step, each Optional[int]."""
    ctx = IFTContext({}, {}, ())
    component = ipce_from_typelike_tr(Optional[int], c=ctx, ieso=ieso)
    res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID})
    res[JSC_TYPE] = JSC_OBJECT
    res[JSC_TITLE] = JSC_TITLE_SLICE
    res[JSC_PROPERTIES] = sorted_dict_cbor_ord(
        {
            "start": component.schema,  # TODO
            "stop": component.schema,  # TODO
            "step": component.schema,
        }
    )
    return TRE(sorted_dict_cbor_ord(res), component.used)
def ipce_from_typelike_Intersection(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for an Intersection type, encoded with JSON-schema allOf."""
    kt = KeepTrackSer(c, ieso)
    branches = [kt.ipce_from_typelike(t) for t in get_Intersection_args(T)]
    res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, ALL_OF: branches})
    return kt.tre(sorted_dict_cbor_ord(res))
def get_mentioned_names(T: TypeLike, context=()) -> Iterator[str]:
    """Yield the names of all types mentioned (recursively) inside T.

    `context` carries the types already visited, breaking cycles.

    Bug fix: the ForwardRef branch used `return get_ForwardRef_arg(T)`.
    In a generator, `return <value>` does not yield anything (the value
    only lands on StopIteration), so forward-referenced names were
    silently dropped; it must be `yield`.
    """
    if T in context:
        return
    c2 = context + (T,)
    if is_dataclass(T):
        # the top-level dataclass itself is not "mentioned"; nested ones are
        if context:
            yield T.__name__
        annotations = getattr(T, "__annotations__", {})
        for v in annotations.values():
            yield from get_mentioned_names(v, c2)
    elif is_Type(T):
        v = get_Type_arg(T)
        yield from get_mentioned_names(v, c2)
    elif is_TypeVar(T):
        yield get_TypeVar_name(T)
    elif is_FixedTupleLike(T):
        T = cast(Type[Tuple], T)
        for t in get_FixedTupleLike_args(T):
            yield from get_mentioned_names(t, c2)
    elif is_VarTuple(T):
        T = cast(Type[Tuple], T)
        t = get_VarTuple_arg(T)
        yield from get_mentioned_names(t, c2)
    elif is_ListLike(T):
        T = cast(Type[List], T)
        t = get_ListLike_arg(T)
        yield from get_mentioned_names(t, c2)
    elif is_DictLike(T):
        T = cast(Type[Dict], T)
        K, V = get_DictLike_args(T)
        yield from get_mentioned_names(K, c2)
        yield from get_mentioned_names(V, c2)
    elif is_SetLike(T):
        T = cast(Type[Set], T)
        t = get_SetLike_arg(T)
        yield from get_mentioned_names(t, c2)
    elif is_ForwardRef(T):
        yield get_ForwardRef_arg(T)
    elif is_Optional(T):
        t = get_Optional_arg(T)
        yield from get_mentioned_names(t, c2)
    elif is_Union(T):
        for t in get_Union_args(T):
            yield from get_mentioned_names(t, c2)
    else:
        pass
def ipce_from_typelike_dataclass(T: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a dataclass: properties, class vars/attributes, defaults.

    Registers the class in c.processing so recursive references become
    $ref; open TypeVars become entries in the schema's definitions.
    """
    assert is_dataclass(T), T
    # noinspection PyDataclass
    # copy the mutable context pieces: this function mutates them below
    c = replace(
        c,
        globals_=dict(c.globals_),
        processing=dict(c.processing),
        context=c.context + (T.__name__,),
    )
    used = {}

    def ftl(x: TypeLike) -> JSONSchema:
        # convert a typelike, accumulating the refs it used
        if not is_TypeLike(x):
            raise ValueError(x)
        tr = ipce_from_typelike_tr(x, c=c, ieso=ieso)
        used.update(tr.used)
        return tr.schema

    def fob(x: object) -> IPCE:
        # serialize a value (used for class attributes)
        return ipce_from_object(x, globals_=c.globals_, ieso=ieso)

    def f(x: object) -> IPCE:
        # dispatch: types go through ftl, plain values through fob
        if is_TypeLike(x):
            x = cast(TypeLike, x)
            return ftl(x)
        else:
            return fob(x)

    res = cast(JSONSchema, {})
    # the URL encodes the chain of enclosing classes that mention this one
    mentioned = set(get_mentioned_names(T, ()))
    relevant = [x for x in c.context if x in mentioned and x != T.__name__]
    relevant.append(T.__qualname__)
    url_name = "_".join(relevant)
    my_ref = make_url(url_name)
    res[ID_ATT] = my_ref
    res[JSC_TITLE] = T.__name__
    # register before converting fields so recursion yields a $ref
    c.processing[T.__name__] = my_ref
    res[ATT_PYTHON_NAME] = T.__qualname__
    res[X_PYTHON_MODULE_ATT] = T.__module__
    res[SCHEMA_ATT] = SCHEMA_ID
    res[JSC_TYPE] = JSC_OBJECT
    if hasattr(T, "__doc__") and T.__doc__ is not None:
        res[JSC_DESCRIPTION] = T.__doc__
    Ti = get_dataclass_info(T)
    definitions = {}
    # bindings: Dict[str, type] = {}
    # collect the open TypeVars and their bounds
    types2: Tuple[type, ...] = Ti.get_open()
    to_add: Dict[type, type] = {}
    for tx in types2:
        if not isinstance(tx, TypeVar):
            continue
        abound = get_TypeVar_bound(tx)
        to_add[tx] = abound
        c.globals_[tx] = tx
    #
    # for tx, val in Ti.bindings.items():
    #     to_add[tx] = val

    def get_schema_with_url(url, bound):
        # schema of the bound, tagged with the TypeVar's URL
        schema = ftl(bound)
        schema = copy.copy(schema)
        schema[ID_ATT] = url
        schema = sorted_dict_cbor_ord(schema)
        return schema

    for t2, bound in to_add.items():
        t2_name = get_TypeVar_name(t2)
        url = make_url(f"{T.__qualname__}/{t2_name}")
        schema = get_schema_with_url(url, bound)
        c.processing[t2_name] = url
        if t2 in Ti.get_open():
            definitions[t2_name] = schema
        # if t2 in Ti.bindings:
        #     bindings[t2_name] = schema
    if Ti.orig:
        # res[X_ORIG] = list(get_TypeVar_name(_) for _ in Ti.orig)
        def ff(_) -> JSONSchema:
            # TypeVars become refs to their definition URL; others convert
            if is_TypeVar(_):
                _name = get_TypeVar_name(_)
                url = make_url(f"{T.__qualname__}/{_name}")
                return make_ref(url)
            else:
                return ftl(_)

        res[X_ORIG] = [ff(_) for _ in Ti.orig]
    # if Ti.extra:
    #     res[X_EXTRA] = list(get_TypeVar_name(_) for _ in Ti.extra)
    if definitions:
        res[JSC_DEFINITIONS] = sorted_dict_cbor_ord(definitions)
    # if bindings:
    #     res[X_BINDINGS] = sorted_dict_cbor_ord(bindings)
    properties = {}
    classvars = {}
    classatts = {}
    required = []
    all_fields: Dict[str, Field] = get_fields_including_static(T)
    from .conv_ipce_from_object import ipce_from_object

    original_order = list(all_fields)
    ordered = sorted(all_fields)
    for name in ordered:
        afield = all_fields[name]
        t = afield.type
        try:
            if isinstance(t, str):  # pragma: no cover
                # t = eval_just_string(t, c.globals_)
                msg = "Before serialization, need to have all text references substituted."
                msg += f"\n found reference {t!r} in class {T}."
                raise Exception(msg)
            if is_ClassVar(t):
                tt = get_ClassVar_arg(t)
                classvars[name] = ftl(tt)
                try:
                    the_att = get_T_attribute(T, name)
                except AttributeError:
                    pass
                else:
                    classatts[name] = f(the_att)
            else:  # not classvar
                schema = ftl(t)
                try:
                    default = get_field_default(afield)
                except KeyError:
                    # no default -> the property is required
                    required.append(name)
                else:
                    schema = make_schema_with_default(schema, default, c, ieso)
                properties[name] = schema
        except IPCE_PASS_THROUGH:  # pragma: no cover
            raise
        except BaseException as e:
            msg = "Cannot write schema for attribute @name -> @t of type @T."
            raise ZTypeError(msg, name=name, t=t, T=T) from e
    if required:  # empty is error
        res[JSC_REQUIRED] = sorted(required)
    if classvars:
        res[X_CLASSVARS] = classvars
    if classatts:
        res[X_CLASSATTS] = classatts
    assert len(classvars) >= len(classatts), (classvars, classatts)
    if properties:
        res[JSC_PROPERTIES] = sorted_dict_cbor_ord(properties)
    res[X_ORDER] = original_order
    # NOTE(review): this guard tests the *function object* sorted_dict_cbor_ord,
    # which is always truthy, so the branch always runs; looks vestigial.
    if sorted_dict_cbor_ord:
        res = sorted_dict_cbor_ord(res)
    if T.__name__ in used:
        used.pop(T.__name__)
    return TRE(res, used)
def get_T_attribute(T, n: str) -> object:
    """Return the class attribute `n` of typelike T; AttributeError if absent.

    Finding a dataclasses.Field object means the attribute is only a field
    declaration without an actual value, which also counts as absent.
    """
    try:
        value = getattr(T, n)
    except AttributeError:
        raise AttributeError()
    if isinstance(value, Field):
        # actually attribute not there: only the field descriptor is left
        raise AttributeError()
    return value
def make_schema_with_default(
    schema: JSONSchema, default: object, c: IFTContext, ieso: IESO
) -> JSONSchema:
    """Wrap `schema` in an anyOf carrying the serialized default value."""
    from .conv_ipce_from_object import ipce_from_object  # ok-ish

    wrapper = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, ANY_OF: [schema]})
    wrapper["default"] = ipce_from_object(default, globals_=c.globals_, ieso=ieso)
    return sorted_dict_cbor_ord(wrapper)
from dataclasses import MISSING


def get_field_default(f: Field) -> object:
    """Return the default value of a dataclass field.

    Raises KeyError if the field declares neither a default nor a
    default_factory.

    Fix: MISSING is a sentinel, so it must be compared by identity.
    The previous `!=` invoked the default value's own __eq__, which breaks
    for values (e.g. numpy arrays) whose __eq__ does not return a bool.
    """
    if f.default is not MISSING:
        return f.default
    if f.default_factory is not MISSING:
        return f.default_factory()
    raise KeyError("no default")
def ipce_from_typelike_Union(t: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for a Union, encoded as anyOf over the sorted member types."""
    used = {}

    def convert(x: TypeLike) -> JSONSchema:
        # convert one member, accumulating the references it used
        inner = ipce_from_typelike_tr(x, c=c, ieso=ieso)
        used.update(inner.used)
        return inner.schema

    # sort for a canonical, order-independent encoding
    members = sorted(get_Union_args(t), key=key_for_sorting_types)
    res = cast(
        JSONSchema, {SCHEMA_ATT: SCHEMA_ID, ANY_OF: [convert(m) for m in members]}
    )
    return TRE(sorted_dict_cbor_ord(res), used)
def ipce_from_typelike_Optional(t: TypeLike, c: IFTContext, ieso: IESO) -> TRE:
    """Schema for Optional[X]: anyOf over [X, NoneType]."""
    kt = KeepTrackSer(c, ieso)
    branches = [
        kt.ipce_from_typelike(member) for member in (get_Optional_arg(t), type(None))
    ]
    res = cast(JSONSchema, {SCHEMA_ATT: SCHEMA_ID, ANY_OF: branches})
    return kt.tre(sorted_dict_cbor_ord(res))
#
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/conv_ipce_from_typelike.py
|
conv_ipce_from_typelike.py
|
from typing import TypeVar
from zuper_commons.types import ZException
from zuper_typing import dataclass, Generic
class CannotFindSchemaReference(ZException):
    """Raised when a reference inside an IPCE schema cannot be resolved."""

    pass
#
# class CannotResolveTypeVar(ZException):
# pass
# Key/value type parameters for FakeValues.
KK = TypeVar("KK")
VV = TypeVar("VV")


@dataclass
class FakeValues(Generic[KK, VV]):
    """Wrapper entry used to serialize mappings whose keys are not strings.

    Each (key, value) pair becomes one FakeValues(real_key, value) stored
    under a synthetic string key in the resulting JSON object.
    """

    real_key: KK
    value: VV
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/structures.py
|
structures.py
|
__version__ = "6.1.2"

from zuper_commons.logs import ZLogger

# Package-wide logger.
logger = ZLogger(__name__)

import os

# Directory two levels above this file (the sources root).
path = os.path.dirname(os.path.dirname(__file__))

logger.debug(f'version {__version__} path {path}')

from .types import IPCE, TypeLike
from .constants import IEDO, IESO
from .conv_ipce_from_object import ipce_from_object
from .conv_ipce_from_typelike import ipce_from_typelike
from .conv_object_from_ipce import object_from_ipce
from .conv_typelike_from_ipce import typelike_from_ipce
from .ipce_spec import *

# Re-exported public API; the tuple also silences "unused import" warnings.
_ = (
    ipce_from_object,
    object_from_ipce,
    typelike_from_ipce,
    ipce_from_typelike,
    TypeLike,
    IPCE,
    IEDO,
    IESO,
)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/__init__.py
|
__init__.py
|
import datetime
from dataclasses import dataclass, Field, fields, is_dataclass
from typing import cast, Iterator, Optional, Set, TypeVar
import numpy as np
from frozendict import frozendict
from zuper_commons.types import ZNotImplementedError, ZTypeError, ZValueError
from zuper_ipce.types import get_effective_type
from zuper_typing import (DictStrType, get_Optional_arg, get_Union_args, is_Optional,
is_SpecialForm,
is_Union,
lift_to_customtuple, make_dict, same_as_default, value_liskov)
from .constants import (GlobalsDict, HINTS_ATT, IESO, IPCE_PASS_THROUGH, IPCE_TRIVIAL, SCHEMA_ATT)
from .conv_ipce_from_typelike import ipce_from_typelike, ipce_from_typelike_ndarray
from .guesses import (
get_dict_type_suggestion,
get_list_type_suggestion,
get_set_type_suggestion,
get_tuple_type_suggestion,
)
from .ipce_spec import sorted_dict_cbor_ord
from .structures import FakeValues
from .types import IPCE, is_unconstrained, TypeLike
X = TypeVar("X")
def ipce_from_object(
    ob: object,
    suggest_type: TypeLike = object,
    *,
    globals_: GlobalsDict = None,
    ieso: Optional[IESO] = None,
) -> IPCE:
    """Convert an object to its IPCE (JSON-compatible) representation.

    suggest_type guides the serialization; defaults are filled for the
    keyword options. TypeErrors are wrapped with context.
    """
    # logger.debug(f'ipce_from_object({ob})')
    if ieso is None:
        ieso = IESO(with_schema=True)
    if globals_ is None:
        globals_ = {}
    try:
        res = ipce_from_object_(ob, suggest_type, globals_=globals_, ieso=ieso)
    except TypeError as e:
        # fix: the placeholder must be "@T" to match the T= kwarg below;
        # the previous "@t" was never substituted in the rendered message
        msg = "ipce_from_object() for type @T failed."
        raise ZTypeError(msg, ob=ob, T=type(ob)) from e
    # assert_canonical_ipce(res)
    return res
def ipce_from_object_(ob: object, st: TypeLike, *, globals_: GlobalsDict, ieso: IESO) -> IPCE:
    """Dispatch serialization of `ob`, guided by the suggested type `st`."""
    unconstrained = is_unconstrained(st)
    if ob is None:
        # None is allowed only when the suggested type permits it
        if unconstrained or (st is type(None)) or is_Optional(st):
            return ob
        else:  # pragma: no cover
            msg = f"ob is None but suggest_type is @suggest_type"
            raise ZTypeError(msg, suggest_type=st)
    if is_Optional(st):
        assert ob is not None  # from before
        T = get_Optional_arg(st)
        return ipce_from_object_(ob, T, globals_=globals_, ieso=ieso)
    if is_Union(st):
        return ipce_from_object_union(ob, st, globals_=globals_, ieso=ieso)
    if isinstance(ob, datetime.datetime):
        if not ob.tzinfo:
            msg = "Cannot serialize dates without a timezone."
            raise ZValueError(msg, ob=ob)
    # trivially JSON-representable values pass through unchanged
    if st in IPCE_TRIVIAL:
        if not isinstance(ob, st):
            msg = "Expected this to be @suggest_type."
            raise ZTypeError(msg, st=st, ob=ob, T=type(ob))
        return ob
    if isinstance(ob, IPCE_TRIVIAL):
        return ob
    # containers
    if isinstance(ob, list):
        return ipce_from_object_list(ob, st, globals_=globals_, ieso=ieso)
    if isinstance(ob, tuple):
        return ipce_from_object_tuple(ob, st, globals_=globals_, ieso=ieso)
    if isinstance(ob, slice):
        return ipce_from_object_slice(ob, ieso=ieso)
    if isinstance(ob, set):
        return ipce_from_object_set(ob, st, globals_=globals_, ieso=ieso)
    if isinstance(ob, (dict, frozendict)):
        return ipce_from_object_dict(ob, st, globals_=globals_, ieso=ieso)
    # the object itself is a type (or a typing special form): emit its schema
    if isinstance(ob, type):
        return ipce_from_typelike(ob, globals0=globals_, processing={}, ieso=ieso)
    if is_SpecialForm(cast(TypeLike, ob)):
        ob = cast(TypeLike, ob)
        return ipce_from_typelike(ob, globals0=globals_, processing={}, ieso=ieso)
    if isinstance(ob, np.ndarray):
        return ipce_from_object_numpy(ob, ieso=ieso)
    assert not isinstance(ob, type), ob
    if is_dataclass(ob):
        return ipce_from_object_dataclass_instance(ob, globals_=globals_, ieso=ieso)
    msg = "I do not know a way to convert object @ob of type @T."
    raise ZNotImplementedError(msg, ob=ob, T=type(ob))
def ipce_from_object_numpy(ob, *, ieso: IESO) -> IPCE:
    """Serialize a numpy array, attaching the ndarray schema when requested."""
    from .numpy_encoding import ipce_from_numpy_array

    encoded = ipce_from_numpy_array(ob)
    if ieso.with_schema:
        encoded[SCHEMA_ATT] = ipce_from_typelike_ndarray().schema
    return encoded
def ipce_from_object_slice(ob, *, ieso: IESO):
    """Serialize a slice as {start, step, stop}, with schema if requested."""
    from .conv_ipce_from_typelike import ipce_from_typelike_slice

    out = {"start": ob.start, "step": ob.step, "stop": ob.stop}
    if ieso.with_schema:
        out[SCHEMA_ATT] = ipce_from_typelike_slice(ieso=ieso).schema
    return sorted_dict_cbor_ord(out)
def ipce_from_object_union(ob: object, st: TypeLike, *, globals_, ieso: IESO) -> IPCE:
    """Serialize `ob` using the first Union member it is compatible with."""
    failures = []
    for member in get_Union_args(st):
        verdict = value_liskov(ob, member)
        if verdict:
            return ipce_from_object(ob, member, globals_=globals_, ieso=ieso)
        failures.append(verdict)
    # no member accepted the value: report every failed attempt
    msg = "Cannot save union."
    raise ZTypeError(msg, suggest_type=st, value=ob, errors=failures)
def ipce_from_object_list(ob, st: TypeLike, *, globals_: dict, ieso: IESO) -> IPCE:
    """Serialize a list element-wise using the suggested element type."""
    assert st is not None
    element_type = get_list_type_suggestion(ob, st)
    return [
        ipce_from_object(item, element_type, globals_=globals_, ieso=ieso)
        for item in ob
    ]
def ipce_from_object_tuple(ob: tuple, st: TypeLike, *, globals_, ieso: IESO) -> IPCE:
    """Serialize a tuple as a JSON array, one suggested type per position."""
    member_types = get_tuple_type_suggestion(ob, st)
    return [
        ipce_from_object(value, T, globals_=globals_, ieso=ieso)
        for value, T in zip(ob, member_types)
    ]
@dataclass
class IterAtt:
    """One dataclass attribute to serialize."""

    # attribute name
    attr: str
    # declared (field) type of the attribute
    T: TypeLike
    # current value on the instance
    value: object
def iterate_resolved_type_values_without_default(x) -> Iterator[IterAtt]:
    """Yield attributes of dataclass instance x that differ from their defaults."""
    klass = type(x)
    for fld in fields(klass):
        assert isinstance(fld, Field), list(fields(klass))
        current = getattr(x, fld.name)
        if same_as_default(fld, current):
            # values equal to the declared default are omitted from the IPCE
            continue
        yield IterAtt(fld.name, fld.type, current)
def ipce_from_object_dataclass_instance(ob: dataclass, *, globals_, ieso: IESO) -> IPCE:
    """Serialize a dataclass instance as a JSON object of its non-default fields.

    When schemas are enabled, also records type hints for tuple/list
    attributes whose declared type is unconstrained, so the values can be
    deserialized back to the right concrete type.
    """
    globals_ = dict(globals_)
    res = {}
    T0 = type(ob)
    from .conv_ipce_from_typelike import ipce_from_typelike

    if ieso.with_schema:
        res[SCHEMA_ATT] = ipce_from_typelike(T0, globals0=globals_, ieso=ieso)
    # make the class itself resolvable by name for nested conversions
    globals_[T0.__name__] = T0
    hints = DictStrType()
    attrs = list(iterate_resolved_type_values_without_default(ob))
    if ieso.with_schema:
        for ia in attrs:
            # record concrete container types where the declaration is too loose
            if isinstance(ia.value, tuple) and is_unconstrained(ia.T):
                v2 = lift_to_customtuple(ia.value)
                hints[ia.attr] = type(v2)
            elif isinstance(ia.value, list) and is_unconstrained(ia.T):
                hints[ia.attr] = type(ia.value)
    for ia in attrs:
        k = ia.attr
        v = ia.value
        T = ia.T
        try:
            res[k] = ipce_from_object(v, T, globals_=globals_, ieso=ieso)
            # needs_schema = isinstance(v, (list, tuple))
            # if ieso.with_schema and needs_schema and is_unconstrained(T):
            #     if isinstance(v, tuple):
            #         Ti = make_Tuple(*get_tuple_type_suggestion(v, T))
            #     else:
            #         Ti = type(v)
            #     hints[k] = Ti
        except IPCE_PASS_THROUGH:  # pragma: no cover
            raise
        except BaseException as e:
            msg = (
                f"Could not serialize an object. Problem "
                f"occurred with the attribute {k!r}. It is supposed to be of type @expected."
            )
            raise ZValueError(msg, expected=T, ob=ob) from e
    if hints:
        # logger.info(hints=hints)
        res[HINTS_ATT] = ipce_from_object(hints, ieso=ieso)
    res = sorted_dict_cbor_ord(res)
    return res
def ipce_from_object_dict(ob: dict, st: TypeLike, *, globals_: GlobalsDict, ieso: IESO):
    """Serialize a dict as a JSON object.

    str keys map directly; int keys are stringified; any other key type is
    wrapped in FakeValues entries stored under synthetic "set:NN" keys.
    """
    K, V = get_dict_type_suggestion(ob, st)
    DT = make_dict(K, V)
    res = {}
    from .conv_ipce_from_typelike import ipce_from_typelike

    if ieso.with_schema:
        res[SCHEMA_ATT] = ipce_from_typelike(DT, globals0=globals_, ieso=ieso)
    K = get_effective_type(K)
    if isinstance(K, type) and issubclass(K, str):
        for k, v in ob.items():
            res[k] = ipce_from_object(v, V, globals_=globals_, ieso=ieso)
    elif isinstance(K, type) and issubclass(K, int):
        for k, v in ob.items():
            res[str(k)] = ipce_from_object(v, V, globals_=globals_, ieso=ieso)
    else:
        FV = FakeValues[K, V]
        # group first by the type name, then sort by key
        items = [(type(k).__name__, k, v) for k, v in ob.items()]
        items = sorted(items)
        for i, (_, k, v) in enumerate(items):
            h = get_key_for_set_entry(i, len(ob))
            fv = FV(k, v)
            res[h] = ipce_from_object(fv, globals_=globals_, ieso=ieso)
    res = sorted_dict_cbor_ord(res)
    return res
def ipce_from_object_set(ob: set, st: TypeLike, *, globals_: GlobalsDict, ieso: IESO):
    """Serialize a set to an IPCE dictionary with synthetic "set:<i>" keys.

    Entries are ordered first by their type name, then by value, so the
    output is deterministic.
    """
    from .conv_ipce_from_typelike import ipce_from_typelike

    V = get_set_type_suggestion(ob, st)
    ST = Set[V]
    result = {}
    if ieso.with_schema:
        result[SCHEMA_ATT] = ipce_from_typelike(ST, globals0=globals_, ieso=ieso)

    # group first by the type name, then sort by key
    ordered = sorted((type(value).__name__, value) for value in ob)
    total = len(ob)
    for index, (_, value) in enumerate(ordered):
        encoded = ipce_from_object(value, V, globals_=globals_, ieso=ieso)
        result[get_key_for_set_entry(index, total)] = encoded

    return sorted_dict_cbor_ord(result)
def get_key_for_set_entry(i: int, n: int):
    """Return the synthetic key ("set:<i>") for the i-th of n entries.

    The index is zero-padded to the decimal width of *n* so the keys sort
    lexicographically in entry order.
    """
    width = len(str(n))
    return "set:" + str(i).zfill(width)
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/conv_ipce_from_object.py
|
conv_ipce_from_object.py
|
from zuper_commons.types import ZValueError
class ZDeserializationError(ZValueError):
    """Raised when an IPCE structure cannot be deserialized."""

    pass


class ZDeserializationErrorSchema(ZDeserializationError):
    """Deserialization failure caused by the schema part of the input."""

    pass
# class ZSerializationError(ZValueError):
# pass
#
# class ZInvalidSchema(ZValueError):
# pass
|
zuper-ipce-z6
|
/zuper-ipce-z6-6.1.2.tar.gz/zuper-ipce-z6-6.1.2/src/zuper_ipce/exceptions.py
|
exceptions.py
|
import sys
from setuptools import setup
# This distribution targets Python 2 only: refuse to install under Python 3+.
if sys.version_info >= (3, 0, 0):
    msg = 'This is supposed to be used only with Python 2. Found version %s' % sys.version
    raise Exception(msg)
def get_version(filename):
    """Extract the ``__version__`` string literal from a Python source file.

    Scans *filename* line by line and parses the first line starting with
    ``__version__`` as an assignment, returning the assigned string.

    Raises:
        ValueError: if no ``__version__`` line is found.
    """
    import ast
    with open(filename) as f:
        for line in f:
            if line.startswith('__version__'):
                # Parse the single assignment and take the RHS string constant.
                return ast.parse(line).body[0].value.s
    # Original code had an unreachable `if version is None` check after a
    # for/else raise; returning directly makes the control flow obvious.
    raise ValueError('No version found in %r.' % filename)
# Distribution flavor suffix; the version itself comes from the package.
line = 'daffy'
version = get_version(filename='src/zuper_nodes_python2/__init__.py')
setup(
    name='zuper-nodes-python2-%s' % line,
    version=version,
    keywords='',
    package_dir={'': 'src'},
    packages=[
        'zuper_nodes_python2',
    ],
    install_requires=[
        'cbor2',
    ],
)
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/setup.py
|
setup.py
|
from __future__ import unicode_literals
import os
import socket
import time
import traceback
from collections import namedtuple
import cbor2 as cbor
from . import logger
from .constants import *
from .reading import read_next_cbor
from .utils import indent
# Python 2 compatibility.
# Python 2 compatibility: TimeoutError only exists as a builtin on Python 3;
# fall back to socket.timeout when it is missing.
try:
    TimeoutError
except NameError:
    import socket

    TimeoutError = socket.timeout

__all__ = ['ComponentInterface']


class ExternalProtocolViolation(Exception):
    """The remote node violated the expected wire protocol."""

    pass


class ExternalNodeDidNotUnderstand(Exception):
    """The remote node reported that it did not understand a message."""

    pass


class RemoteNodeAborted(Exception):
    """The remote node aborted or closed communication."""

    pass


# NOTE(review): unconditionally re-binds the name set by the compatibility
# block above; on Python 2 this is a redundant no-op.
TimeoutError = socket.timeout


class Malformed(Exception):
    """A control message had an unexpected structure."""

    pass


# One message received from the remote node: (topic, payload).
MsgReceived = namedtuple('MsgReceived', 'topic data')
class ComponentInterface(object):
    """Drives an external node over a pair of named pipes (fifos).

    CBOR-encoded z2-protocol messages are written to ``fnin`` and the
    node's replies are read from ``fnout``.
    """

    def __init__(self, fnin, fnout, nickname, timeout=None):
        # nickname: human-readable node name used in error messages.
        self.nickname = nickname
        try:
            os.mkfifo(fnin)
        except BaseException as e:
            msg = 'Cannot create fifo {}'.format(fnin)
            msg += '\n\n%s' % traceback.format_exc()
            raise Exception(msg)
        # Unbuffered binary pipe towards the node.
        self.fpin = open(fnin, 'wb', buffering=0)
        # The node creates fnout itself; block until it appears.
        wait_for_creation(fnout)
        self.fnout = fnout
        f = open(fnout, 'rb', buffering=0)
        # noinspection PyTypeChecker
        self.fpout = f  # BufferedReader(f, buffer_size=1)
        self.nreceived = 0
        self.node_protocol = None
        self.data_protocol = None
        # Default read timeout in seconds; None waits indefinitely.
        self.timeout = timeout

    def close(self):
        self.fpin.close()
        self.fpout.close()

    def write_topic_and_expect(self, topic, data=None,
                               timeout=None,
                               timing=None,
                               expect=None):
        """Send one message and return the single reply (topic *expect*)."""
        timeout = timeout or self.timeout
        self._write_topic(topic, data=data, timing=timing)
        ob = self.read_one(expect_topic=expect, timeout=timeout)
        return ob

    def write_topic_and_expect_zero(self, topic, data=None,
                                    timeout=None,
                                    timing=None):
        """Send one message and check the node replies with no messages."""
        timeout = timeout or self.timeout
        self._write_topic(topic, data=data, timing=timing)
        msgs = read_reply(self.fpout, timeout=timeout,
                          nickname=self.nickname)
        if msgs:
            msg = 'Expecting zero, got %s' % msgs
            raise ExternalProtocolViolation(msg)

    def _write_topic(self, topic, data=None, timing=None):
        # z2 message envelope.
        msg = {FIELD_COMPAT: [PROTOCOL],
               FIELD_TOPIC: topic,
               FIELD_DATA: data,
               FIELD_TIMING: timing}
        j = self._serialize(msg)
        self._write(j)
        # logger.info('Written to topic "{topic}" >> {name}.'.format(topic=topic, name=self.nickname))

    def _write(self, j):
        try:
            self.fpin.write(j)
            self.fpin.flush()
        # BUG FIX: the original caught BrokenPipeError, which does not exist
        # on Python 2 (setup.py refuses Python 3), so the handler itself
        # raised NameError. Catch broadly instead, as the later z5 release
        # of this module does: a failed write means the node exited.
        except BaseException as e:
            msg = ('While attempting to write to node "{nickname}", '
                   'I reckon that the pipe is closed and the node exited.').format(nickname=self.nickname)
            try:
                # Try to surface the node's "aborted" message, if any.
                received = self.read_one(expect_topic=TOPIC_ABORTED)
                if received.topic == TOPIC_ABORTED:
                    msg += '\n\nThis is the aborted message:'
                    msg += '\n\n' + received.data
            except BaseException as e2:
                msg += '\n\nI could not read any aborted message: {e2}'.format(e2=e2)
            raise RemoteNodeAborted(msg)

    def _serialize(self, msg):
        j = cbor.dumps(msg)
        return j

    def read_one(self, expect_topic=None, timeout=None):
        """Read exactly one message, optionally checking its topic.

        Raises StopIteration on EOF and TimeoutError on timeout, each
        re-raised with context about the stream and message count.
        """
        timeout = timeout or self.timeout
        try:
            if expect_topic:
                waiting_for = 'Expecting topic "{expect_topic}" << {nickname}.'.format(expect_topic=expect_topic,
                                                                                      nickname=self.nickname)
            else:
                waiting_for = None
            msgs = read_reply(self.fpout, timeout=timeout, waiting_for=waiting_for,
                              nickname=self.nickname)
            if len(msgs) == 0:
                msg = 'Expected one message from node "{}". Got zero.'.format(self.nickname)
                if expect_topic:
                    msg += '\nExpecting topic "{}".'.format(expect_topic)
                raise ExternalProtocolViolation(msg)
            if len(msgs) > 1:
                msg = 'Expected only one message. Got {}.'.format(len(msgs))
                raise ExternalProtocolViolation(msg)
            msg = msgs[0]
            if FIELD_TOPIC not in msg:
                m = 'Invalid message does not contain the field "{}".'.format(FIELD_TOPIC)
                m += '\n {}'.format(msg)
                raise ExternalProtocolViolation(m)
            topic = msg[FIELD_TOPIC]
            if expect_topic:
                if topic != expect_topic:
                    msg = 'I expected topic "{expect_topic}" but received "{topic}".'.format(expect_topic=expect_topic,
                                                                                            topic=topic)
                    raise ExternalProtocolViolation(msg)
            if self.nreceived == 0:
                msg1 = 'Received first message of topic %s' % topic
                logger.info(msg1)
            self.nreceived += 1
            return MsgReceived(topic, msg[FIELD_DATA])
        except StopIteration as e:
            msg = 'EOF detected on %s after %d messages.' % (self.fnout, self.nreceived)
            if expect_topic:
                msg += ' Expected topic "{}".'.format(expect_topic)
            raise StopIteration(msg)
        except TimeoutError as e:
            msg = 'Timeout declared after waiting %s sec on %s after having received %d messages.' % (timeout,
                                                                                                      self.fnout,
                                                                                                      self.nreceived)
            if expect_topic:
                msg += ' Expected topic "{}".'.format(expect_topic)
            raise TimeoutError(msg)
def wait_for_creation(fn):
    """Block until the path *fn* exists, logging once per second."""
    while True:
        if os.path.exists(fn):
            return
        logger.info('waiting for creation of %s' % fn)
        time.sleep(1)
def read_reply(fpout, nickname, timeout=None, waiting_for=None):
    """ Reads a control message. Returns if it is CTRL_UNDERSTOOD.
    Raises:
    TimeoutError
    RemoteNodeAborted
    ExternalNodeDidNotUnderstand
    ExternalProtocolViolation otherwise. """
    try:
        wm = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
    except StopIteration:
        raise RemoteNodeAborted('Remote node closed communication (%s)' % waiting_for)

    cm = interpret_control_message(wm)
    code = cm.code
    if code == CTRL_UNDERSTOOD:
        # Message accepted: collect the payload messages until CTRL_OVER.
        return read_until_over(fpout, timeout=timeout, nickname=nickname)
    if code == CTRL_ABORTED:
        msg = 'The remote node "{}" aborted with the following error:'.format(nickname)
        msg += '\n\n' + indent(cm.msg, "|", "error in {} |".format(nickname))
        raise RemoteNodeAborted(msg)
    if code == CTRL_NOT_UNDERSTOOD:
        _others = read_until_over(fpout, timeout=timeout, nickname=nickname)
        msg = 'The remote node "{nickname}" reports that it did not understand the message:'.format(nickname=nickname)
        msg += '\n\n' + indent(cm.msg, "|", "reported by {} |".format(nickname))
        raise ExternalNodeDidNotUnderstand(msg)
    msg = 'Remote node raised unknown code %s: %s' % (cm, code)
    raise ExternalProtocolViolation(msg)
# A parsed control message: code is one of the CTRL_* constants, msg its payload.
ControlMessage = namedtuple('ControlMessage', 'code msg')
def interpret_control_message(m):
    """Validate a raw control dict and return it as a ControlMessage.

    Raises:
        Malformed: if *m* is not a dict or lacks the control field.
    """
    if not isinstance(m, dict):
        msg = 'Expected dictionary, not {}.'.format(type(m))
        raise Malformed(msg)
    # Idiom fix: `FIELD_CONTROL not in m` instead of `not FIELD_CONTROL in m`.
    if FIELD_CONTROL not in m:
        msg = 'Expected field {}, obtained {}'.format(FIELD_CONTROL, list(m))
        raise Malformed(msg)
    code = m[FIELD_CONTROL]
    msg = m.get(FIELD_DATA, None)
    return ControlMessage(code, msg)
def read_until_over(fpout, timeout, nickname):
    """ Raises RemoteNodeAborted, TimeoutError """
    # Collect messages until the node sends CTRL_OVER; translate aborts and
    # low-level stream errors into protocol-level exceptions.
    res = []
    waiting_for = 'Reading reply of {}.'.format(nickname)
    while True:
        try:
            wm = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
            if wm.get(FIELD_CONTROL, '') == CTRL_ABORTED:
                m = 'External node "{}" aborted:'.format(nickname)
                m += '\n\n' + indent(wm.get(FIELD_DATA, None), "|",
                                     "error in {} |".format(nickname))
                raise RemoteNodeAborted(m)
            if wm.get(FIELD_CONTROL, '') == CTRL_OVER:
                # logger.info(f'Node "{nickname}" concluded output of %s messages.' % len(res))
                break
            # logger.info(f'Node "{nickname}" sent %s.' % len(wm))
        except StopIteration:
            msg = 'External node "{}" closed communication.'.format(nickname)
            raise RemoteNodeAborted(msg)
        except TimeoutError:
            msg = 'Timeout while reading output of node "{}".'.format(nickname)
            raise TimeoutError(msg)
        # Only reached for ordinary (non-control) messages.
        res.append(wm)
    return res
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/src/zuper_nodes_python2/outside.py
|
outside.py
|
from __future__ import unicode_literals
# Wire-protocol identifier for the z2 node protocol.
PROTOCOL = u'z2'

# Field names of the CBOR message envelope.
FIELD_COMPAT = u'compat'
FIELD_CONTROL = u'control'
FIELD_TOPIC = u'topic'
FIELD_DATA = u'data'
FIELD_TIMING = u'timing'

# Topic on which a node announces that it aborted.
# NOTE(review): no u'' prefix, but unicode_literals above makes it unicode too.
TOPIC_ABORTED = 'aborted'

# Control-message codes.
CTRL_CAPABILITIES = u'capabilities'
CTRL_UNDERSTOOD = u'understood'
CTRL_NOT_UNDERSTOOD = u'not-understood'
CTRL_OVER = u'over'
CTRL_ABORTED = u'aborted'
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/src/zuper_nodes_python2/constants.py
|
constants.py
|
#!/usr/bin/env python2
from __future__ import unicode_literals
import os
import sys
import time
import traceback
import cbor2 as cbor
from . import logger
from .constants import *
# Python 2 compatibility.
# Python 2 compatibility: TimeoutError is a Python-3 builtin; fall back to
# socket.timeout when it is absent.
try:
    TimeoutError
except NameError:
    import socket

    TimeoutError = socket.timeout

__all__ = ['wrap_direct', 'Context']
class Context:
    """Interface handed to agent callbacks: logging plus message output.

    This base implementation ignores everything; see ConcreteContext for
    the implementation used by wrap_direct.
    """

    def info(self, s):
        pass

    def error(self, s):
        pass

    def debug(self, s):
        pass

    def warning(self, s):
        pass

    def write(self, topic, data):
        pass
# noinspection PyBroadException
def wrap_direct(agent):
    """Run *agent* as a node process.

    Reads CBOR-encoded z2 messages from AIDONODE_DATA_IN (default
    /dev/stdin), dispatches topic messages to ``agent.on_received_<topic>``
    callbacks, and writes control/topic replies to AIDONODE_DATA_OUT
    (default /dev/stdout). Exits the process with status 1 on fatal errors.
    """
    logger.info('python %s' % ".".join(map(str, sys.version_info)))
    data_in = os.environ.get('AIDONODE_DATA_IN', '/dev/stdin')
    data_out = os.environ.get('AIDONODE_DATA_OUT', '/dev/stdout')
    while not os.path.exists(data_in):
        logger.info('Waiting for %s to be created.' % data_in)
        time.sleep(1)
    if data_in == '/dev/stdin':
        f_in = sys.stdin
    else:
        f_in = open(data_in, 'rb')
    # f_in = io.BufferedReader(io.open(f_in.fileno()))
    # f_in = sys.stdin
    if data_out.startswith('fifo:'):
        # BUG FIX: was data_out.lstrip('fifo:'), which strips *characters*
        # from the set {f,i,o,:} and would mangle paths such as
        # 'fifo:official'; remove exactly the prefix instead.
        data_out = data_out[len('fifo:'):]
        os.mkfifo(data_out)
        logger.info('Opening fifo %s for writing. Will block until reader appears.' % data_out)
    f_out = open(data_out, 'wb')
    logger.info('Starting reading from %s' % data_in)
    try:
        while True:
            # logger.info('Reading...')
            try:
                msg = cbor.load(f_in)
            except IOError as e:
                if e.errno == 29:
                    break
                raise
            if not isinstance(msg, dict) or ((FIELD_CONTROL not in msg) and (FIELD_TOPIC not in msg)):
                # ignore things that we do not understand
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "Protocol mismatch")
                send_control_message(f_out, CTRL_OVER)
                # BUG FIX: without this `continue`, a non-dict message fell
                # through to `FIELD_CONTROL in msg` (TypeError for e.g. ints)
                # and a field-less dict was answered a second time below.
                continue
            if FIELD_CONTROL in msg:
                c = msg[FIELD_CONTROL]
                if c == CTRL_CAPABILITIES:
                    his = msg[FIELD_DATA]
                    logger.info('His capabilities: %s' % his)
                    capabilities = {
                        'z2': {}
                    }
                    logger.info('My capabilities: %s' % capabilities)
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    send_control_message(f_out, CTRL_CAPABILITIES, capabilities)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    msg = 'Could not deal with control message "%s".' % c
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
            elif FIELD_TOPIC in msg:
                topic = msg[FIELD_TOPIC]
                data = msg.get(FIELD_DATA, None)
                fn = 'on_received_%s' % topic
                if not hasattr(agent, fn):
                    msg = 'Could not deal with topic %s' % topic
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    context = ConcreteContext(f_out)
                    f = getattr(agent, fn)
                    try:
                        f(context=context, data=data)
                    except BaseException:
                        s = traceback.format_exc()
                        logger.error(s)
                        try:
                            # Python 2: traceback text may be bytes.
                            s = s.decode('utf-8')
                        except:
                            pass
                        send_control_message(f_out, CTRL_ABORTED, s)
                        raise
                    finally:
                        send_control_message(f_out, CTRL_OVER)
            else:
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "I expect a topic message")
                send_control_message(f_out, CTRL_OVER)
        logger.info('Graceful exit.')
    except BaseException:
        f_out.flush()
        logger.error(traceback.format_exc())
        sys.exit(1)
    finally:
        f_out.flush()
def send_control_message(f_out, c, msg=None):
    """Write one z2 control message to *f_out*, log it, and flush."""
    envelope = {
        FIELD_COMPAT: [PROTOCOL],
        FIELD_CONTROL: unicode(c),
        FIELD_DATA: msg,
    }
    cbor.dump(envelope, f_out)
    logger.info('Sending control %s' % c)
    f_out.flush()
def send_topic_message(f_out, topic, data):
    """Write one z2 topic message to *f_out*, log it, and flush."""
    envelope = {
        FIELD_COMPAT: [PROTOCOL],
        FIELD_TOPIC: unicode(topic),
        FIELD_DATA: data,
    }
    cbor.dump(envelope, f_out)
    logger.info('Sending topic %s' % topic)
    f_out.flush()
class ConcreteContext(Context):
    """Context that logs through the module logger and writes topic
    messages to the node's output stream."""

    def __init__(self, f_out):
        self.f_out = f_out

    def info(self, s):
        logger.info(s)

    def error(self, s):
        logger.error(s)

    def debug(self, s):
        logger.debug(s)

    def warning(self, s):
        logger.warning(s)

    def write(self, topic, data=None):
        send_topic_message(self.f_out, topic, data)
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/src/zuper_nodes_python2/imp.py
|
imp.py
|
import six
def indent(s, prefix, first=None):
    """Prefix every line of str(s) with *prefix*; the first line gets
    *first* instead. Both prefixes are right-aligned to a common width."""
    text = str(s)
    assert isinstance(prefix, six.string_types), type(prefix)
    lines = text.split('\n')
    if not lines:
        return ''
    if first is None:
        first = prefix
    width = max(len(prefix), len(first))
    body_prefix = prefix.rjust(width)
    head_prefix = first.rjust(width)
    out = [head_prefix + lines[0].rstrip()]
    for line in lines[1:]:
        out.append(body_prefix + line.rstrip())
    return '\n'.join(out)
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/src/zuper_nodes_python2/utils.py
|
utils.py
|
import logging

__version__ = '5.0.3'

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# BUG FIX: was logging.info(...), which logs on the *root* logger (left at
# WARNING by basicConfig) so the startup message was dropped; use the
# package logger, as the later z5 release does.
logger.info('zn-p2 %s' % __version__)

from .imp import *
from .outside import *
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/src/zuper_nodes_python2/__init__.py
|
__init__.py
|
import io
import time
from . import logger
import select
import cbor2 as cbor
# Python 2 compatibility.
# Python 2 compatibility: TimeoutError is a Python-3 builtin; fall back to
# socket.timeout when it is absent.
try:
    TimeoutError
except NameError:
    import socket

    TimeoutError = socket.timeout
def wait_for_data(f, timeout=None, waiting_for=None):
    """ Raises StopIteration if it is EOF.
    Raises TimeoutError if over timeout"""
    # XXX: StopIteration not implemented
    started = time.time()
    poll_interval = 3.0
    while True:
        try:
            ready_read, _ready_write, ready_exc = select.select([f], [], [f], poll_interval)
        except io.UnsupportedOperation:
            # f has no usable fileno (e.g. an in-memory buffer): stop
            # waiting and let the caller read directly.
            return
        if ready_read:
            return
        if ready_exc:
            logger.warning('Exceptional condition on input channel %s' % ready_exc)
            continue
        elapsed = time.time() - started
        if timeout is not None and elapsed > timeout:
            msg = 'Timeout after %.1f s.' % elapsed
            logger.error(msg)
            raise TimeoutError(msg)
        msg = 'I have been waiting %.1f s.' % elapsed
        if timeout is None:
            msg += ' I will wait indefinitely.'
        else:
            msg += ' Timeout will occurr at %.1f s.' % timeout
        if waiting_for:
            msg += ' ' + waiting_for
        logger.warning(msg)
def read_next_cbor(f, timeout=None, waiting_for=None):
    """ Raises StopIteration if it is EOF.
    Raises TimeoutError if over timeout"""
    wait_for_data(f, timeout, waiting_for)
    try:
        return cbor.load(f)
    except OSError as e:
        # errno 29 is ESPIPE ("illegal seek"): treat as end of stream.
        if e.errno == 29:
            raise StopIteration
        raise
|
zuper-nodes-python2-daffy
|
/zuper-nodes-python2-daffy-5.0.3.tar.gz/zuper-nodes-python2-daffy-5.0.3/src/zuper_nodes_python2/reading.py
|
reading.py
|
import sys
from setuptools import setup
# This distribution targets Python 2 only: refuse to install under Python 3+.
if sys.version_info >= (3, 0, 0):
    msg = 'This is supposed to be used only with Python 2. Found version %s' % sys.version
    raise Exception(msg)
def get_version(filename):
    """Return the __version__ string assigned in *filename*."""
    import ast
    version = None
    with open(filename) as fh:
        for source_line in fh:
            if not source_line.startswith('__version__'):
                continue
            # Parse the assignment and take the string constant on the RHS.
            version = ast.parse(source_line).body[0].value.s
            break
        else:
            raise ValueError('No version found in %r.' % filename)
    if version is None:
        raise ValueError(filename)
    return version
# Distribution flavor suffix; the version itself comes from the package.
line = 'z5'
version = get_version(filename='src/zuper_nodes_python2/__init__.py')
setup(
    name='zuper-nodes-python2-%s' % line,
    version=version,
    keywords='',
    package_dir={'': 'src'},
    packages=[
        'zuper_nodes_python2',
    ],
    install_requires=[
        'cbor2<5',
        'six',
        'numpy',
    ],
)
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/setup.py
|
setup.py
|
from __future__ import unicode_literals
import os
import socket
import time
import traceback
from collections import namedtuple
import cbor2 as cbor
from . import logger
from .constants import *
from .reading import read_next_cbor
from .utils import indent
# Python 2 compatibility.
# Python 2 compatibility: TimeoutError only exists as a builtin on Python 3;
# fall back to socket.timeout when it is missing.
try:
    TimeoutError
except NameError:
    import socket

    TimeoutError = socket.timeout

__all__ = ['ComponentInterface']


class ExternalProtocolViolation(Exception):
    """The remote node violated the expected wire protocol."""

    pass


class ExternalNodeDidNotUnderstand(Exception):
    """The remote node reported that it did not understand a message."""

    pass


class RemoteNodeAborted(Exception):
    """The remote node aborted or closed communication."""

    pass


# NOTE(review): unconditionally re-binds the name set by the compatibility
# block above; on Python 2 this is a redundant no-op.
TimeoutError = socket.timeout


class Malformed(Exception):
    """A control message had an unexpected structure."""

    pass


# One message received from the remote node: (topic, payload).
MsgReceived = namedtuple('MsgReceived', 'topic data')
class ComponentInterface(object):
    """Drives an external node over a pair of named pipes (fifos).

    CBOR-encoded z2-protocol messages are written to ``fnin`` and the
    node's replies are read from ``fnout``.
    """

    def __init__(self, fnin, fnout, nickname, timeout=None):
        # nickname: human-readable node name used in error messages.
        self.nickname = nickname
        try:
            os.mkfifo(fnin)
        except BaseException as e:
            msg = 'Cannot create fifo {}'.format(fnin)
            msg += '\n\n%s' % traceback.format_exc()
            raise Exception(msg)
        # Unbuffered binary pipe towards the node.
        self.fpin = open(fnin, 'wb', buffering=0)
        # The node creates fnout itself; block until it appears.
        wait_for_creation(fnout)
        self.fnout = fnout
        f = open(fnout, 'rb', buffering=0)
        # noinspection PyTypeChecker
        self.fpout = f  # BufferedReader(f, buffer_size=1)
        self.nreceived = 0
        self.node_protocol = None
        self.data_protocol = None
        # Default read timeout in seconds; None waits indefinitely.
        self.timeout = timeout

    def close(self):
        self.fpin.close()
        self.fpout.close()

    def write_topic_and_expect(self, topic, data=None,
                               timeout=None,
                               timing=None,
                               expect=None):
        # Send one message and return the single reply (topic `expect`).
        timeout = timeout or self.timeout
        self._write_topic(topic, data=data, timing=timing)
        ob = self.read_one(expect_topic=expect, timeout=timeout)
        return ob

    def write_topic_and_expect_zero(self, topic, data=None,
                                    timeout=None,
                                    timing=None):
        # Send one message and check the node replies with no messages.
        timeout = timeout or self.timeout
        self._write_topic(topic, data=data, timing=timing)
        msgs = read_reply(self.fpout, timeout=timeout,
                          nickname=self.nickname)
        if msgs:
            msg = 'Expecting zero, got %s' % msgs
            raise ExternalProtocolViolation(msg)

    def _write_topic(self, topic, data=None, timing=None):
        # z2 message envelope.
        msg = {FIELD_COMPAT: [PROTOCOL],
               FIELD_TOPIC: topic,
               FIELD_DATA: data,
               FIELD_TIMING: timing}
        j = self._serialize(msg)
        self._write(j)
        # logger.info('Written to topic "{topic}" >> {name}.'.format(topic=topic, name=self.nickname))

    def _write(self, j):
        try:
            self.fpin.write(j)
            self.fpin.flush()
        except BaseException as e:
            # A failed write means the pipe is closed because the node
            # exited; try to surface its "aborted" message, if any.
            msg = ('While attempting to write to node "{nickname}", '
                   'I reckon that the pipe is closed and the node exited.').format(nickname=self.nickname)
            try:
                received = self.read_one(expect_topic=TOPIC_ABORTED)
                if received.topic == TOPIC_ABORTED:
                    msg += '\n\nThis is the aborted message:'
                    msg += '\n\n' + received.data
            except BaseException as e2:
                msg += '\n\nI could not read any aborted message: {e2}'.format(e2=e2)
            raise RemoteNodeAborted(msg)

    def _serialize(self, msg):
        j = cbor.dumps(msg)
        return j

    def read_one(self, expect_topic=None, timeout=None):
        # Read exactly one message, optionally checking its topic.
        # Re-raises StopIteration (EOF) and TimeoutError with context.
        timeout = timeout or self.timeout
        try:
            if expect_topic:
                waiting_for = 'Expecting topic "{expect_topic}" << {nickname}.'.format(expect_topic=expect_topic,
                                                                                      nickname=self.nickname)
            else:
                waiting_for = None
            msgs = read_reply(self.fpout, timeout=timeout, waiting_for=waiting_for,
                              nickname=self.nickname)
            if len(msgs) == 0:
                msg = 'Expected one message from node "{}". Got zero.'.format(self.nickname)
                if expect_topic:
                    msg += '\nExpecting topic "{}".'.format(expect_topic)
                raise ExternalProtocolViolation(msg)
            if len(msgs) > 1:
                msg = 'Expected only one message. Got {}.'.format(len(msgs))
                raise ExternalProtocolViolation(msg)
            msg = msgs[0]
            if FIELD_TOPIC not in msg:
                m = 'Invalid message does not contain the field "{}".'.format(FIELD_TOPIC)
                m += '\n {}'.format(msg)
                raise ExternalProtocolViolation(m)
            topic = msg[FIELD_TOPIC]
            if expect_topic:
                if topic != expect_topic:
                    msg = 'I expected topic "{expect_topic}" but received "{topic}".'.format(expect_topic=expect_topic,
                                                                                            topic=topic)
                    raise ExternalProtocolViolation(msg)
            if self.nreceived == 0:
                msg1 = 'Received first message of topic %s' % topic
                logger.info(msg1)
            self.nreceived += 1
            return MsgReceived(topic, msg[FIELD_DATA])
        except StopIteration as e:
            msg = 'EOF detected on %s after %d messages.' % (self.fnout, self.nreceived)
            if expect_topic:
                msg += ' Expected topic "{}".'.format(expect_topic)
            raise StopIteration(msg)
        except TimeoutError as e:
            msg = 'Timeout declared after waiting %s sec on %s after having received %d messages.' % (timeout,
                                                                                                      self.fnout,
                                                                                                      self.nreceived)
            if expect_topic:
                msg += ' Expected topic "{}".'.format(expect_topic)
            raise TimeoutError(msg)
def wait_for_creation(fn):
    """Poll once per second until the path *fn* exists."""
    while not os.path.exists(fn):
        logger.info('waiting for creation of %s' % fn)
        time.sleep(1)
def read_reply(fpout, nickname, timeout=None, waiting_for=None):
    """ Reads a control message. Returns if it is CTRL_UNDERSTOOD.
    Raises:
    TimeoutError
    RemoteNodeAborted
    ExternalNodeDidNotUnderstand
    ExternalProtocolViolation otherwise. """
    try:
        wm = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
    except StopIteration:
        msg = 'Remote node closed communication (%s)' % waiting_for
        raise RemoteNodeAborted(msg)
    cm = interpret_control_message(wm)
    if cm.code == CTRL_UNDERSTOOD:
        # Message accepted: collect the payload messages until CTRL_OVER.
        others = read_until_over(fpout, timeout=timeout, nickname=nickname)
        return others
    elif cm.code == CTRL_ABORTED:
        msg = 'The remote node "{}" aborted with the following error:'.format(nickname)
        msg += '\n\n' + indent(cm.msg, "|", "error in {} |".format(nickname))
        # others = self.read_until_over()
        raise RemoteNodeAborted(msg)
    elif cm.code == CTRL_NOT_UNDERSTOOD:
        # Drain the rest of the reply before reporting the failure.
        _others = read_until_over(fpout, timeout=timeout, nickname=nickname)
        msg = 'The remote node "{nickname}" reports that it did not understand the message:'.format(nickname=nickname)
        msg += '\n\n' + indent(cm.msg, "|", "reported by {} |".format(nickname))
        raise ExternalNodeDidNotUnderstand(msg)
    else:
        msg = 'Remote node raised unknown code %s: %s' % (cm, cm.code)
        raise ExternalProtocolViolation(msg)
# A parsed control message: code is one of the CTRL_* constants, msg its payload.
ControlMessage = namedtuple('ControlMessage', 'code msg')
def interpret_control_message(m):
    """Validate a raw control dict and return it as a ControlMessage.

    Raises:
        Malformed: if *m* is not a dict or lacks the control field.
    """
    if not isinstance(m, dict):
        msg = 'Expected dictionary, not {}.'.format(type(m))
        raise Malformed(msg)
    # Idiom fix: `FIELD_CONTROL not in m` instead of `not FIELD_CONTROL in m`.
    if FIELD_CONTROL not in m:
        msg = 'Expected field {}, obtained {}'.format(FIELD_CONTROL, list(m))
        raise Malformed(msg)
    code = m[FIELD_CONTROL]
    msg = m.get(FIELD_DATA, None)
    return ControlMessage(code, msg)
def read_until_over(fpout, timeout, nickname):
    """ Raises RemoteNodeAborted, TimeoutError """
    # Collect messages until the node sends CTRL_OVER; translate aborts and
    # low-level stream errors into protocol-level exceptions.
    res = []
    waiting_for = 'Reading reply of {}.'.format(nickname)
    while True:
        try:
            wm = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
            if wm.get(FIELD_CONTROL, '') == CTRL_ABORTED:
                m = 'External node "{}" aborted:'.format(nickname)
                m += '\n\n' + indent(wm.get(FIELD_DATA, None), "|",
                                     "error in {} |".format(nickname))
                raise RemoteNodeAborted(m)
            if wm.get(FIELD_CONTROL, '') == CTRL_OVER:
                # logger.info(f'Node "{nickname}" concluded output of %s messages.' % len(res))
                break
            # logger.info(f'Node "{nickname}" sent %s.' % len(wm))
        except StopIteration:
            msg = 'External node "{}" closed communication.'.format(nickname)
            raise RemoteNodeAborted(msg)
        except TimeoutError:
            msg = 'Timeout while reading output of node "{}".'.format(nickname)
            raise TimeoutError(msg)
        # Only reached for ordinary (non-control) messages.
        res.append(wm)
    return res
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/src/zuper_nodes_python2/outside.py
|
outside.py
|
from __future__ import unicode_literals
# Wire-protocol identifier for the z2 node protocol.
PROTOCOL = u'z2'

# Field names of the CBOR message envelope.
FIELD_COMPAT = u'compat'
FIELD_CONTROL = u'control'
FIELD_TOPIC = u'topic'
FIELD_DATA = u'data'
FIELD_TIMING = u'timing'

# Topic on which a node announces that it aborted.
# NOTE(review): no u'' prefix, but unicode_literals above makes it unicode too.
TOPIC_ABORTED = 'aborted'

# Control-message codes.
CTRL_CAPABILITIES = u'capabilities'
CTRL_UNDERSTOOD = u'understood'
CTRL_NOT_UNDERSTOOD = u'not-understood'
CTRL_OVER = u'over'
CTRL_ABORTED = u'aborted'
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/src/zuper_nodes_python2/constants.py
|
constants.py
|
#!/usr/bin/env python2
from __future__ import unicode_literals
import os
import sys
import time
import traceback
import cbor2 as cbor
from . import logger
from .constants import *
# Python 2 compatibility.
# Python 2 compatibility: TimeoutError is a Python-3 builtin; fall back to
# socket.timeout when it is absent.
try:
    TimeoutError
except NameError:
    import socket

    TimeoutError = socket.timeout

__all__ = ['wrap_direct', 'Context']
class Context:
    """Interface handed to agent callbacks: logging plus message output.

    This base implementation ignores everything; see ConcreteContext for
    the implementation used by wrap_direct.
    """

    def info(self, s):
        pass

    def error(self, s):
        pass

    def debug(self, s):
        pass

    def warning(self, s):
        pass

    def write(self, topic, data):
        pass
# noinspection PyBroadException
def wrap_direct(agent):
    """Run *agent* as a node process.

    Reads CBOR-encoded z2 messages from AIDONODE_DATA_IN (default
    /dev/stdin), dispatches topic messages to ``agent.on_received_<topic>``
    callbacks, and writes control/topic replies to AIDONODE_DATA_OUT
    (default /dev/stdout). Exits the process with status 1 on fatal errors.
    """
    logger.info('python %s' % ".".join(map(str, sys.version_info)))
    data_in = os.environ.get('AIDONODE_DATA_IN', '/dev/stdin')
    data_out = os.environ.get('AIDONODE_DATA_OUT', '/dev/stdout')
    while not os.path.exists(data_in):
        logger.info('Waiting for %s to be created.' % data_in)
        time.sleep(1)
    if data_in == '/dev/stdin':
        f_in = sys.stdin
    else:
        f_in = open(data_in, 'rb')
    # f_in = io.BufferedReader(io.open(f_in.fileno()))
    # f_in = sys.stdin
    if data_out.startswith('fifo:'):
        # BUG FIX: was data_out.lstrip('fifo:'), which strips *characters*
        # from the set {f,i,o,:} and would mangle paths such as
        # 'fifo:official'; remove exactly the prefix instead.
        data_out = data_out[len('fifo:'):]
        os.mkfifo(data_out)
        logger.info('Opening fifo %s for writing. Will block until reader appears.' % data_out)
    f_out = open(data_out, 'wb')
    logger.info('Starting reading from %s' % data_in)
    try:
        while True:
            # logger.info('Reading...')
            try:
                msg = cbor.load(f_in)
            except IOError as e:
                if e.errno == 29:
                    break
                raise
            if not isinstance(msg, dict) or ((FIELD_CONTROL not in msg) and (FIELD_TOPIC not in msg)):
                # ignore things that we do not understand
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "Protocol mismatch")
                send_control_message(f_out, CTRL_OVER)
                # BUG FIX: without this `continue`, a non-dict message fell
                # through to `FIELD_CONTROL in msg` (TypeError for e.g. ints)
                # and a field-less dict was answered a second time below.
                continue
            if FIELD_CONTROL in msg:
                c = msg[FIELD_CONTROL]
                if c == CTRL_CAPABILITIES:
                    his = msg[FIELD_DATA]
                    logger.info('His capabilities: %s' % his)
                    capabilities = {
                        'z2': {}
                    }
                    logger.info('My capabilities: %s' % capabilities)
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    send_control_message(f_out, CTRL_CAPABILITIES, capabilities)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    msg = 'Could not deal with control message "%s".' % c
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
            elif FIELD_TOPIC in msg:
                topic = msg[FIELD_TOPIC]
                data = msg.get(FIELD_DATA, None)
                fn = 'on_received_%s' % topic
                if not hasattr(agent, fn):
                    msg = 'Could not deal with topic %s' % topic
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    context = ConcreteContext(f_out)
                    f = getattr(agent, fn)
                    try:
                        f(context=context, data=data)
                    except BaseException:
                        s = traceback.format_exc()
                        logger.error(s)
                        try:
                            # Python 2: traceback text may be bytes.
                            s = s.decode('utf-8')
                        except:
                            pass
                        send_control_message(f_out, CTRL_ABORTED, s)
                        raise
                    finally:
                        send_control_message(f_out, CTRL_OVER)
            else:
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "I expect a topic message")
                send_control_message(f_out, CTRL_OVER)
        logger.info('Graceful exit.')
    except BaseException:
        f_out.flush()
        logger.error(traceback.format_exc())
        sys.exit(1)
    finally:
        f_out.flush()
def send_control_message(f_out, c, msg=None):
    """Write one z2 control message to *f_out* and flush immediately."""
    envelope = {
        FIELD_COMPAT: [PROTOCOL],
        FIELD_CONTROL: unicode(c),
        FIELD_DATA: msg,
    }
    cbor.dump(envelope, f_out)
    f_out.flush()
def send_topic_message(f_out, topic, data):
    """Write one z2 topic message to *f_out* and flush immediately."""
    envelope = {
        FIELD_COMPAT: [PROTOCOL],
        FIELD_TOPIC: unicode(topic),
        FIELD_DATA: data,
    }
    cbor.dump(envelope, f_out)
    f_out.flush()
class ConcreteContext(Context):
    """Context that logs through the module logger and writes topic
    messages to the node's output stream."""

    def __init__(self, f_out):
        self.f_out = f_out

    def info(self, s):
        logger.info(s)

    def error(self, s):
        logger.error(s)

    def debug(self, s):
        logger.debug(s)

    def warning(self, s):
        logger.warning(s)

    def write(self, topic, data=None):
        send_topic_message(self.f_out, topic, data)
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/src/zuper_nodes_python2/imp.py
|
imp.py
|
import six
def indent(s, prefix, first=None):
    """Prefix every line of str(s) with *prefix*; the first line gets
    *first* instead. Both prefixes are right-aligned to a common width."""
    s = str(s)
    assert isinstance(prefix, six.string_types), type(prefix)
    lines = s.split('\n')
    if not lines:
        return ''
    head = prefix if first is None else first
    width = max(len(prefix), len(head))
    prefix_aligned = ' ' * (width - len(prefix)) + prefix
    head_aligned = ' ' * (width - len(head)) + head
    result = [prefix_aligned + ln.rstrip() for ln in lines]
    # The first line gets its own (aligned) prefix.
    result[0] = head_aligned + lines[0].rstrip()
    return '\n'.join(result)
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/src/zuper_nodes_python2/utils.py
|
utils.py
|
import logging

__version__ = '5.0.14'

# Package-wide logger, verbose by default.
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.info('zn-p2 %s' % __version__)

from .imp import *
from .outside import *
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/src/zuper_nodes_python2/__init__.py
|
__init__.py
|
import io
import time
from . import logger
import select
import cbor2 as cbor
# Python 2 compatibility.
# Python 2 compatibility: TimeoutError is a Python-3 builtin; fall back to
# socket.timeout when it is absent.
try:
    TimeoutError
except NameError:
    import socket

    TimeoutError = socket.timeout
def wait_for_data(f, timeout=None, waiting_for=None):
    """ Raises StopIteration if it is EOF.
    Raises TimeoutError if over timeout"""
    # XXX: StopIteration not implemented
    t0 = time.time()
    step = 3.0
    while True:
        try:
            r, _w, x = select.select([f], [], [f], step)
        except io.UnsupportedOperation:
            # f has no usable fileno (e.g. an in-memory buffer): stop
            # waiting and let the caller read directly.
            break
        if r:
            break
        if x:
            logger.warning('Exceptional condition on input channel %s' % x)
            continue
        waited = time.time() - t0
        if timeout is not None and waited > timeout:
            msg = 'Timeout after %.1f s.' % waited
            logger.error(msg)
            raise TimeoutError(msg)
        msg = 'I have been waiting %.1f s.' % waited
        if timeout is None:
            msg += ' I will wait indefinitely.'
        else:
            msg += ' Timeout will occurr at %.1f s.' % timeout
        if waiting_for:
            msg += ' ' + waiting_for
        logger.warning(msg)
def read_next_cbor(f, timeout=None, waiting_for=None):
    """ Raises StopIteration if it is EOF.
    Raises TimeoutError if over timeout"""
    wait_for_data(f, timeout, waiting_for)
    try:
        return cbor.load(f)
    except OSError as e:
        # errno 29 is ESPIPE ("illegal seek"): treat as end of stream.
        if e.errno != 29:
            raise
        raise StopIteration
|
zuper-nodes-python2-z5
|
/zuper-nodes-python2-z5-5.0.14.tar.gz/zuper-nodes-python2-z5-5.0.14/src/zuper_nodes_python2/reading.py
|
reading.py
|
import sys
from setuptools import setup
# This distribution targets Python 2 only: refuse to install under Python 3+.
if sys.version_info >= (3, 0, 0):
    msg = 'This is supposed to be used only with Python 2. Found version %s' % sys.version
    raise Exception(msg)
def get_version(filename):
    """Return the __version__ string assigned in *filename*."""
    import ast
    version = None
    with open(filename) as fh:
        # Only lines that declare the version are of interest.
        candidates = (ln for ln in fh if ln.startswith('__version__'))
        for ln in candidates:
            version = ast.parse(ln).body[0].value.s
            break
        else:
            raise ValueError('No version found in %r.' % filename)
    if version is None:
        raise ValueError(filename)
    return version
# The version comes from the package itself.
version = get_version(filename='src/zuper_nodes_python2/__init__.py')
setup(
    name='zuper-nodes-python2',
    version=version,
    keywords='',
    package_dir={'': 'src'},
    packages=[
        'zuper_nodes_python2',
    ],
    install_requires=[
        'cbor2',
    ],
)
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/setup.py
|
setup.py
|
from __future__ import unicode_literals
import os
import socket
import time
import traceback
from collections import namedtuple
import cbor2 as cbor
from . import logger
from .constants import *
from .reading import read_next_cbor
from .utils import indent
# Python 2 compatibility.
try:
TimeoutError
except NameError:
import socket
TimeoutError = socket.timeout
__all__ = ['ComponentInterface']
class ExternalProtocolViolation(Exception):
    """Raised when the remote node deviates from the expected wire protocol."""
    pass
class ExternalNodeDidNotUnderstand(Exception):
    """Raised when the remote node replies CTRL_NOT_UNDERSTOOD to a message."""
    pass
class RemoteNodeAborted(Exception):
    """Raised when the remote node aborts or closes communication."""
    pass
# NOTE(review): this unconditionally rebinds TimeoutError to socket.timeout,
# overriding the try/except compatibility shim above (and, on Python 3,
# shadowing the builtin TimeoutError). Kept as-is for compatibility.
TimeoutError = socket.timeout
class Malformed(Exception):
    """Raised when a decoded message does not have the expected structure."""
    pass
# Lightweight record for a received message: `topic` and its decoded `data` payload.
MsgReceived = namedtuple('MsgReceived', 'topic data')
class ComponentInterface(object):
    """Client-side handle to an external node communicating over two FIFOs.

    Requests are CBOR-encoded dictionaries written to `fnin`; replies are
    read back from `fnout`, following the protocol fields in `constants`.
    """
    def __init__(self, fnin, fnout, nickname, timeout=None):
        """
        :param fnin: path of the FIFO to create and write requests to.
        :param fnout: path of the FIFO created by the node, read for replies.
        :param nickname: human-readable node name used in error messages.
        :param timeout: default timeout (seconds) for reads; None waits forever.
        """
        self.nickname = nickname
        try:
            os.mkfifo(fnin)
        except BaseException as e:
            msg = 'Cannot create fifo {}'.format(fnin)
            msg += '\n\n%s' % traceback.format_exc()
            raise Exception(msg)
        self.fpin = open(fnin, 'wb', buffering=0)
        # The output FIFO is created by the node; block until it appears.
        wait_for_creation(fnout)
        self.fnout = fnout
        f = open(fnout, 'rb', buffering=0)
        # noinspection PyTypeChecker
        self.fpout = f  # BufferedReader(f, buffer_size=1)
        self.nreceived = 0
        self.node_protocol = None
        self.data_protocol = None
        self.timeout = timeout
    def close(self):
        """Close both pipes."""
        self.fpin.close()
        self.fpout.close()
    def write_topic_and_expect(self, topic, data=None,
                               timeout=None,
                               timing=None,
                               expect=None):
        """Send a topic message and return the single reply MsgReceived,
        optionally checking that the reply topic is `expect`."""
        timeout = timeout or self.timeout
        self._write_topic(topic, data=data, timing=timing)
        ob = self.read_one(expect_topic=expect, timeout=timeout)
        return ob
    def write_topic_and_expect_zero(self, topic, data=None,
                                    timeout=None,
                                    timing=None):
        """Send a topic message and check that the node replies with no data
        messages (only the protocol acknowledgement)."""
        timeout = timeout or self.timeout
        self._write_topic(topic, data=data, timing=timing)
        msgs = read_reply(self.fpout, timeout=timeout,
                          nickname=self.nickname)
        if msgs:
            msg = 'Expecting zero, got %s' % msgs
            raise ExternalProtocolViolation(msg)
    def _write_topic(self, topic, data=None, timing=None):
        """Serialize and write one topic message."""
        msg = {FIELD_COMPAT: [PROTOCOL],
               FIELD_TOPIC: topic,
               FIELD_DATA: data,
               FIELD_TIMING: timing}
        j = self._serialize(msg)
        self._write(j)
        # logger.info('Written to topic "{topic}" >> {name}.'.format(topic=topic, name=self.nickname))
    def _write(self, j):
        """Write raw bytes to the node; on a broken pipe, try to retrieve the
        node's "aborted" message and raise RemoteNodeAborted."""
        try:
            self.fpin.write(j)
            self.fpin.flush()
        except (IOError, OSError) as e:
            # BrokenPipeError does not exist on Python 2 (which this package
            # targets - catching it there is a NameError at exception time):
            # broken pipes surface on Python 2 as IOError with errno EPIPE,
            # and on Python 3 as BrokenPipeError (an OSError, errno EPIPE).
            import errno
            if e.errno != errno.EPIPE:
                raise
            msg = ('While attempting to write to node "{nickname}", '
                   'I reckon that the pipe is closed and the node exited.').format(nickname=self.nickname)
            try:
                received = self.read_one(expect_topic=TOPIC_ABORTED)
                if received.topic == TOPIC_ABORTED:
                    msg += '\n\nThis is the aborted message:'
                    msg += '\n\n' + received.data
            except BaseException as e2:
                msg += '\n\nI could not read any aborted message: {e2}'.format(e2=e2)
            raise RemoteNodeAborted(msg)
    def _serialize(self, msg):
        """Encode a message dictionary as CBOR bytes."""
        j = cbor.dumps(msg)
        return j
    def read_one(self, expect_topic=None, timeout=None):
        """Read exactly one data message from the node.

        :param expect_topic: if given, raise ExternalProtocolViolation when
            the received topic differs from it.
        :param timeout: overrides the instance default timeout for this read.
        :raises StopIteration: on EOF.
        :raises TimeoutError: if nothing arrives within the timeout.
        """
        timeout = timeout or self.timeout
        try:
            if expect_topic:
                waiting_for = 'Expecting topic "{expect_topic}" << {nickname}.'.format(expect_topic=expect_topic,
                                                                                      nickname=self.nickname)
            else:
                waiting_for = None
            msgs = read_reply(self.fpout, timeout=timeout, waiting_for=waiting_for,
                              nickname=self.nickname)
            # The protocol delivers a batch; exactly one message is expected here.
            if len(msgs) == 0:
                msg = 'Expected one message from node "{}". Got zero.'.format(self.nickname)
                if expect_topic:
                    msg += '\nExpecting topic "{}".'.format(expect_topic)
                raise ExternalProtocolViolation(msg)
            if len(msgs) > 1:
                msg = 'Expected only one message. Got {}.'.format(len(msgs))
                raise ExternalProtocolViolation(msg)
            msg = msgs[0]
            if FIELD_TOPIC not in msg:
                m = 'Invalid message does not contain the field "{}".'.format(FIELD_TOPIC)
                m += '\n {}'.format(msg)
                raise ExternalProtocolViolation(m)
            topic = msg[FIELD_TOPIC]
            if expect_topic:
                if topic != expect_topic:
                    msg = 'I expected topic "{expect_topic}" but received "{topic}".'.format(expect_topic=expect_topic,
                                                                                            topic=topic)
                    raise ExternalProtocolViolation(msg)
            if self.nreceived == 0:
                msg1 = 'Received first message of topic %s' % topic
                logger.info(msg1)
            self.nreceived += 1
            return MsgReceived(topic, msg[FIELD_DATA])
        except StopIteration as e:
            # Re-raise with a more informative message.
            msg = 'EOF detected on %s after %d messages.' % (self.fnout, self.nreceived)
            if expect_topic:
                msg += ' Expected topic "{}".'.format(expect_topic)
            raise StopIteration(msg)
        except TimeoutError as e:
            msg = 'Timeout declared after waiting %s sec on %s after having received %d messages.' % (timeout,
                                                                                                      self.fnout,
                                                                                                      self.nreceived)
            if expect_topic:
                msg += ' Expected topic "{}".'.format(expect_topic)
            raise TimeoutError(msg)
def wait_for_creation(fn):
    """Block until the path `fn` exists, polling once per second."""
    while True:
        if os.path.exists(fn):
            return
        logger.info('waiting for creation of %s' % fn)
        time.sleep(1)
def read_reply(fpout, nickname, timeout=None, waiting_for=None):
    """ Reads a control message. Returns if it is CTRL_UNDERSTOOD.
    Raises:
        TimeoutError
        RemoteNodeAborted
        ExternalNodeDidNotUnderstand
        ExternalProtocolViolation otherwise. """
    try:
        wm = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
    except StopIteration:
        raise RemoteNodeAborted('Remote node closed communication (%s)' % waiting_for)
    cm = interpret_control_message(wm)
    code = cm.code
    if code == CTRL_UNDERSTOOD:
        # Acknowledged: collect and return the data messages that follow.
        return read_until_over(fpout, timeout=timeout, nickname=nickname)
    if code == CTRL_ABORTED:
        m = 'The remote node "{}" aborted with the following error:'.format(nickname)
        m += '\n\n' + indent(cm.msg, "|", "error in {} |".format(nickname))
        # others = self.read_until_over()
        raise RemoteNodeAborted(m)
    if code == CTRL_NOT_UNDERSTOOD:
        # Drain the remaining messages before raising.
        _others = read_until_over(fpout, timeout=timeout, nickname=nickname)
        m = 'The remote node "{nickname}" reports that it did not understand the message:'.format(nickname=nickname)
        m += '\n\n' + indent(cm.msg, "|", "reported by {} |".format(nickname))
        raise ExternalNodeDidNotUnderstand(m)
    raise ExternalProtocolViolation('Remote node raised unknown code %s: %s' % (cm, code))
# Parsed control reply: `code` is one of the CTRL_* constants; `msg` is the optional payload.
ControlMessage = namedtuple('ControlMessage', 'code msg')
def interpret_control_message(m):
    """Validate a raw decoded message and convert it to a ControlMessage.

    Raises Malformed if `m` is not a dict or lacks FIELD_CONTROL."""
    if not isinstance(m, dict):
        raise Malformed('Expected dictionary, not {}.'.format(type(m)))
    if FIELD_CONTROL not in m:
        raise Malformed('Expected field {}, obtained {}'.format(FIELD_CONTROL, list(m)))
    return ControlMessage(m[FIELD_CONTROL], m.get(FIELD_DATA, None))
def read_until_over(fpout, timeout, nickname):
    """ Raises RemoteNodeAborted, TimeoutError """
    # Collects the data messages sent by the remote node until it signals
    # CTRL_OVER; returns the list of messages received before that marker.
    res = []
    waiting_for = 'Reading reply of {}.'.format(nickname)
    while True:
        try:
            wm = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
            if wm.get(FIELD_CONTROL, '') == CTRL_ABORTED:
                m = 'External node "{}" aborted:'.format(nickname)
                m += '\n\n' + indent(wm.get(FIELD_DATA, None), "|",
                                     "error in {} |".format(nickname))
                raise RemoteNodeAborted(m)
            if wm.get(FIELD_CONTROL, '') == CTRL_OVER:
                # logger.info(f'Node "{nickname}" concluded output of %s messages.' % len(res))
                break
            # logger.info(f'Node "{nickname}" sent %s.' % len(wm))
        except StopIteration:
            # EOF before CTRL_OVER: the node went away mid-reply.
            msg = 'External node "{}" closed communication.'.format(nickname)
            raise RemoteNodeAborted(msg)
        except TimeoutError:
            msg = 'Timeout while reading output of node "{}".'.format(nickname)
            raise TimeoutError(msg)
        # Only non-control messages reach here; accumulate them.
        res.append(wm)
    return res
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/src/zuper_nodes_python2/outside.py
|
outside.py
|
from __future__ import unicode_literals
# Protocol version identifier, advertised in the FIELD_COMPAT list of every message.
PROTOCOL = u'z2'
# Keys of the message dictionaries exchanged on the wire.
FIELD_COMPAT = u'compat'
FIELD_CONTROL = u'control'
FIELD_TOPIC = u'topic'
FIELD_DATA = u'data'
FIELD_TIMING = u'timing'
# Topic used by a node to report a fatal error before exiting.
# (No u-prefix needed: unicode_literals above makes this unicode too.)
TOPIC_ABORTED = 'aborted'
# Values of FIELD_CONTROL in control messages.
CTRL_CAPABILITIES = u'capabilities'
CTRL_UNDERSTOOD = u'understood'
CTRL_NOT_UNDERSTOOD = u'not-understood'
CTRL_OVER = u'over'
CTRL_ABORTED = u'aborted'
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/src/zuper_nodes_python2/constants.py
|
constants.py
|
#!/usr/bin/env python2
from __future__ import unicode_literals
import os
import sys
import time
import traceback
import cbor2 as cbor
from . import logger
from .constants import *
# Python 2 compatibility.
try:
TimeoutError
except NameError:
import socket
TimeoutError = socket.timeout
__all__ = ['wrap_direct', 'Context']
class Context:
    """No-op base class for the execution context handed to agent callbacks.

    Subclasses (see ConcreteContext) implement logging and message output."""
    def info(self, s):
        pass
    def error(self, s):
        pass
    def debug(self, s):
        pass
    def warning(self, s):
        pass
    def write(self, topic, data):
        pass
# noinspection PyBroadException
def wrap_direct(agent):
    """Run `agent` as a node: read CBOR messages from AIDONODE_DATA_IN,
    dispatch topic messages to `agent.on_received_<topic>(context, data)`,
    and write acknowledgements/replies to AIDONODE_DATA_OUT.

    Loops until EOF on the input stream; exits the process with status 1
    on unexpected errors.
    """
    logger.info('python %s' % ".".join(map(str, sys.version_info)))
    data_in = os.environ.get('AIDONODE_DATA_IN', '/dev/stdin')
    data_out = os.environ.get('AIDONODE_DATA_OUT', '/dev/stdout')
    while not os.path.exists(data_in):
        logger.info('Waiting for %s to be created.' % data_in)
        time.sleep(1)
    if data_in == '/dev/stdin':
        f_in = sys.stdin
    else:
        f_in = open(data_in, 'rb')
    # f_in = io.BufferedReader(io.open(f_in.fileno()))
    # f_in = sys.stdin
    if data_out.startswith('fifo:'):
        # Use slicing, not lstrip(): lstrip('fifo:') strips any leading run of
        # the *characters* {f, i, o, :}, which can eat into the path itself.
        data_out = data_out[len('fifo:'):]
        os.mkfifo(data_out)
        logger.info('Opening fifo %s for writing. Will block until reader appears.' % data_out)
        f_out = open(data_out, 'wb')
    else:
        # Previously f_out was left unbound on this path (NameError at first
        # write); open the plain path (e.g. /dev/stdout) for writing.
        f_out = open(data_out, 'wb')
    logger.info('Starting reading from %s' % data_in)
    try:
        while True:
            # whatever
            # logger.info('Reading...')
            try:
                msg = cbor.load(f_in)
            except IOError as e:
                # errno 29 (ESPIPE) signals EOF on the stream.
                if e.errno == 29:
                    break
                raise
            if not isinstance(msg, dict) or ((FIELD_CONTROL not in msg) and (FIELD_TOPIC not in msg)):
                # ignore things that we do not understand
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "Protocol mismatch")
                send_control_message(f_out, CTRL_OVER)
                # Go back to reading: falling through would evaluate
                # `FIELD_CONTROL in msg` on a possibly non-dict payload
                # (TypeError) and NACK field-less dicts a second time.
                continue
            if FIELD_CONTROL in msg:
                c = msg[FIELD_CONTROL]
                if c == CTRL_CAPABILITIES:
                    his = msg[FIELD_DATA]
                    logger.info('His capabilities: %s' % his)
                    capabilities = {
                        'z2': {}
                    }
                    logger.info('My capabilities: %s' % capabilities)
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    send_control_message(f_out, CTRL_CAPABILITIES, capabilities)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    msg = 'Could not deal with control message "%s".' % c
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
            elif FIELD_TOPIC in msg:
                topic = msg[FIELD_TOPIC]
                data = msg.get(FIELD_DATA, None)
                fn = 'on_received_%s' % topic
                if not hasattr(agent, fn):
                    msg = 'Could not deal with topic %s' % topic
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    context = ConcreteContext(f_out)
                    f = getattr(agent, fn)
                    try:
                        f(context=context, data=data)
                    except BaseException:
                        # Report the failure to the other side before dying.
                        s = traceback.format_exc()
                        logger.error(s)
                        try:
                            s = s.decode('utf-8')
                        except:
                            pass
                        send_control_message(f_out, CTRL_ABORTED, s)
                        raise
                    finally:
                        send_control_message(f_out, CTRL_OVER)
            else:
                # Unreachable given the `continue` above; kept defensively.
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "I expect a topic message")
                send_control_message(f_out, CTRL_OVER)
        logger.info('Graceful exit.')
    except BaseException:
        f_out.flush()
        logger.error(traceback.format_exc())
        sys.exit(1)
    finally:
        f_out.flush()
def send_control_message(f_out, c, msg=None):
    """Write one control message (FIELD_CONTROL = c, FIELD_DATA = msg) and flush."""
    envelope = {}
    envelope[FIELD_COMPAT] = [PROTOCOL]
    envelope[FIELD_CONTROL] = unicode(c)
    envelope[FIELD_DATA] = msg
    cbor.dump(envelope, f_out)
    logger.info('Sending control %s' % c)
    f_out.flush()
def send_topic_message(f_out, topic, data):
    """Write one topic message (FIELD_TOPIC = topic, FIELD_DATA = data) and flush."""
    envelope = {}
    envelope[FIELD_COMPAT] = [PROTOCOL]
    envelope[FIELD_TOPIC] = unicode(topic)
    envelope[FIELD_DATA] = data
    cbor.dump(envelope, f_out)
    logger.info('Sending topic %s' % topic)
    f_out.flush()
class ConcreteContext(Context):
    """Context implementation that logs via `logger` and writes topic
    messages to the node's output stream."""
    def __init__(self, f_out):
        self.f_out = f_out
    def info(self, s):
        logger.info(s)
    def error(self, s):
        logger.error(s)
    def debug(self, s):
        logger.debug(s)
    def warning(self, s):
        logger.warning(s)
    def write(self, topic, data=None):
        send_topic_message(self.f_out, topic, data)
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/src/zuper_nodes_python2/imp.py
|
imp.py
|
import six
def indent(s, prefix, first=None):
    """Prefix every line of `s` with `prefix`; the first line gets `first`
    (defaulting to `prefix`). Both markers are right-padded to equal width
    so the text columns line up."""
    s = str(s)
    assert isinstance(prefix, six.string_types), type(prefix)
    lines = s.split('\n')
    if not lines:
        return ''
    if first is None:
        first = prefix
    width = max(len(prefix), len(first))
    prefix = prefix.rjust(width)
    first = first.rjust(width)
    # Different prefix for the first line.
    body = ['%s%s' % (prefix, line.rstrip()) for line in lines]
    body[0] = '%s%s' % (first, lines[0].rstrip())
    return '\n'.join(body)
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/src/zuper_nodes_python2/utils.py
|
utils.py
|
import logging
__version__ = '2.1.5'
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# NOTE(review): this logs via the root logger, not the module `logger` above.
logging.info('zn-p2 %s' % __version__)
from .imp import *
from .outside import *
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/src/zuper_nodes_python2/__init__.py
|
__init__.py
|
import io
import time
from . import logger
import select
import cbor2 as cbor
# Python 2 compatibility.
try:
TimeoutError
except NameError:
import socket
TimeoutError = socket.timeout
def wait_for_data(f, timeout=None, waiting_for=None):
    """Block until `f` is readable (via select), logging progress periodically.

    :param f: file object or file descriptor to wait on.
    :param timeout: seconds after which TimeoutError is raised; None waits forever.
    :param waiting_for: optional description appended to the periodic log message.
    :raises TimeoutError: if no data becomes readable within `timeout`.
    """
    # XXX: StopIteration (EOF detection) not implemented here.
    fs = [f]
    t0 = time.time()
    intermediate_timeout = 3.0  # re-check / log period while waiting
    while True:
        try:
            readyr, readyw, readyx = select.select(fs, [], fs, intermediate_timeout)
        except io.UnsupportedOperation:
            # The stream does not expose a selectable file descriptor
            # (e.g. an in-memory buffer): assume data is available.
            break
        if readyr:
            break
        elif readyx:
            logger.warning('Exceptional condition on input channel %s' % readyx)
        else:
            delta = time.time() - t0
            if (timeout is not None) and (delta > timeout):
                msg = 'Timeout after %.1f s.' % delta
                logger.error(msg)
                raise TimeoutError(msg)
            else:
                msg = 'I have been waiting %.1f s.' % delta
                if timeout is None:
                    msg += ' I will wait indefinitely.'
                else:
                    # (typo "occurr" fixed)
                    msg += ' Timeout will occur at %.1f s.' % timeout
                if waiting_for:
                    msg += ' ' + waiting_for
                logger.warning(msg)
def read_next_cbor(f, timeout=None, waiting_for = None):
    """Read and return the next CBOR-encoded object from stream `f`.

    Blocks (via wait_for_data) until data is available.

    :param f: readable binary stream (or file descriptor) to read from.
    :param timeout: seconds to wait for data; None waits indefinitely.
    :param waiting_for: optional description used in periodic log messages.
    :raises StopIteration: if it is EOF (reported by cbor as errno 29).
    :raises TimeoutError: if over timeout.
    """
    wait_for_data(f, timeout, waiting_for)
    try:
        j = cbor.load(f)
        return j
    except (IOError, OSError) as e:
        # On Python 2, stream failures raise IOError, which is *not* a
        # subclass of OSError there; catch both so the EOF translation also
        # works on Python 2 (the sibling imp.py catches IOError for the same
        # errno-29 case). On Python 3, IOError is an alias of OSError.
        if e.errno == 29:
            raise StopIteration
        raise
|
zuper-nodes-python2
|
/zuper-nodes-python2-2.1.5.tar.gz/zuper-nodes-python2-2.1.5/src/zuper_nodes_python2/reading.py
|
reading.py
|
from setuptools import setup
import sys
if not sys.version_info >= (3, 6, 0):
msg = 'Unsupported version %s' % sys.version
raise Exception(msg)
def get_version(filename):
    """Extract the string assigned to ``__version__`` in the given file.

    Raises ValueError if no ``__version__`` assignment is found."""
    import ast
    found = None
    with open(filename) as stream:
        for candidate in stream:
            if not candidate.startswith('__version__'):
                continue
            # Parse the assignment and take the literal on its right-hand side.
            found = ast.parse(candidate).body[0].value.s
            break
        else:
            raise ValueError('No version found in %r.' % filename)
    if found is None:
        raise ValueError(filename)
    return found
# Read the package version from the package's __init__.py.
version = get_version(filename='src/zuper_nodes/__init__.py')
# Distribution-line suffix: this package is published as zuper-nodes-z5.
line = 'z5'
setup(
    name=f'zuper-nodes-{line}',
    version=version,
    keywords='',
    package_dir={'': 'src'},
    packages=[
        'zuper_nodes',
        'zuper_nodes_tests',
        'zuper_nodes_wrapper',
        'zuper_nodes_wrapper_tests',
    ],
    install_requires=[
        'compmake',
        'pyparsing',
        'PyContracts',
        'networkx<=2.2',
        'termcolor',
        'zuper-ipce-z5',
        'cbor2',
        'base58',
    ],
    entry_points={
        'console_scripts': [
            'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main',
        ],
    },
)
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/setup.py
|
setup.py
|
from comptests import comptest, run_module_tests
@comptest
def dummy1():
pass
@comptest
def dummy2():
pass
@comptest
def dummy3():
pass
@comptest
def dummy4():
pass
@comptest
def dummy5():
pass
if __name__ == '__main__':
run_module_tests()
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_tests/test1.py
|
test1.py
|
from nose.tools import assert_equal
from zuper_nodes import Language, ExpectInputReceived, ExpectOutputProduced, InSequence, ZeroOrMore, ZeroOrOne, \
OneOrMore, Either
from zuper_nodes.language_parse import Syntax
from comptests import comptest
from contracts import check_isinstance
def parse_language(s: str) -> Language:
    """Parse the protocol-language string `s` into a Language AST."""
    parsed = Syntax.language.parseString(s, parseAll=True)
    return parsed[0]
def expect_parse(expr, s, expected):
    """Parse `s` with `expr`; if `expected` is not None, assert the result equals it."""
    check_isinstance(s, str)
    check_isinstance(expected, (type(None), Language))
    outcome = expr.parseString(s, parseAll=True)[0]
    print(f'Obtained: {outcome}')
    print(f'Expected: {expected}')
    if expected:
        assert_equal(outcome, expected)
@comptest
def test_parse_language_01():
s = "in:name"
e = ExpectInputReceived("name")
expect_parse(Syntax.input_received, s, e)
expect_parse(Syntax.language, s, e)
@comptest
def test_parse_language_02():
s = "out:name"
e = ExpectOutputProduced("name")
expect_parse(Syntax.output_produced, s, e)
@comptest
def test_parse_language_03():
s = "out:first ; in:second"
e = InSequence((ExpectOutputProduced("first"),
ExpectInputReceived("second")))
expect_parse(Syntax.language, s, e)
@comptest
def test_parse_language_04():
s = "(out:first)*"
e = ZeroOrMore(ExpectOutputProduced("first"))
expect_parse(Syntax.language, s, e)
@comptest
def test_parse_language_05():
s = "(out:first)?"
e = ZeroOrOne(ExpectOutputProduced("first"))
expect_parse(Syntax.language, s, e)
@comptest
def test_parse_language_06():
s = "(out:first)+"
e = OneOrMore(ExpectOutputProduced("first"))
expect_parse(Syntax.language, s, e)
@comptest
def test_parse_language_07():
s = "out:first | out:second"
e = Either((ExpectOutputProduced("first"), ExpectOutputProduced("second")))
expect_parse(Syntax.language, s, e)
s2 = "(out:first | out:second)"
expect_parse(Syntax.language, s2, e)
@comptest
def test_parse_language_08():
s = """
(
in:next_episode ; (
out:no_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:episode_end))*)
)
)*
"""
expect_parse(Syntax.language, s, None)
#
# def test_parse_language_08():
# s = """
# (
# in:next_episode ; (
# out:no_episodes |
# (out:episode_start ;
# (in:next_image ; (out:image | out:episode_end))*)
# )
# )*
# """
#
# expect_parse(Syntax.language, s, None)
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_tests/test_language.py
|
test_language.py
|
import os
from typing import Sequence, List, Union
from networkx.drawing.nx_pydot import write_dot
from zuper_nodes import OutputProduced, InputReceived, Event, Language, logger
from zuper_nodes.language_parse import parse_language, language_to_str
from zuper_nodes.language_recognize import LanguageChecker, Enough, Unexpected, NeedMore
from compmake.utils import make_sure_dir_exists
from comptests import comptest, run_module_tests, get_comptests_output_dir
from zuper_nodes_wrapper.meta_protocol import basic_protocol
def assert_seq(s: Union[str, Language], seq: List[Event], expect: Sequence[type], final: type):
if isinstance(s, str):
s = s.replace('\n', ' ').strip()
while ' ' in s:
s = s.replace(' ', ' ')
l = parse_language(s)
else:
l = s
s2 = language_to_str(l)
print(s)
print(s2)
l2 = parse_language(s2)
assert l == l2, (s, s2)
pc = LanguageChecker(l)
logger.info(f'Active start: {pc.get_active_states_names()}')
dn = get_comptests_output_dir()
fn = os.path.join(dn, 'language.dot')
make_sure_dir_exists(fn)
write_dot(pc.g, fn)
logger.info(f'Written to {fn}')
# all except last
for i, (e, r) in enumerate(zip(seq, expect)):
logger.info(f'Active before: {pc.get_active_states_names()}')
logger.info(f'Event {e}')
res = pc.push(e)
logger.info(f'Active after: {pc.get_active_states_names()}')
if not isinstance(res, r):
msg = f'Input {i} ({e}) response was {type(res).__name__} instead of {r.__name__}'
msg += f'\n entire sequence: {seq}'
msg += f'\n language: {l}'
msg += f'\n language string: {s2}'
raise Exception(msg)
res = pc.finish()
if not isinstance(res, final):
msg = f'finish response was {type(res).__name__} instead of {final.__name__}'
msg += f'\n entire sequence: {seq}'
msg += f'\n language: {l}'
msg += f'\n language string: {s2}'
raise Exception(msg)
@comptest
def test_proto_out1():
seq = [OutputProduced("a")]
assert_seq("out:a", seq, (Enough,), Enough)
@comptest
def test_proto_in1():
seq = [InputReceived("a")]
assert_seq("in:a", seq, (Enough,), Enough)
@comptest
def test_proto3():
seq = [InputReceived("a")]
assert_seq("out:a", seq, (Unexpected,), Unexpected)
@comptest
def test_proto4():
seq = [OutputProduced("a")]
assert_seq("in:a", seq, (Unexpected,), Unexpected)
@comptest
def test_proto05():
seq = [InputReceived("b")]
assert_seq("in:a", seq, (Unexpected,), Unexpected)
@comptest
def test_proto06():
seq = [OutputProduced("b")]
assert_seq("in:a", seq, (Unexpected,), Unexpected)
@comptest
def test_proto07():
seq = [OutputProduced("a"), OutputProduced("b")]
assert_seq("out:a ; out:b", seq, (NeedMore, Enough), Enough)
@comptest
def test_proto08():
seq = [OutputProduced("a"), OutputProduced("b")]
assert_seq("out:a ; out:b ; out:b", seq, (NeedMore, NeedMore), NeedMore)
@comptest
def test_proto09():
seq = [OutputProduced("a")]
assert_seq("out:a ; out:b", seq, (NeedMore,), NeedMore)
@comptest
def test_proto10():
seq = [OutputProduced("a"), OutputProduced("b"), OutputProduced("c")]
assert_seq("out:a ; out:b", seq, (NeedMore, Enough, Unexpected), Unexpected)
@comptest
def test_proto_zom_01():
seq = []
assert_seq("out:a *", seq, (), Enough)
@comptest
def test_proto_zom_02():
seq = [OutputProduced("a")]
assert_seq("out:a *", seq, (Enough,), Enough)
@comptest
def test_proto_zom_03():
seq = [OutputProduced("a"), OutputProduced("a")]
assert_seq("out:a *", seq, (Enough, Enough), Enough)
@comptest
def test_proto_either_01():
seq = [OutputProduced("a")]
assert_seq("out:a | out:b ", seq, (Enough,), Enough)
@comptest
def test_proto_either_02():
seq = [OutputProduced("b")]
assert_seq("out:a | out:b ", seq, (Enough,), Enough)
@comptest
def test_proto_either_03():
seq = [OutputProduced("c")]
assert_seq("out:a | out:b | out:c ", seq, (Enough,), Enough)
@comptest
def test_proto_either_04():
seq = [OutputProduced("a"), OutputProduced("b")]
assert_seq("(out:a ; out:b) | (out:b ; out:a) ", seq, (NeedMore, Enough), Enough)
@comptest
def test_proto_either_05():
seq = [OutputProduced("b"), OutputProduced("a")]
assert_seq("(out:a ; out:b) | (out:b ; out:a) ", seq, (NeedMore, Enough,), Enough)
@comptest
def test_proto_oom_01():
seq = []
assert_seq("out:a +", seq, (), NeedMore)
@comptest
def test_proto_oom_02():
seq = [OutputProduced("a")]
assert_seq("out:a +", seq, (Enough,), Enough)
@comptest
def test_proto_oom_03():
seq = [OutputProduced("a"), OutputProduced("a")]
assert_seq("out:a +", seq, (Enough, Enough), Enough)
@comptest
def test_proto_zoom_01():
seq = []
assert_seq("out:a ?", seq, (), Enough)
@comptest
def test_proto_zoom_02():
seq = [OutputProduced("a")]
assert_seq("out:a ?", seq, (Enough,), Enough)
@comptest
def test_proto_zoom_03():
seq = [OutputProduced("a"), OutputProduced("a")]
assert_seq("out:a ?", seq, (Enough, Unexpected), Unexpected)
@comptest
def test_protocol_complex1():
l = """
(
in:next_episode ; (
out:no_more_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*)
)
)*
"""
seq = [InputReceived("next_episode"), OutputProduced("episode_start")]
assert_seq(l, seq, (NeedMore, Enough), Enough)
@comptest
def test_protocol_complex1_0():
l = """
in:next_episode ; (
out:no_more_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*)
)
"""
seq = [InputReceived("next_episode"), OutputProduced("no_more_episodes")]
assert_seq(l, seq, (NeedMore, Enough), Enough)
@comptest
def test_protocol_complex1_1():
l = """
in:next_episode ; (
out:no_more_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*)
)
"""
seq = [InputReceived("next_episode"),
OutputProduced("episode_start")]
assert_seq(l, seq, (NeedMore, Enough), Enough)
@comptest
def test_protocol_complex1_2():
l = """
in:next_episode ; (
out:no_more_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*)
)
"""
seq = [InputReceived("next_episode"),
OutputProduced("episode_start"),
InputReceived("next_image"),
OutputProduced("image"),
]
assert_seq(l, seq, (NeedMore, Enough), Enough)
@comptest
def test_protocol_complex1_3():
l = """
(
in:next_episode ; (
out:no_more_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*)
)
)*
"""
seq = [
InputReceived("next_image"),
]
assert_seq(l, seq, (Unexpected,), Unexpected)
@comptest
def test_protocol_complex1_3b():
l = """
(
in:next_episode ; (
out:no_more_episodes |
(out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*)
)
)*
"""
seq = [
InputReceived("next_image"),
]
assert_seq(l, seq, (Unexpected,), Unexpected)
@comptest
def test_protocol_complex1_3c():
l = """
(
in:next_episode ; (
(out:episode_start ;
(in:next_image)*)
)
)*
"""
seq = [
InputReceived("next_image"),
]
assert_seq(l, seq, (Unexpected,), Unexpected)
@comptest
def test_protocol_complex1_3e():
l = """
(
in:next_episode ; (
(out:episode_start ;
(in:next_image)*)
)
)
"""
seq = [
InputReceived("next_image"),
]
assert_seq(l, seq, (Unexpected,), Unexpected)
@comptest
def test_protocol_complex1_3d():
l = """
(
in:next_episode ; (
(out:episode_start ;
(in:next_image))
)
)*
"""
seq = [
InputReceived("next_image"),
]
assert_seq(l, seq, (Unexpected,), Unexpected)
@comptest
def test_protocol_complex1_3v():
l0 = """
out:episode_start ;
(in:next_image ; (out:image | out:no_more_images))*
"""
seq = [OutputProduced("episode_start")]
assert_seq(l0, seq, (Enough,), Enough)
@comptest
def test_basic_protocol1():
l0 = basic_protocol.language
seq = [InputReceived("set_config")]
assert_seq(l0, seq, (NeedMore,), NeedMore)
if __name__ == '__main__':
run_module_tests()
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_tests/test_protocol.py
|
test_protocol.py
|
from . import test1
from . import test_protocol
from . import test_language
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_tests/__init__.py
|
__init__.py
|
# coding=utf-8
import termcolor
__all__ = ['setup_logging_color', 'setup_logging_format', 'setup_logging']
def get_FORMAT_datefmt():
    """Return (format, datefmt) for logging, with dimmed timestamp/location parts."""
    def dim(text):
        return termcolor.colored(text, attrs=['dark'])
    fmt = (dim('%(asctime)s|')
           + '%(name)s'
           + dim('|%(filename)s:%(lineno)s|%(funcName)s(): ')
           + "%(message)s")
    return fmt, "%H:%M:%S"
# noinspection PyUnresolvedReferences
def setup_logging_format():
    """Install the custom format (see get_FORMAT_datefmt) on root handlers."""
    from logging import Logger, StreamHandler, Formatter
    import logging
    FORMAT, datefmt = get_FORMAT_datefmt()
    logging.basicConfig(format=FORMAT, datefmt=datefmt)
    if Logger.root.handlers: # @UndefinedVariable
        for handler in Logger.root.handlers: # @UndefinedVariable
            if isinstance(handler, StreamHandler):
                formatter = Formatter(FORMAT, datefmt=datefmt)
                handler.setFormatter(formatter)
    else:
        # NOTE(review): basicConfig was already called above, so this branch
        # is effectively redundant; kept as-is.
        logging.basicConfig(format=FORMAT, datefmt=datefmt)
def add_coloring_to_emit_ansi(fn):
    """Wrap a StreamHandler.emit function so records are ANSI-colored by level."""
    # add methods we need to the class
    def new(*args):
        # args[0] is the handler instance, args[1] the LogRecord.
        levelno = args[1].levelno
        if levelno >= 50:
            color = '\x1b[31m' # red
        elif levelno >= 40:
            color = '\x1b[31m' # red
        elif levelno >= 30:
            color = '\x1b[33m' # yellow
        elif levelno >= 20:
            color = '\x1b[32m' # green
        elif levelno >= 10:
            color = '\x1b[35m' # pink
        else:
            color = '\x1b[0m' # normal
        msg = str(args[1].msg)
        lines = msg.split('\n')
        def color_line(l):
            return "%s%s%s" % (color, l, '\x1b[0m') # normal
        # Color each line separately so multi-line messages stay fully colored.
        lines = list(map(color_line, lines))
        # NOTE: mutates the record in place before delegating to the wrapped emit.
        args[1].msg = "\n".join(lines)
        return fn(*args)
    return new
def setup_logging_color():
    """Monkey-patch StreamHandler.emit to colorize output (skipped on Windows)."""
    import platform
    if platform.system() != 'Windows':
        # `logging` resolves to the module-level import further down this file
        # at call time.
        emit2 = add_coloring_to_emit_ansi(logging.StreamHandler.emit)
        logging.StreamHandler.emit = emit2
def setup_logging():
    """Configure colored, custom-formatted logging on the root logger."""
    # logging.basicConfig()
    setup_logging_color()
    setup_logging_format()
import logging
logging.basicConfig()
setup_logging()
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/col_logging.py
|
col_logging.py
|
from pyparsing import Suppress, Literal, Keyword, ParserElement, pyparsing_common, opAssoc, operatorPrecedence
from .language import ExpectInputReceived, ExpectOutputProduced, InSequence, ZeroOrMore, ZeroOrOne, Either, Language, \
OneOrMore
__all__ = [
'parse_language',
'language_to_str',
'Syntax',
]
ParserElement.enablePackrat()
S = Suppress
L = Literal
K = Keyword
def parse_language(s: str) -> Language:
    """Parse the protocol-language string `s` into a Language AST."""
    parsed = Syntax.language.parseString(s, parseAll=True)
    return parsed[0]
def language_to_str(l: Language):
    """Render a Language AST back into its concrete string syntax."""
    def maybe_paren(text):
        # Parenthesize compound sub-expressions so operator precedence survives.
        return "(" + text + ')' if (';' in text or '|' in text) else text
    if isinstance(l, ExpectInputReceived):
        return f"in:{l.channel}"
    if isinstance(l, ExpectOutputProduced):
        return f"out:{l.channel}"
    if isinstance(l, InSequence):
        return " ; ".join(maybe_paren(language_to_str(part)) for part in l.ls)
    if isinstance(l, Either):
        return " | ".join(maybe_paren(language_to_str(part)) for part in l.ls)
    if isinstance(l, ZeroOrMore):
        return "(" + language_to_str(l.l) + ")*"
    if isinstance(l, OneOrMore):
        return "(" + language_to_str(l.l) + ")+"
    if isinstance(l, ZeroOrOne):
        return "(" + language_to_str(l.l) + ")?"
    raise NotImplementedError(type(l))
def on_input_received(s, loc, tokens):
    # pyparsing parse action: wrap the matched channel name in an AST node.
    return ExpectInputReceived(tokens[0])
def on_output_produced(s, loc, tokens):
    # pyparsing parse action: wrap the matched channel name in an AST node.
    return ExpectOutputProduced(tokens[0])
def on_in_sequence(tokens):
    # pyparsing parse action for the binary ';' operator (left-assoc group).
    return InSequence(tuple(tokens[0]))
def on_either(tokens):
    # pyparsing parse action for the binary '|' operator (left-assoc group).
    return Either(tuple(tokens[0]))
def on_zero_or_one(tokens):
    # pyparsing parse action for the postfix '?' operator.
    return ZeroOrOne(tokens[0][0])
def on_zero_or_more(tokens):
    # pyparsing parse action for the postfix '*' operator.
    return ZeroOrMore(tokens[0][0])
def on_one_or_more(tokens):
    # pyparsing parse action for the postfix '+' operator.
    return OneOrMore(tokens[0][0])
class Syntax:
    """pyparsing grammar for the protocol language.

    Atoms: ``in:<identifier>`` / ``out:<identifier>``. Operators by
    precedence: postfix ``*`` (zero or more), ``+`` (one or more),
    ``?`` (zero or one), then ``;`` (sequence), then ``|`` (either).
    """
    input_received = S(K("in") + L(":")) + pyparsing_common.identifier
    output_produced = S(K("out") + L(":")) + pyparsing_common.identifier
    basic = input_received | output_produced
    language = operatorPrecedence(basic,
                                  [
                                      (S(L('*')), 1, opAssoc.LEFT, on_zero_or_more),
                                      (S(L('+')), 1, opAssoc.LEFT, on_one_or_more),
                                      (S(L('?')), 1, opAssoc.LEFT, on_zero_or_one),
                                      (S(L(';')), 2, opAssoc.LEFT, on_in_sequence),
                                      (S(L('|')), 2, opAssoc.LEFT, on_either),
                                  ])
    input_received.setParseAction(on_input_received)
    output_produced.setParseAction(on_output_produced)
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/language_parse.py
|
language_parse.py
|
from contracts import indent
from zuper_nodes import InteractionProtocol
from zuper_typing.subcheck import can_be_used_as2
class IncompatibleProtocol(Exception):
    """Raised when one protocol cannot be used as a sub-protocol of another."""
    pass
def check_compatible_protocol(p1: InteractionProtocol, p2: InteractionProtocol):
    """ Checks that p1 is a subprotocol of p2.

    Raises IncompatibleProtocol (with both protocols included in the
    message) if any input or output channel of p2 is missing from p1
    or has an incompatible type.
    """
    try:
        # check input compatibility
        # we should have all inputs
        for k, v2 in p2.inputs.items():
            if not k in p1.inputs:
                msg = f'First protocol misses input "{k}".'
                raise IncompatibleProtocol(msg)
            v1 = p1.inputs[k]
            r = can_be_used_as2(v1, v2)
            if not r:
                msg = f'For input "{k}", cannot use type {v1} as {v2}: {r}'
                raise IncompatibleProtocol(msg)
        # check output compatibility
        # we should have all inputs
        for k, v2 in p2.outputs.items():
            if not k in p1.outputs:
                msg = f'First protocol misses output "{k}".'
                raise IncompatibleProtocol(msg)
            v1 = p1.outputs[k]
            r = can_be_used_as2(v1, v2)
            if not r:
                msg = f'For output "{k}", cannot use type {v1} as {v2}: {r}'
                raise IncompatibleProtocol(msg)
        # XXX: to finish
    except IncompatibleProtocol as e:
        # Re-raise with both protocols rendered, for easier debugging.
        msg = 'Cannot say that p1 is a sub-protocol of p2'
        msg += '\n' + indent(p1, '|', 'p1: |')
        msg += '\n' + indent(p2, '|', 'p2: |')
        raise IncompatibleProtocol(msg) from e
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/compatibility.py
|
compatibility.py
|
from dataclasses import dataclass
from typing import Union, Tuple, Optional, Set
from .language import Language, OutputProduced, InputReceived, Event, ExpectInputReceived, ExpectOutputProduced, \
InSequence, ZeroOrMore, Either, OneOrMore, ZeroOrOne
from contracts.utils import indent
class Result:
    """Base class for the outcome of feeding events to LanguageChecker."""
    pass
@dataclass
class Enough(Result):
    """The event sequence so far is a complete word of the language."""
    pass
@dataclass
class Unexpected(Result):
    """The last event does not match any active expectation."""
    # Human-readable explanation of what went wrong.
    msg: str
    def __repr__(self):
        return 'Unexpected:' + indent(self.msg, ' ')
@dataclass
class NeedMore(Result):
    """The sequence is a valid prefix but not yet a complete word."""
    pass
import networkx as nx
NodeName = Tuple[str, ...]
class Always:
    """Marker for epsilon edges in the NFA: matches no concrete event."""
    pass
def get_nfa(g: Optional[nx.DiGraph], start_node: NodeName, accept_node: NodeName, l: Language,
            prefix: Tuple[str, ...] = ()):
    """Recursively add states/edges to `g` so that the fragment between
    `start_node` and `accept_node` recognizes the language `l`.

    Edges carry `event_match` (an atomic expectation, or Always() for
    epsilon transitions) and a human-readable `label`. `prefix` namespaces
    the intermediate node names created for sub-expressions.
    """
    # assert start_node != accept_node
    if not start_node in g:
        g.add_node(start_node, label="/".join(start_node))
    if not accept_node in g:
        g.add_node(accept_node, label="/".join(accept_node))
    if isinstance(l, ExpectOutputProduced):
        g.add_edge(start_node, accept_node, event_match=l, label=f'out/{l.channel}')
    elif isinstance(l, ExpectInputReceived):
        g.add_edge(start_node, accept_node, event_match=l, label=f'in/{l.channel}')
    elif isinstance(l, InSequence):
        # Chain the sub-languages through fresh intermediate nodes, then
        # epsilon-connect the last one to the accept node.
        current = start_node
        for i, li in enumerate(l.ls):
            # if i == len(l.ls) - 1:
            #     n = accept_node
            # else:
            n = prefix + (f'after{i}',)
            g.add_node(n)
            # logger.debug(f'sequence {i} start {current} to {n}')
            get_nfa(g, start_node=current, accept_node=n, prefix=prefix + (f'{i}',), l=li)
            current = n
        g.add_edge(current, accept_node, event_match=Always(), label='always')
    elif isinstance(l, ZeroOrMore):
        # logger.debug(f'zeroormore {start_node} -> {accept_node}')
        # Epsilon to accept (zero case), plus a self-loop on accept (more case).
        g.add_edge(start_node, accept_node, event_match=Always(), label='always')
        get_nfa(g, start_node=accept_node, accept_node=accept_node, l=l.l, prefix=prefix + ('zero_or_more',))
    elif isinstance(l, OneOrMore):
        # start to accept
        get_nfa(g, start_node=start_node, accept_node=accept_node, l=l.l, prefix=prefix + ('one_or_more', '1'))
        # accept to accept
        get_nfa(g, start_node=accept_node, accept_node=accept_node, l=l.l, prefix=prefix + ('one_or_more', '2'))
    elif isinstance(l, ZeroOrOne):
        # Epsilon (zero case) in parallel with the sub-language (one case).
        g.add_edge(start_node, accept_node, event_match=Always(), label='always')
        get_nfa(g, start_node=start_node, accept_node=accept_node, l=l.l, prefix=prefix + ('zero_or_one',))
    elif isinstance(l, Either):
        for i, li in enumerate(l.ls):
            get_nfa(g, start_node=start_node, accept_node=accept_node, l=li, prefix=prefix + (f'either{i}',))
    else:
        assert False, type(l)
def event_matches(l: Language, event: Event):
    """Decide whether the concrete ``event`` satisfies the edge expectation ``l``.

    ``Always`` edges are epsilon transitions and never match a concrete event.
    Raises NotImplementedError for unknown expectation types.
    """
    if isinstance(l, Always):
        return False
    if isinstance(l, ExpectInputReceived):
        return isinstance(event, InputReceived) and event.channel == l.channel
    if isinstance(l, ExpectOutputProduced):
        return isinstance(event, OutputProduced) and event.channel == l.channel
    raise NotImplementedError(l)
# Reserved node names for the NFA's unique start and accept states.
START = ('start',)
ACCEPT = ('accept',)
class LanguageChecker:
    """Incremental recognizer for a Language, driven by push()ed events.

    Builds an NFA once in the constructor, then tracks the set of currently
    active states. Fix: the constructor used the ``Graph.node`` attribute
    accessor, which was removed in networkx 2.4, while
    ``get_active_states_names`` already used ``Graph.nodes`` — now ``.nodes``
    is used consistently.
    """
    g: nx.DiGraph
    active: Set[NodeName]

    def __init__(self, language: Language):
        self.g = nx.MultiDiGraph()
        self.start_node = START
        self.accept_node = ACCEPT
        get_nfa(g=self.g, l=language, start_node=self.start_node, accept_node=self.accept_node, prefix=())
        # Assign short display labels: S2, S3, ... for internal states.
        a = 2
        for n in self.g:
            if n not in [START, ACCEPT]:
                self.g.nodes[n]['label'] = f'S{a}'
                a += 1
            elif n == START:
                self.g.nodes[n]['label'] = 'start'
            elif n == ACCEPT:
                self.g.nodes[n]['label'] = 'accept'
        self.active = {self.start_node}
        # Follow epsilon ("always") edges before the first event arrives.
        self._evolve_empty()

    def _evolve_empty(self):
        """Expand ``self.active`` across epsilon edges until a fixed point."""
        while True:
            now_active = set()
            for node in self.active:
                nalways = 0
                nother = 0
                for (_, neighbor, data) in self.g.out_edges([node], data=True):
                    if isinstance(data['event_match'], Always):
                        now_active.add(neighbor)
                        nalways += 1
                    else:
                        nother += 1
                # Keep the node itself alive unless its only outgoing edges
                # are epsilon edges.
                if nother or (nalways == 0):
                    now_active.add(node)
            if self.active == now_active:
                break
            self.active = now_active

    def push(self, event) -> Result:
        """Consume one event, advance the active set, and report the status."""
        now_active = set()
        for node in self.active:
            for (_, neighbor, data) in self.g.out_edges([node], data=True):
                if event_matches(data['event_match'], event):
                    now_active.add(neighbor)
        self.active = now_active
        self._evolve_empty()
        return self.finish()

    def finish(self) -> Union[NeedMore, Enough, Unexpected]:
        """Classify the current state set: dead, accepting, or still matching."""
        if not self.active:
            return Unexpected('no active')
        if self.accept_node in self.active:
            return Enough()
        return NeedMore()

    def get_active_states_names(self):
        """Return the display labels of the currently active states."""
        return [self.g.nodes[_]['label'] for _ in self.active]

    def get_expected_events(self) -> Set:
        """Return the non-epsilon expectations leaving any active state."""
        events = set()
        for state in self.active:
            for (_, neighbor, data) in self.g.out_edges([state], data=True):
                em = data['event_match']
                if not isinstance(em, Always):
                    events.add(em)
        return events
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/language_recognize.py
|
language_recognize.py
|
import socket
import time
from dataclasses import dataclass, field
from typing import Optional, Dict
import numpy as np
__all__ = [
'AIDONodesException',
'ProtocolViolation',
'ExternalProtocolViolation',
'InternalProtocolViolation',
'DecodingError',
'EncodingError',
'Timestamp',
'timestamp_from_seconds',
'TimeSpec',
'local_time',
'TimingInfo',
'EnvironmentError',
'NotConforming',
'ExternalTimeout',
]
class AIDONodesException(Exception):
    """Base class for exceptions raised by this package."""
    pass


class ProtocolViolation(AIDONodesException):
    """Some party did not respect the interaction protocol."""
    pass


class ExternalProtocolViolation(ProtocolViolation):
    """The remote side violated the protocol."""
    pass


class ExternalNodeDidNotUnderstand(ProtocolViolation):
    """The remote node reported it did not understand a message."""
    pass


class RemoteNodeAborted(Exception):
    """The remote node signaled that it aborted."""
    pass


class ExternalTimeout(ExternalProtocolViolation):
    """Timed out while waiting for the remote side."""
    pass


class InternalProblem(Exception):
    """Unexpected error on our side (not a protocol issue)."""
    pass


class InternalProtocolViolation(ProtocolViolation):
    """We violated the protocol ourselves."""
    pass


class DecodingError(AIDONodesException):
    """Could not decode incoming data."""
    pass


class EncodingError(AIDONodesException):
    """Could not encode outgoing data."""
    pass


class NotConforming(AIDONodesException):
    """ The node is not conforming to the protocol. """
    pass


# NOTE(review): shadows the builtin EnvironmentError (alias of OSError);
# kept for backward compatibility with existing callers.
class EnvironmentError(AIDONodesException):
    """ Things such as files not existing. """
    pass
@dataclass
class Timestamp:
    """Absolute time split into whole seconds plus a sub-second remainder.

    ``us`` is the fractional part expressed in microseconds (0 <= us < 1e6).
    """
    s: int
    us: int


def timestamp_from_seconds(f: float) -> Timestamp:
    """Convert a float time in seconds (e.g. from time.time()) to a Timestamp.

    Bug fix: the fractional part was previously multiplied by 1e9
    (nanoseconds), overflowing the microseconds field; it is now scaled
    by 1e6 so that 0 <= us < 1_000_000 holds.
    """
    s = int(np.floor(f))
    extra = f - s
    us = int(extra * 1000 * 1000)
    return Timestamp(s, us)
@dataclass
class TimeSpec:
    """A timestamp qualified by the frame and clock that produced it."""
    time: Timestamp
    # e.g. 'epoch' (see local_time())
    frame: str
    # identifier of the clock source, e.g. a hostname
    clock: str
    # optional second timestamp (see ConcreteContext.write, which stores the
    # processing time here)
    time2: Optional[Timestamp] = None
def local_time() -> TimeSpec:
    """Return the current wall-clock time as a TimeSpec on the 'epoch' frame,
    clocked by this host's name."""
    now = time.time()
    return TimeSpec(
        time=timestamp_from_seconds(now),
        frame='epoch',
        clock=socket.gethostname(),
    )
@dataclass
class TimingInfo:
    """Timing metadata carried along with messages."""
    # acquisition times of upstream data, keyed by name
    acquired: Optional[Dict[str, TimeSpec]] = field(default_factory=dict)
    # processing times, keyed by node name (see ConcreteContext.write)
    processed: Optional[Dict[str, TimeSpec]] = field(default_factory=dict)
    # when the current message was received locally
    received: Optional[TimeSpec] = None
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/structures.py
|
structures.py
|
__version__ = '5.0.9'

from .col_logging import logging

# Imported for its side effects / to ensure zuper_ipce is available;
# the value itself is unused. TODO confirm intent.
# noinspection PyUnresolvedReferences
from zuper_ipce import __version__ as _v

# Package-wide logger.
logger = logging.getLogger('zuper-nodes')
logger.setLevel(logging.DEBUG)
logger.info(f'zuper-nodes {__version__}')

# Re-export the public API of the submodules.
from .language import *
from .language_parse import *
from .language_recognize import *
from .structures import *
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/__init__.py
|
__init__.py
|
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Dict, Iterator, Optional, Tuple
# Events

# Channels are identified by plain strings.
ChannelName = str


class Event:
    """Base class for observable events on a node's channels."""
    pass
@dataclass(frozen=True, unsafe_hash=True)
class InputReceived(Event):
    """A message arrived on the given input channel."""
    channel: ChannelName


@dataclass(frozen=True, unsafe_hash=True)
class OutputProduced(Event):
    """A message was emitted on the given output channel."""
    channel: ChannelName
# Language over events


class Language(metaclass=ABCMeta):
    """A regular-expression-like language over Events."""

    @abstractmethod
    def collect_simple_events(self) -> Iterator[Event]:
        """Yield every atomic event mentioned anywhere in this expression."""
        pass
@dataclass(frozen=True, unsafe_hash=True)
class ExpectInputReceived(Language):
    """Atomic language: exactly one input on ``channel``."""
    channel: ChannelName

    def collect_simple_events(self):
        yield InputReceived(self.channel)


@dataclass(frozen=True, unsafe_hash=True)
class ExpectOutputProduced(Language):
    """Atomic language: exactly one output on ``channel``."""
    channel: ChannelName

    def collect_simple_events(self):
        yield OutputProduced(self.channel)


@dataclass(frozen=True, unsafe_hash=True)
class InSequence(Language):
    """Concatenation: each sub-language in order."""
    ls: Tuple[Language, ...]

    def collect_simple_events(self):
        for l in self.ls:
            yield from l.collect_simple_events()


@dataclass(frozen=True, unsafe_hash=True)
class ZeroOrOne(Language):
    """Optional occurrence of ``l`` (regex '?')."""
    l: Language

    def collect_simple_events(self):
        yield from self.l.collect_simple_events()


@dataclass(frozen=True, unsafe_hash=True)
class ZeroOrMore(Language):
    """Kleene star of ``l`` (regex '*')."""
    l: Language

    def collect_simple_events(self):
        yield from self.l.collect_simple_events()


@dataclass(frozen=True, unsafe_hash=True)
class OneOrMore(Language):
    """At least one occurrence of ``l`` (regex '+')."""
    l: Language

    def collect_simple_events(self):
        yield from self.l.collect_simple_events()


@dataclass(frozen=True, unsafe_hash=True)
class Either(Language):
    """Alternation among the sub-languages (regex '|')."""
    ls: Tuple[Language, ...]

    def collect_simple_events(self):
        for l in self.ls:
            yield from l.collect_simple_events()
# Interaction protocol


@dataclass
class InteractionProtocol:
    """Describes a node's channels and the legal ordering of messages.

    After construction, ``language`` is parsed into ``self.interaction``
    (a Language), validated against the declared channels, and then
    re-serialized into a normalized string form.
    """
    # Description
    description: str
    # Type for each input or output
    inputs: Dict[ChannelName, type]
    outputs: Dict[ChannelName, type]
    # The interaction language
    language: str

    # interaction: Language = None

    def __post_init__(self):
        # Local import to avoid a circular dependency with language_parse.
        from .language_parse import parse_language, language_to_str
        self.interaction = parse_language(self.language)
        # Every channel mentioned in the language must be declared.
        simple_events = list(self.interaction.collect_simple_events())
        for e in simple_events:
            if isinstance(e, InputReceived):
                if e.channel not in self.inputs:
                    msg = f'Could not find input channel "{e.channel}" among {sorted(self.inputs)}.'
                    raise ValueError(msg)
            if isinstance(e, OutputProduced):
                if e.channel not in self.outputs:
                    msg = f'Could not find output channel "{e.channel}" among {sorted(self.outputs)}.'
                    raise ValueError(msg)
        # Normalize the language string.
        self.language = language_to_str(self.interaction)
def particularize(ip: InteractionProtocol,
                  description: Optional[str] = None,
                  inputs: Optional[Dict[str, type]] = None,
                  outputs: Optional[Dict[str, type]] = None) -> InteractionProtocol:
    """Derive a specialized protocol from ``ip``.

    The given ``inputs``/``outputs`` override or extend the originals; the
    language is inherited unchanged. The result is checked for compatibility
    with the original protocol before being returned.
    """
    merged_inputs = {**ip.inputs, **(inputs or {})}
    merged_outputs = {**ip.outputs, **(outputs or {})}
    new_description = description or ip.description
    protocol2 = InteractionProtocol(new_description, merged_inputs, merged_outputs, ip.language)
    # Local import to avoid a circular dependency.
    from .compatibility import check_compatible_protocol
    check_compatible_protocol(protocol2, ip)
    return protocol2
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes/language.py
|
language.py
|
# Environment variables understood by the node wrapper.
ENV_NAME = 'AIDONODE_NAME'
ENV_DATA_IN = 'AIDONODE_DATA_IN'
ENV_DATA_OUT = 'AIDONODE_DATA_OUT'
ENV_META_IN = 'AIDONODE_META_IN'
ENV_META_OUT = 'AIDONODE_META_OUT'
ENV_TRANSLATE = 'AIDONODE_TRANSLATE'
ENV_ENCODING = 'AIDONODE_ENCODING'
ENV_CONFIG = 'AIDONODE_CONFIG'
# ENV_ENCODING_JSON = 'json'
# ENV_ENCODING_CBOR = 'cbor'
# ENV_ENCODING_VALID = [ENV_ENCODING_JSON, ENV_ENCODING_CBOR]
# All recognized variables; others starting with AIDO trigger a warning.
KNOWN = [ENV_DATA_IN, ENV_DATA_OUT, ENV_CONFIG, ENV_META_IN, ENV_META_OUT, ENV_NAME, ENV_TRANSLATE, ENV_ENCODING]

# Name of the attribute holding a node's configuration object.
ATT_CONFIG = 'config'
TOPIC_ABORTED = 'aborted'

# Wire-protocol field names and control codes.
FIELD_COMPAT = 'compat'
CUR_PROTOCOL = 'z2'
FIELD_DATA = 'data'
FIELD_TOPIC = 'topic'
FIELD_TIMING = 'timing'
FIELD_CONTROL = 'control'
CTRL_CAPABILITIES = 'capabilities'
CTRL_UNDERSTOOD = 'understood'
CTRL_NOT_UNDERSTOOD = 'not-understood'
CTRL_OVER = 'over'
CTRL_ABORTED = 'aborted'

CAPABILITY_PROTOCOL_REFLECTION = 'protocol-reflection'
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/constants.py
|
constants.py
|
import argparse
import json
import os
import socket
import time
import traceback
from dataclasses import dataclass
from typing import *
import yaml
from zuper_ipce import object_from_ipce, ipce_from_object, IESO
from contracts.utils import format_obs
from zuper_commons.text import indent
from zuper_commons.types import check_isinstance
from zuper_nodes import InteractionProtocol, InputReceived, OutputProduced, Unexpected, LanguageChecker
from zuper_nodes.structures import TimingInfo, local_time, TimeSpec, timestamp_from_seconds, DecodingError, \
ExternalProtocolViolation, NotConforming, ExternalTimeout, InternalProblem
from .reading import inputs
from .streams import open_for_read, open_for_write
from .struct import RawTopicMessage, ControlMessage
from .utils import call_if_fun_exists
from .writing import Sink
from . import logger, logger_interaction
from .interface import Context
from .meta_protocol import basic_protocol, SetConfig, ProtocolDescription, ConfigDescription, \
BuildDescription, NodeDescription
class ConcreteContext(Context):
    """Context implementation used by the wrapper's main loop.

    Buffers outgoing messages in ``to_write`` (drained by the loop via
    ``get_to_write``) and validates each output against the protocol and
    the language checker.
    """
    protocol: InteractionProtocol
    to_write: List[RawTopicMessage]

    def __init__(self, sink: Sink, protocol: InteractionProtocol,
                 node_name: str, tout: Dict[str, str]):
        self.sink = sink
        self.protocol = protocol
        # Tracks whether the message ordering conforms to the protocol language.
        self.pc = LanguageChecker(protocol.interaction)
        self.node_name = node_name
        self.hostname = socket.gethostname()
        # Output topic translation table (local name -> wire name).
        self.tout = tout
        self.to_write = []
        self.last_timing = None

    def set_last_timing(self, timing: TimingInfo):
        # Remembered so write() can attach timing when the caller passes none.
        self.last_timing = timing

    def get_hostname(self):
        return self.hostname

    def write(self, topic, data, timing=None, with_schema=False):
        """Validate, timestamp, serialize, and queue one outgoing message.

        Raises if the topic is unknown; merely logs (and drops the message)
        if the output is unexpected per the protocol language.
        """
        if topic not in self.protocol.outputs:
            msg = f'Output channel "{topic}" not found in protocol; know {sorted(self.protocol.outputs)}.'
            raise Exception(msg)
        # logger.info(f'Writing output "{topic}".')
        klass = self.protocol.outputs[topic]
        if isinstance(klass, type):
            check_isinstance(data, klass)
        event = OutputProduced(topic)
        res = self.pc.push(event)
        if isinstance(res, Unexpected):
            # Out-of-order output: log and drop rather than crash the node.
            msg = f'Unexpected output {topic}: {res}'
            logger.error(msg)
            return
        klass = self.protocol.outputs[topic]
        if isinstance(data, dict):
            # Allow callers to pass already-serialized (IPCE) dictionaries.
            data = object_from_ipce(data, klass)
        if timing is None:
            timing = self.last_timing
        if timing is not None:
            # Record when this node processed the message; reuse the received
            # time as reference when available.
            s = time.time()
            if timing.received is None:
                # XXX
                time1 = timestamp_from_seconds(s)
            else:
                time1 = timing.received.time
            processed = TimeSpec(time=time1,
                                 time2=timestamp_from_seconds(s),
                                 frame='epoch',
                                 clock=socket.gethostname())
            timing.processed[self.node_name] = processed
            # 'received' is meaningless downstream; clear it before sending.
            timing.received = None
        # Translate the topic name for the wire, then serialize payload/timing.
        topic_o = self.tout.get(topic, topic)
        ieso = IESO(use_ipce_from_typelike_cache=True, with_schema=with_schema)
        data = ipce_from_object(data, ieso=ieso)
        if timing is not None:
            ieso = IESO(use_ipce_from_typelike_cache=True, with_schema=False)
            timing_o = ipce_from_object(timing, ieso=ieso)
        else:
            timing_o = None
        rtm = RawTopicMessage(topic_o, data, timing_o)
        self.to_write.append(rtm)

    def get_to_write(self) -> List[RawTopicMessage]:
        """ Returns the messages to send and resets the queue"""
        res = self.to_write
        self.to_write = []
        return res

    def log(self, s):
        prefix = f'{self.hostname}:{self.node_name}: '
        logger.info(prefix + s)

    def info(self, s):
        prefix = f'{self.hostname}:{self.node_name}: '
        logger.info(prefix + s)

    def debug(self, s):
        prefix = f'{self.hostname}:{self.node_name}: '
        logger.debug(prefix + s)

    def warning(self, s):
        prefix = f'{self.hostname}:{self.node_name}: '
        logger.warning(prefix + s)

    def error(self, s):
        prefix = f'{self.hostname}:{self.node_name}: '
        logger.error(prefix + s)
def get_translation_table(t: str) -> Tuple[Dict[str, str], Dict[str, str]]:
    """Parse a translation spec like "in:name1:name2,out:name2:name1".

    Returns (tin, tout): input and output topic-renaming tables.
    Empty entries (e.g. from an empty string) are ignored; a malformed
    entry raises ValueError instead of a bare IndexError.

    Fixes: the loop variable previously shadowed the parameter ``t``.
    """
    tout: Dict[str, str] = {}
    tin: Dict[str, str] = {}
    for entry in t.split(','):
        if not entry:
            continue
        parts = entry.split(':')
        if parts[0] not in ('in', 'out'):
            continue
        if len(parts) != 3:
            msg = f'Malformed translation entry "{entry}"; expected "in:from:to" or "out:from:to".'
            raise ValueError(msg)
        if parts[0] == 'in':
            tin[parts[1]] = parts[2]
        if parts[0] == 'out':
            tout[parts[1]] = parts[2]
    return tin, tout
def check_variables() -> None:
    """Warn about AIDO* environment variables that the wrapper does not know.

    Fix: ``logger.warn`` is a deprecated alias; use ``logger.warning``.
    """
    for k, v in os.environ.items():
        if k.startswith('AIDO') and k not in KNOWN:
            msg = f'I do not expect variable "{k}" set in environment with value "{v}".'
            msg += ' I expect: %s' % ", ".join(KNOWN)
            logger.warning(msg)
from .constants import *
def run_loop(node: object, protocol: InteractionProtocol, args: Optional[List[str]] = None):
    """Entry point: configure I/O from environment/CLI and run the main loop.

    Environment variables (see constants) provide defaults; command-line
    arguments override them. Any failure inside the loop is re-raised with
    the node name attached; file handles are flushed/closed on the way out.
    """
    parser = argparse.ArgumentParser()
    check_variables()
    # Defaults come from the environment; CLI flags override.
    data_in = os.environ.get(ENV_DATA_IN, '/dev/stdin')
    data_out = os.environ.get(ENV_DATA_OUT, '/dev/stdout')
    default_name = os.environ.get(ENV_NAME, None)
    translate = os.environ.get(ENV_TRANSLATE, '')
    config = os.environ.get(ENV_CONFIG, '{}')
    parser.add_argument('--data-in', default=data_in)
    parser.add_argument('--data-out', default=data_out)
    parser.add_argument('--name', default=default_name)
    parser.add_argument('--config', default=config)
    parser.add_argument('--translate', default=translate)
    parser.add_argument('--loose', default=False, action='store_true')
    parsed = parser.parse_args(args)
    tin, tout = get_translation_table(parsed.translate)
    # expect in:name1:name2, out:name2:name1
    fin = parsed.data_in
    fout = parsed.data_out
    fi = open_for_read(fin)
    fo = open_for_write(fout)
    node_name = parsed.name or type(node).__name__
    logger.name = node_name
    # NOTE(review): parses the env default, not parsed.config — CLI --config
    # appears to be ignored here; confirm intended.
    config = yaml.load(config, Loader=yaml.SafeLoader)
    try:
        loop(node_name, fi, fo, node, protocol, tin, tout,
             config=config)
    except BaseException as e:
        msg = f'Error in node {node_name}'
        logger.error(f'Error in node {node_name}: \n{traceback.format_exc()}')
        raise Exception(msg) from e
    finally:
        fo.flush()
        fo.close()
        fi.close()
def loop(node_name: str, fi, fo, node, protocol: InteractionProtocol, tin, tout, config: dict):
    """Main message loop: read control/topic messages from ``fi``, dispatch to
    the node (or to the wrapper's meta-protocol handler), and write replies
    through ``fo``.

    The node's ``init()`` is called lazily before its first data message;
    ``finish()`` is called when the input stream ends. Errors are reported
    to the other side via control messages before being re-raised.
    """
    logger.info(f'Starting reading')
    initialized = False
    context_data = None
    sink = Sink(fo)
    try:
        # Two contexts: one for the node's own protocol, one for the
        # wrapper-level meta protocol.
        context_data = ConcreteContext(sink=sink, protocol=protocol,
                                       node_name=node_name, tout=tout)
        context_meta = ConcreteContext(sink=sink, protocol=basic_protocol,
                                       node_name=node_name + '.wrapper', tout=tout)
        wrapper = MetaHandler(node, protocol)
        # Apply the initial configuration before processing any messages.
        for k, v in config.items():
            wrapper.set_config(k, v)
        waiting_for = 'Expecting control message or one of: %s' % context_data.pc.get_expected_events()
        for parsed in inputs(fi, waiting_for=waiting_for):
            if isinstance(parsed, ControlMessage):
                # Only the capabilities handshake is understood here.
                expect = [CTRL_CAPABILITIES]
                if parsed.code not in expect:
                    msg = f'I expect any of {expect}, not "{parsed.code}".'
                    sink.write_control_message(CTRL_NOT_UNDERSTOOD, msg)
                    sink.write_control_message(CTRL_OVER)
                else:
                    if parsed.code == CTRL_CAPABILITIES:
                        my_capabilities = {
                            'z2': {
                                CAPABILITY_PROTOCOL_REFLECTION: True
                            }
                        }
                        sink.write_control_message(CTRL_UNDERSTOOD)
                        sink.write_control_message(CTRL_CAPABILITIES, my_capabilities)
                        sink.write_control_message(CTRL_OVER)
                    else:
                        assert False
            elif isinstance(parsed, RawTopicMessage):
                # Translate incoming topic names first.
                parsed.topic = tin.get(parsed.topic, parsed.topic)
                logger_interaction.info(f'Received message of topic "{parsed.topic}".')
                # "wrapper.*" topics are handled by the meta-protocol handler.
                if parsed.topic.startswith('wrapper.'):
                    parsed.topic = parsed.topic.replace('wrapper.', '')
                    receiver0 = wrapper
                    context0 = context_meta
                else:
                    receiver0 = node
                    context0 = context_data
                # Lazily initialize the node before its first data message.
                if receiver0 is node and not initialized:
                    try:
                        call_if_fun_exists(node, 'init', context=context_data)
                    except BaseException as e:
                        msg = "Exception while calling the node's init() function."
                        msg += '\n\n' + indent(traceback.format_exc(), '| ')
                        context_meta.write('aborted', msg)
                        raise Exception(msg) from e
                    initialized = True
                if parsed.topic not in context0.protocol.inputs:
                    msg = f'Input channel "{parsed.topic}" not found in protocol. '
                    msg += f'\n\nKnown channels: {sorted(context0.protocol.inputs)}'
                    sink.write_control_message(CTRL_NOT_UNDERSTOOD, msg)
                    sink.write_control_message(CTRL_OVER)
                    raise ExternalProtocolViolation(msg)
                sink.write_control_message(CTRL_UNDERSTOOD)
                try:
                    handle_message_node(parsed, receiver0, context0)
                    # Flush whatever the handler queued, then signal "over".
                    to_write = context0.get_to_write()
                    # msg = f'I wrote {len(to_write)} messages.'
                    # logger.info(msg)
                    for rtm in to_write:
                        sink.write_topic_message(rtm.topic, rtm.data, rtm.timing)
                    sink.write_control_message(CTRL_OVER)
                except BaseException as e:
                    msg = f'Exception while handling a message on topic "{parsed.topic}".'
                    msg += '\n\n' + indent(traceback.format_exc(), '| ')
                    sink.write_control_message(CTRL_ABORTED, msg)
                    sink.write_control_message(CTRL_OVER)
                    raise InternalProblem(msg) from e  # XXX
            else:
                assert False
        # Input stream ended: check that the protocol reached a valid end state.
        res = context_data.pc.finish()
        if isinstance(res, Unexpected):
            msg = f'Protocol did not finish: {res}'
            logger_interaction.error(msg)
        if initialized:
            try:
                call_if_fun_exists(node, 'finish', context=context_data)
            except BaseException as e:
                msg = "Exception while calling the node's finish() function."
                msg += '\n\n' + indent(traceback.format_exc(), '| ')
                context_meta.write('aborted', msg)
                raise Exception(msg) from e
    except BrokenPipeError:
        # The reader went away; nothing more to do.
        msg = 'The other side closed communication.'
        logger.info(msg)
        return
    except ExternalTimeout as e:
        msg = 'Could not receive any other messages.'
        if context_data:
            msg += '\n Expecting one of: %s' % context_data.pc.get_expected_events()
        sink.write_control_message(CTRL_ABORTED, msg)
        sink.write_control_message(CTRL_OVER)
        raise ExternalTimeout(msg) from e
    except InternalProblem:
        raise
    except BaseException as e:
        msg = f"Unexpected error:"
        msg += '\n\n' + indent(traceback.format_exc(), '| ')
        sink.write_control_message(CTRL_ABORTED, msg)
        sink.write_control_message(CTRL_OVER)
        raise InternalProblem(msg) from e  # XXX
class MetaHandler:
    """Implements the wrapper-level meta protocol (basic_protocol) on behalf
    of the wrapped node: configuration get/set and self-description."""

    def __init__(self, node, protocol):
        self.node = node
        self.protocol = protocol

    def set_config(self, key, value):
        """Set ``key`` on the node's ``config`` attribute.

        Raises ValueError if the node has no config or the key is unknown.
        """
        if hasattr(self.node, ATT_CONFIG):
            config = self.node.config
            if hasattr(config, key):
                setattr(self.node.config, key, value)
            else:
                msg = f'Could not find config key {key}'
                raise ValueError(msg)
        else:
            msg = 'Node does not have the "config" attribute.'
            raise ValueError(msg)

    def on_received_set_config(self, context, data: SetConfig):
        # Report success/failure back on the meta channels.
        key = data.key
        value = data.value
        try:
            self.set_config(key, value)
        except ValueError as e:
            context.write('set_config_error', str(e))
        else:
            context.write('set_config_ack', None)

    def on_received_describe_protocol(self, context):
        desc = ProtocolDescription(data=self.protocol, meta=basic_protocol)
        context.write('protocol_description', desc)

    def on_received_describe_config(self, context):
        # Describe the node's config type and current values; nodes without a
        # declared config get an empty placeholder.
        K = type(self.node)
        if hasattr(K, '__annotations__') and ATT_CONFIG in K.__annotations__:
            config_type = K.__annotations__[ATT_CONFIG]
            config_current = getattr(self.node, ATT_CONFIG)
        else:
            @dataclass
            class NoConfig:
                pass

            config_type = NoConfig
            config_current = NoConfig()
        desc = ConfigDescription(config=config_type, current=config_current)
        context.write('config_description', desc, with_schema=True)

    def on_received_describe_node(self, context):
        desc = NodeDescription(self.node.__doc__)
        context.write('node_description', desc, with_schema=True)

    def on_received_describe_build(self, context):
        desc = BuildDescription()
        context.write('build_description', desc, with_schema=True)
def handle_message_node(parsed: RawTopicMessage,
                        agent, context: ConcreteContext):
    """Deserialize one topic message, validate it against the protocol
    language, and dispatch it to the agent's ``on_received_<topic>`` handler.

    Raises DecodingError if the payload cannot be deserialized, and
    ExternalProtocolViolation if the message is out of order.
    """
    protocol = context.protocol
    topic = parsed.topic
    data = parsed.data
    pc = context.pc

    klass = protocol.inputs[topic]
    try:
        ob = object_from_ipce(data, klass)
    except BaseException as e:
        msg = f'Cannot deserialize object for topic "{topic}" expecting {klass}.'
        # Best effort: include the raw message in the error report.
        try:
            parsed = json.dumps(parsed, indent=2)
        except:
            parsed = str(parsed)
        msg += '\n\n' + indent(parsed, '|', 'parsed: |')
        raise DecodingError(msg) from e
    # Attach (or create) timing info and stamp the local receive time.
    if parsed.timing is not None:
        timing = object_from_ipce(parsed.timing, TimingInfo)
    else:
        timing = TimingInfo()
    timing.received = local_time()
    context.set_last_timing(timing)
    # logger.info(f'Before push the state is\n{pc}')
    # Advance the protocol checker; capture expectations first for the error
    # message in case the event is rejected.
    event = InputReceived(topic)
    expected = pc.get_expected_events()
    res = pc.push(event)
    # names = pc.get_active_states_names()
    # logger.info(f'After push of {event}: result \n{res} active {names}' )
    if isinstance(res, Unexpected):
        msg = f'Unexpected input "{topic}": {res}'
        msg += f'\nI expected: {expected}'
        msg += '\n' + format_obs(dict(pc=pc))
        logger.error(msg)
        raise ExternalProtocolViolation(msg)
    else:
        expect_fn = f'on_received_{topic}'
        call_if_fun_exists(agent, expect_fn, data=ob, context=context, timing=timing)
def check_implementation(node, protocol: InteractionProtocol):
    """Check that the node and the protocol agree on handlers.

    Every protocol input must have an ``on_received_<channel>`` method, and
    every such method must correspond to a declared input.
    Raises NotConforming on any mismatch.
    """
    logger.info('checking implementation')
    handler_prefix = 'on_received_'
    for channel in protocol.inputs:
        handler_name = f'{handler_prefix}{channel}'
        if hasattr(node, handler_name):
            continue
        msg = f'Missing function {handler_name}'
        msg += f'\nI know {sorted(type(node).__dict__)}'
        raise NotConforming(msg)
    for attr in type(node).__dict__:
        if not attr.startswith(handler_prefix):
            continue
        input_name = attr.replace(handler_prefix, '')
        if input_name not in protocol.inputs:
            msg = f'The node has function "{attr}" but there is no input "{input_name}".'
            raise NotConforming(msg)
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/wrapper.py
|
wrapper.py
|
import os
import stat
import time
from io import BufferedReader
from zuper_commons.fs import make_sure_dir_exists
from . import logger_interaction
from . import logger
def wait_for_creation(fn):
    """Block until the path ``fn`` exists, polling once per second and
    logging each wait."""
    while True:
        if os.path.exists(fn):
            return
        msg = 'waiting for creation of %s' % fn
        logger.info(msg)
        time.sleep(1)
def open_for_read(fin, timeout=None):
    """Open ``fin`` for unbuffered binary reading, waiting for it to appear.

    Polls once per second until the file exists; raises EnvironmentError
    (the package's, not the builtin) if ``timeout`` seconds elapse first.
    """
    t0 = time.time()
    # first open reader file in case somebody is waiting for it
    while not os.path.exists(fin):
        delta = time.time() - t0
        if timeout is not None and (delta > timeout):
            msg = f'The file {fin} was not created before {timeout} seconds. I give up.'
            raise EnvironmentError(msg)
        logger_interaction.info(f'waiting for file {fin} to be created')
        time.sleep(1)
    logger_interaction.info(f'Opening input {fin}')
    fi = open(fin, 'rb', buffering=0)
    # Wrap in a BufferedReader with minimal buffering so reads stay responsive.
    # noinspection PyTypeChecker
    fi = BufferedReader(fi, buffer_size=1)
    return fi
def open_for_write(fout):
    """Open ``fout`` for unbuffered binary writing.

    A "fifo:" prefix requests a named pipe: the fifo is created if needed
    (replacing a regular file at that path) and opening blocks until a
    reader connects. '/dev/stdout' is opened directly.
    """
    if fout == '/dev/stdout':
        return open('/dev/stdout', 'wb', buffering=0)
    else:
        wants_fifo = fout.startswith('fifo:')
        fout = fout.replace('fifo:', '')
        logger_interaction.info(f'Opening output file {fout} (wants fifo: {wants_fifo})')
        if not os.path.exists(fout):
            if wants_fifo:
                make_sure_dir_exists(fout)
                os.mkfifo(fout)
                logger_interaction.info('Fifo created.')
        else:
            # Path exists: if a fifo was requested but a regular file is
            # there, replace it.
            is_fifo = stat.S_ISFIFO(os.stat(fout).st_mode)
            if wants_fifo and not is_fifo:
                logger_interaction.info(f'Recreating {fout} as a fifo.')
                os.unlink(fout)
                os.mkfifo(fout)
        if wants_fifo:
            logger_interaction.info('Fifo detected. Opening will block until a reader appears.')
        make_sure_dir_exists(fout)
        fo = open(fout, 'wb', buffering=0)
        if wants_fifo:
            logger_interaction.info('Reader has connected to my fifo')
        return fo
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/streams.py
|
streams.py
|
import cbor2
from .constants import *
class Sink:
    """Writes CBOR-encoded protocol messages to an output stream."""

    def __init__(self, of):
        # ``of``: a writable binary stream.
        self.of = of

    def write_topic_message(self, topic, data, timing):
        """ Can raise BrokenPipeError"""
        m = {}
        m[FIELD_COMPAT] = [CUR_PROTOCOL]
        m[FIELD_TOPIC] = topic
        m[FIELD_DATA] = data
        m[FIELD_TIMING] = timing
        self._write_raw(m)

    def write_control_message(self, code, data=None):
        """ Can raise BrokenPipeError"""
        m = {}
        m[FIELD_CONTROL] = code
        m[FIELD_DATA] = data
        self._write_raw(m)

    def _write_raw(self, m: dict):
        """ Can raise BrokenPipeError"""
        # Encode and flush immediately so the reader is never kept waiting.
        j = cbor2.dumps(m)
        self.of.write(j)
        self.of.flush()
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/writing.py
|
writing.py
|
from dataclasses import dataclass
from typing import *
from .constants import *
from zuper_nodes.structures import TimingInfo
X = TypeVar('X')


@dataclass
class MsgReceived(Generic[X]):
    """A fully decoded incoming message: topic, typed payload, and timing."""
    topic: str
    data: X
    timing: TimingInfo
@dataclass
class RawTopicMessage:
    """A topic message in wire form (payload/timing still IPCE dictionaries)."""
    topic: str
    data: Optional[dict]
    timing: Optional[dict]


@dataclass
class ControlMessage:
    """A control-channel message: a code plus optional payload."""
    code: str
    msg: Optional[str]
# A raw decoded dictionary as read off the wire.
WireMessage = NewType('WireMessage', dict)


class Malformed(Exception):
    """Raised when a wire message does not have the expected structure."""
    pass
def interpret_control_message(m: WireMessage) -> ControlMessage:
    """Validate a raw wire dictionary and extract its control code and payload.

    Raises Malformed if ``m`` is not a dict or lacks the control field.
    """
    if not isinstance(m, dict):
        raise Malformed(f'Expected dictionary, not {type(m)}.')
    if FIELD_CONTROL not in m:
        raise Malformed(f'Expected field {FIELD_CONTROL}, obtained {list(m)}.')
    return ControlMessage(m[FIELD_CONTROL], m.get(FIELD_DATA, None))
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/struct.py
|
struct.py
|
import inspect
from zuper_commons.types import ZTypeError
from . import logger
def call_if_fun_exists(ob, fname, **kwargs):
    """Call method ``fname`` on ``ob`` with the subset of ``kwargs`` it accepts.

    If the method does not exist, a warning is logged and nothing happens.
    Keyword arguments the callee does not declare are silently dropped.
    A TypeError from the call is wrapped in ZTypeError with diagnostics.

    Fix: argument filtering previously consulted only ``argspec.args``, so
    keyword-only parameters (``def f(*, data)``) were wrongly dropped; they
    are now kept.
    """
    if not hasattr(ob, fname):
        msg = f'Missing function {fname}() for {type(ob)}'
        logger.warning(msg)
        return
    f = getattr(ob, fname)
    a = inspect.getfullargspec(f)
    # Keep only the parameters the callee declares (positional or
    # keyword-only); everything else is dropped.
    accepted = set(a.args) | set(a.kwonlyargs)
    kwargs = {k: v for k, v in kwargs.items() if k in accepted}
    try:
        f(**kwargs)
    except TypeError as e:
        msg = f'Cannot call function {f}.'
        raise ZTypeError(msg, f=f, args=kwargs, argspec=a) from e
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/utils.py
|
utils.py
|
from dataclasses import dataclass
from typing import *
from zuper_nodes import InteractionProtocol
@dataclass
class SetConfig:
    """Request to set one configuration key on the node."""
    key: str
    value: Any


@dataclass
class ConfigDescription:
    """The node's configuration type and its current values."""
    config: type
    current: Any


@dataclass
class NodeDescription:
    """Human-readable description of the node (its docstring)."""
    description: str


@dataclass
class BuildDescription:
    """Placeholder for build metadata."""
    pass


@dataclass
class ProtocolDescription:
    """The node's data protocol and the wrapper's meta protocol."""
    data: InteractionProtocol
    meta: InteractionProtocol
@dataclass
class CommsHealth:
    """Counters describing the health of the communication channel."""
    # ignored because not compatible
    ignored: Dict[str, int]
    # unexpected topics
    unexpected: Dict[str, int]
    # malformed data
    malformed: Dict[str, int]
    # if we are completely lost
    unrecoverable_protocol_error: bool
@dataclass
class NodeHealth:
    """Self-reported health status of a node."""
    # there is a critical error that makes it useless to continue
    critical: bool
    # severe problem but we can continue
    severe: bool
    # a minor problem to report
    minor: bool
    details: str
LogEntry = str
basic_protocol = InteractionProtocol(
description="""\
Basic interaction protocol for nodes spoken by the node wrapper.
""",
inputs={
"describe_config": type(None),
"set_config": SetConfig,
"describe_protocol": type(None),
"describe_node": type(None),
"describe_build": type(None),
"get_state": type(None),
"set_state": Any,
"get_logs": type(None),
},
language="""\
(
(in:describe_config ; out:config_description) |
(in:set_config ; (out:set_config_ack | out:set_config_error)) |
(in:describe_protocol ; out:protocol_description) |
(in:describe_node ; out:node_description) |
(in:describe_build ; out:build_description) |
(in:get_state ; out:node_state) |
(in:set_state ; (out:set_state_ack| out:set_state_error) ) |
(in:get_logs ; out:logs) |
out:aborted
)*
""",
outputs={
"config_description": ConfigDescription,
'set_config_ack': type(None),
'set_config_error': str,
'protocol_description': ProtocolDescription,
'node_description': NodeDescription,
'build_description': BuildDescription,
'node_state': Any,
'set_state_ack': type(None),
'set_state_error': str,
'logs': List[LogEntry],
'aborted': str,
})
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/meta_protocol.py
|
meta_protocol.py
|
import sys
from abc import ABCMeta, abstractmethod
from typing import List, Optional
from zuper_nodes.structures import TimingInfo
__all__ = [
'Context',
'wrap_direct',
]
def wrap_direct(node, protocol, args: Optional[List[str]] = None):
    """Run ``node`` under the wrapper: validate its handlers against
    ``protocol``, then enter the main loop.

    ``args`` defaults to the process command line (sys.argv[1:]).
    """
    if args is None:
        args = sys.argv[1:]
    # Local imports keep module import cheap and avoid cycles.
    from zuper_commons.logs import monkeypatch_findCaller
    from zuper_nodes_wrapper.wrapper import check_implementation, run_loop
    monkeypatch_findCaller()
    check_implementation(node, protocol)
    run_loop(node, protocol, args)
class Context(metaclass=ABCMeta):
    """Interface handed to node handlers for writing outputs and logging."""

    @abstractmethod
    def write(self, topic: str, data: object, timing: Optional[TimingInfo] = None, with_schema: bool = False):
        """Queue a message on an output ``topic``."""
        pass

    @abstractmethod
    def info(self, msg: str): pass

    @abstractmethod
    def debug(self, msg: str): pass

    @abstractmethod
    def warning(self, msg: str): pass

    @abstractmethod
    def error(self, msg: str): pass

    @abstractmethod
    def get_hostname(self):
        """Return the hostname this context runs on."""
        pass
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/interface.py
|
interface.py
|
import os
from io import BufferedReader
from typing import *
import cbor2 as cbor
from zuper_commons.text import indent
from zuper_commons.types import ZException
from zuper_ipce import IESO, ipce_from_object, object_from_ipce
from zuper_ipce.json2cbor import read_next_cbor
from zuper_nodes import ExternalProtocolViolation, InteractionProtocol
from zuper_nodes.compatibility import check_compatible_protocol
from zuper_nodes.structures import ExternalNodeDidNotUnderstand, RemoteNodeAborted, TimingInfo
from zuper_nodes_wrapper.meta_protocol import basic_protocol, ProtocolDescription
from zuper_nodes_wrapper.streams import wait_for_creation
from zuper_nodes_wrapper.struct import interpret_control_message, MsgReceived, WireMessage
from . import logger, logger_interaction
from .constants import *
class ComponentInterface:
def __init__(self, fnin: str, fnout: str,
             expect_protocol: InteractionProtocol, nickname: str,
             timeout=None):
    """Connect to a node via two named pipes.

    ``fnin`` (we write) is created as a fifo here; ``fnout`` (we read) is
    expected to be created by the node, and we wait for it to appear.
    """
    self.nickname = nickname
    # Optional "carbon copy" stream; see cc().
    self._cc = None
    try:
        os.mkfifo(fnin)
    except BaseException as e:
        msg = f'Cannot create fifo {fnin}'
        raise Exception(msg) from e
    self.fpin = open(fnin, 'wb', buffering=0)
    wait_for_creation(fnout)
    self.fnout = fnout
    f = open(fnout, 'rb', buffering=0)
    # Minimal buffering keeps reads responsive on the pipe.
    # noinspection PyTypeChecker
    self.fpout = BufferedReader(f, buffer_size=1)
    self.nreceived = 0
    self.expect_protocol = expect_protocol
    # Filled in by _get_node_protocol() if the node supports reflection.
    self.node_protocol = None
    self.data_protocol = None
    self.timeout = timeout
def close(self):
    """Close both pipe endpoints."""
    self.fpin.close()
    self.fpout.close()
def cc(self, f):
    """ CC-s everything that is read or written to this file. """
    self._cc = f
def _get_node_protocol(self, timeout: float = None):
    """Handshake with the node: exchange capabilities, then (if the node
    supports protocol reflection) ask it for its protocol and check it is
    compatible with ``expect_protocol``."""
    self.my_capabilities = {'z2': {CAPABILITY_PROTOCOL_REFLECTION: True}}
    msg = {
        FIELD_CONTROL: CTRL_CAPABILITIES,
        FIELD_DATA: self.my_capabilities
    }
    j = self._serialize(msg)
    self._write(j)
    # The node answers with its own capabilities.
    msgs = read_reply(self.fpout, timeout=timeout, waiting_for=f"Reading {self.nickname} capabilities",
                      nickname=self.nickname)
    self.node_capabilities = msgs[0]['data']
    logger.info('My capabilities: %s' % self.my_capabilities)
    logger.info('Found capabilities: %s' % self.node_capabilities)
    if 'z2' not in self.node_capabilities:
        msg = 'Incompatible node; capabilities %s' % self.node_capabilities
        raise ExternalProtocolViolation(msg)
    z = self.node_capabilities['z2']
    if not z.get(CAPABILITY_PROTOCOL_REFLECTION, False):
        # No reflection: we can only proceed with a locally supplied protocol.
        logger.info('Node does not support reflection.')
        if self.expect_protocol is None:
            msg = 'Node does not support reflection - need to provide protocol.'
            raise Exception(msg)
    else:
        ob: MsgReceived[ProtocolDescription] = \
            self.write_topic_and_expect('wrapper.describe_protocol',
                                        expect='protocol_description',
                                        timeout=timeout)
        self.node_protocol = ob.data.data
        self.data_protocol = ob.data.meta
        if self.expect_protocol is not None:
            check_compatible_protocol(self.node_protocol, self.expect_protocol)
def write_topic_and_expect(self, topic: str, data=None, with_schema: bool = False,
timeout: float = None,
timing=None,
expect: str = None) -> MsgReceived:
timeout = timeout or self.timeout
self._write_topic(topic, data=data, with_schema=with_schema, timing=timing)
ob: MsgReceived = self.read_one(expect_topic=expect, timeout=timeout)
return ob
def write_topic_and_expect_zero(self, topic: str, data=None, with_schema=False,
timeout=None,
timing=None):
timeout = timeout or self.timeout
self._write_topic(topic, data=data, with_schema=with_schema, timing=timing)
msgs = read_reply(self.fpout, timeout=timeout,
nickname=self.nickname)
if msgs:
msg = 'Expecting zero, got %s' % msgs
raise ExternalProtocolViolation(msg)
def _write_topic(self, topic, data=None, with_schema=False, timing=None):
suggest_type = object
if self.node_protocol:
if topic in self.node_protocol.inputs:
suggest_type = self.node_protocol.inputs[topic]
ieso = IESO(with_schema=with_schema)
ieso_true = IESO(with_schema=True)
ipce = ipce_from_object(data, suggest_type, ieso=ieso)
# try to re-read
if suggest_type is not object:
try:
_ = object_from_ipce(ipce, suggest_type)
except BaseException as e:
msg = f'While attempting to write on topic "{topic}", cannot ' \
f'interpret the value as {suggest_type}.\nValue: {data}'
raise ZException(msg, data=data, ipce=ipce, suggest_type=suggest_type) from e # XXX
msg = {
FIELD_COMPAT: [CUR_PROTOCOL],
FIELD_TOPIC: topic,
FIELD_DATA: ipce,
FIELD_TIMING: timing
}
j = self._serialize(msg)
self._write(j)
# make sure we write the schema when we copy it
if not with_schema:
msg[FIELD_DATA] = ipce_from_object(data, ieso=ieso_true)
j = self._serialize(msg)
if self._cc:
self._cc.write(j)
self._cc.flush()
logger_interaction.info(f'Written to topic "{topic}" >> {self.nickname}.')
def _write(self, j):
try:
self.fpin.write(j)
self.fpin.flush()
except BrokenPipeError as e:
msg = f'While attempting to write to node "{self.nickname}", ' \
f'I reckon that the pipe is closed and the node exited.'
try:
received = self.read_one(expect_topic=TOPIC_ABORTED)
if received.topic == TOPIC_ABORTED:
msg += '\n\nThis is the aborted message:'
msg += '\n\n' + indent(received.data, ' |')
except BaseException as e2:
msg += f'\n\nI could not read any aborted message: {e2}'
raise RemoteNodeAborted(msg) from e
def _serialize(self, msg) -> bytes:
j = cbor.dumps(msg)
return j
def read_one(self, expect_topic: str = None, timeout: float = None) -> MsgReceived:
timeout = timeout or self.timeout
try:
if expect_topic:
waiting_for = f'Expecting topic "{expect_topic}" << {self.nickname}.'
else:
waiting_for = None
msgs = read_reply(self.fpout, timeout=timeout, waiting_for=waiting_for,
nickname=self.nickname)
if len(msgs) == 0:
msg = f'Expected one message from node "{self.nickname}". Got zero.'
if expect_topic:
msg += f'\nExpecting topic "{expect_topic}".'
raise ExternalProtocolViolation(msg)
if len(msgs) > 1:
msg = f'Expected only one message. Got {msgs}'
raise ExternalProtocolViolation(msg)
msg = msgs[0]
if FIELD_TOPIC not in msg:
m = f'Invalid message does not contain the field "{FIELD_TOPIC}".'
m += f'\n {msg}'
raise ExternalProtocolViolation(m)
topic = msg[FIELD_TOPIC]
if expect_topic:
if topic != expect_topic:
msg = f'I expected topic "{expect_topic}" but received "{topic}".'
raise ExternalProtocolViolation(msg)
if topic in basic_protocol.outputs:
klass = basic_protocol.outputs[topic]
else:
if self.node_protocol:
if topic not in self.node_protocol.outputs:
msg = f'Cannot find topic "{topic}" in outputs of detected node protocol.'
msg += '\nI know: %s' % sorted(self.node_protocol.outputs)
raise ExternalProtocolViolation(msg)
else:
klass = self.node_protocol.outputs[topic]
else:
if not topic in self.expect_protocol.outputs:
msg = f'Cannot find topic "{topic}".'
raise ExternalProtocolViolation(msg)
else:
klass = self.expect_protocol.outputs[topic]
data = object_from_ipce(msg[FIELD_DATA], klass)
ieso_true = IESO(with_schema=True)
if self._cc:
msg[FIELD_DATA] = ipce_from_object(data, ieso=ieso_true)
msg_b = self._serialize(msg)
self._cc.write(msg_b)
self._cc.flush()
if FIELD_TIMING not in msg:
timing = TimingInfo()
else:
timing = object_from_ipce(msg[FIELD_TIMING], TimingInfo)
self.nreceived += 1
return MsgReceived[klass](topic, data, timing)
except StopIteration as e:
msg = 'EOF detected on %s after %d messages.' % (self.fnout, self.nreceived)
if expect_topic:
msg += f' Expected topic "{expect_topic}".'
raise StopIteration(msg) from e
except TimeoutError as e:
msg = 'Timeout detected on %s after %d messages.' % (self.fnout, self.nreceived)
if expect_topic:
msg += f' Expected topic "{expect_topic}".'
raise TimeoutError(msg) from e
def read_reply(fpout, nickname: str, timeout=None, waiting_for=None, ) -> List:
    """
    Reads one control message from the node and dispatches on its code.

    Returns the list of messages that follow when the node acknowledges with
    CTRL_UNDERSTOOD.

    Raises:
        TimeoutError
        RemoteNodeAborted
        ExternalNodeDidNotUnderstand
        ExternalProtocolViolation otherwise.
    """
    try:
        wire: WireMessage = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
        # logger.debug(f'{nickname} sent {wire}')
    except StopIteration:
        raise RemoteNodeAborted('Remote node closed communication (%s)' % waiting_for) from None

    control = interpret_control_message(wire)
    code = control.code

    if code == CTRL_UNDERSTOOD:
        # the node accepted the message; collect its output until "over"
        return read_until_over(fpout, timeout=timeout, nickname=nickname)

    if code == CTRL_ABORTED:
        err = f'The remote node "{nickname}" aborted with the following error:'
        err += '\n\n' + indent(control.msg, "|", f"error in {nickname} |")
        raise RemoteNodeAborted(err)

    if code == CTRL_NOT_UNDERSTOOD:
        # drain the rest of the reply before raising
        read_until_over(fpout, timeout=timeout, nickname=nickname)
        err = f'The remote node "{nickname}" reports that it did not understand the message:'
        err += '\n\n' + indent(control.msg, "|", f"reported by {nickname} |")
        raise ExternalNodeDidNotUnderstand(err)

    err = 'Remote node raised unknown code %s: %s' % (control, code)
    raise ExternalProtocolViolation(err)
def read_until_over(fpout, timeout, nickname) -> List[WireMessage]:
    """ Accumulates wire messages until a CTRL_OVER marker is seen;
        returns everything received before it (the marker is not included).

        Raises RemoteNodeAborted, TimeoutError """
    res = []
    waiting_for = f'Reading reply of {nickname}.'
    while True:
        try:
            wm: WireMessage = read_next_cbor(fpout, timeout=timeout, waiting_for=waiting_for)
            if wm.get(FIELD_CONTROL, '') == CTRL_ABORTED:
                m = f'External node "{nickname}" aborted:'
                m += '\n\n' + indent(wm.get(FIELD_DATA, None), "|",
                                     f"error in {nickname} |")
                raise RemoteNodeAborted(m)
            if wm.get(FIELD_CONTROL, '') == CTRL_OVER:
                # logger.info(f'Node "{nickname}" concluded output of %s messages.' % len(res))
                break
            # logger.info(f'Node "{nickname}" sent %s.' % len(wm))
        except StopIteration:
            msg = f'External node "{nickname}" closed communication.'
            raise RemoteNodeAborted(msg) from None
        except TimeoutError:
            msg = f'Timeout while reading output of node "{nickname}".'
            raise TimeoutError(msg) from None
        res.append(wm)
    return res
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/wrapper_outside.py
|
wrapper_outside.py
|
import logging

# package-wide logger; per-message interaction chatter goes to a quieter child
logger = logging.getLogger('znw')
logger.setLevel(logging.DEBUG)
logger_interaction = logger.getChild("interaction")
logger_interaction.setLevel(logging.CRITICAL)

from .interface import *
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/__init__.py
|
__init__.py
|
import select
import time
from typing import Optional, Union, Iterator
from zuper_ipce.json2cbor import read_next_cbor
from zuper_commons.text import indent
from zuper_nodes.structures import ExternalTimeout
from zuper_nodes_wrapper.struct import interpret_control_message, RawTopicMessage, ControlMessage
from . import logger
from .constants import *
M = Union[RawTopicMessage, ControlMessage]
def inputs(f, give_up: Optional[float] = None, waiting_for: str = None) -> Iterator[M]:
    """
    Yields the messages read from stream `f` (as ControlMessage or
    RawTopicMessage) until EOF.

    Polls with select() using an exponentially growing intermediate timeout;
    malformed or protocol-incompatible messages are logged and skipped.

    :param f: readable file object whose fileno() is usable with select().
    :param give_up: if set, raise ExternalTimeout when no input has arrived
                    for more than this many seconds.
    :param waiting_for: description used in log/error messages.
    :raises ExternalTimeout: when `give_up` expires.
    """
    last = time.time()
    intermediate_timeout = 3.0
    intermediate_timeout_multiplier = 1.5
    while True:
        readyr, readyw, readyx = select.select([f], [], [f], intermediate_timeout)
        if readyr:
            try:
                parsed = read_next_cbor(f, waiting_for=waiting_for)
            except StopIteration:
                return
            # FIX: refresh the activity timestamp, so that give_up measures the
            # time since the *last* input (as the variable name implies), not
            # since the start of the iteration.
            last = time.time()

            if not isinstance(parsed, dict):
                msg = f'Expected a dictionary, obtained {parsed!r}'
                logger.error(msg)
                continue

            if FIELD_CONTROL in parsed:
                m = interpret_control_message(parsed)
                yield m
            elif FIELD_TOPIC in parsed:
                if not FIELD_COMPAT in parsed:
                    msg = f'Could not find field "compat" in structure "{parsed}".'
                    logger.error(msg)
                    continue
                l = parsed[FIELD_COMPAT]
                if not isinstance(l, list):
                    msg = f'Expected a list for compatibility value, found {l!r}'
                    logger.error(msg)
                    continue
                if not CUR_PROTOCOL in parsed[FIELD_COMPAT]:
                    msg = f'Skipping message because could not find {CUR_PROTOCOL} in {l}.'
                    # logger.warn is deprecated; use warning()
                    logger.warning(msg)
                    continue
                rtm = RawTopicMessage(parsed[FIELD_TOPIC],
                                      parsed.get(FIELD_DATA, None),
                                      parsed.get(FIELD_TIMING, None))
                yield rtm
        elif readyx:
            logger.warning('Exceptional condition on input channel %s' % readyx)
        else:
            delta = time.time() - last
            if give_up is not None and (delta > give_up):
                msg = 'I am giving up after %.1f seconds.' % delta
                raise ExternalTimeout(msg)
            else:
                intermediate_timeout *= intermediate_timeout_multiplier
                msg = 'Input channel not ready after %.1f seconds. Will re-try.' % delta
                if waiting_for:
                    msg += '\n' + indent(waiting_for, '> ')
                # BUG FIX: the original *reassigned* msg here, throwing away the
                # "not ready" text and the waiting_for context just built above.
                msg += '\nI will warn again in %.1f seconds.' % intermediate_timeout
                logger.warning(msg)
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/reading.py
|
reading.py
|
import argparse
import dataclasses
import subprocess
import sys
from dataclasses import dataclass
from io import BufferedReader, BytesIO
import cbor2
import yaml
from zuper_ipce import object_from_ipce
from zuper_ipce.json2cbor import read_cbor_or_json_objects
from contracts import indent
from zuper_nodes import InteractionProtocol
from zuper_nodes_wrapper.meta_protocol import (BuildDescription, ConfigDescription, NodeDescription,
ProtocolDescription,
cast)
from . import logger
def identify_main():
    """ CLI entry point: prints the protocol / build / node / config
        descriptions of a node, given either --image (a Docker image) or
        --command (a command line to run). """
    usage = None
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument('--image', default=None)
    parser.add_argument('--command', default=None)
    parsed = parser.parse_args()
    image = parsed.image
    if image is not None:
        ni: NodeInfo = identify_image2(image)
    elif parsed.command is not None:
        command = parsed.command.split()
        ni: NodeInfo = identify_command(command)
    else:
        msg = 'Please specify either --image or --command'
        logger.error(msg)
        sys.exit(1)
    # render each section with a distinguishing prefix
    print('\n\n')
    print(indent(describe_nd(ni.nd), '', 'desc: '))
    print('\n\n')
    print(indent(describe_bd(ni.bd), '', 'build: '))
    print('\n\n')
    print(indent(describe_cd(ni.cd), '', 'config: '))
    print('\n\n')
    print(indent(describe(ni.pd.data), '', 'data: '))
    print('\n\n')
    print(indent(describe(ni.pd.meta), '', 'meta: '))
def describe_nd(nd: NodeDescription):
    """ Renders a NodeDescription as plain text. """
    return str(nd.description)
def describe_bd(nd: BuildDescription):
    """ Renders a BuildDescription as plain text. """
    return str(nd)
def describe_cd(nd: ConfigDescription):
    """
    Renders the configuration switches of a node: one line per dataclass field
    (name, type, default), preceded by the config class docstring if present.
    """
    s = []
    # noinspection PyDataclass
    for f in dataclasses.fields(nd.config):
        # for k, v in nd.config.__annotations__.items():
        s.append('%20s: %s = %s' % (f.name, f.type, f.default))
    if not s:
        return 'No configuration switches available.'

    # BUG FIX: hasattr(obj, '__doc__') is always True, while __doc__ may be
    # None (undocumented class, or running under -OO); inserting None made
    # "\n".join() raise TypeError. Only prepend a non-empty docstring.
    if getattr(nd.config, '__doc__', None):
        s.insert(0, nd.config.__doc__)
    return "\n".join(s)
def describe(ip: InteractionProtocol):
    """ Renders an InteractionProtocol (description, input/output channels and
        the interaction language) as a human-readable text block. """
    s = "InteractionProtocol"
    s += '\n\n' + '* Description:'
    s += '\n\n' + indent(ip.description.strip(), ' ')
    s += '\n\n' + '* Inputs:'
    for name, type_ in ip.inputs.items():
        s += '\n %25s: %s' % (name, type_)
    s += '\n\n' + '* Outputs:'
    for name, type_ in ip.outputs.items():
        s += '\n %25s: %s' % (name, type_)
    s += '\n\n' + '* Language:'
    s += '\n\n' + ip.language
    return s
@dataclass
class NodeInfo:
    """ Everything learned about a node through the meta protocol. """
    pd: ProtocolDescription  # the data/meta protocols spoken by the node
    nd: NodeDescription      # human-readable description
    bd: BuildDescription     # how the node was built
    cd: ConfigDescription    # its configuration switches
def identify_command(command) -> NodeInfo:
    """ Runs `command`, sends the four wrapper.describe_* queries on its stdin
        (CBOR-encoded), and parses the four replies from stdout into a NodeInfo.

        The node's stderr is forwarded to our stderr with a prefix. """
    d = [{'topic': 'wrapper.describe_protocol'},
         {'topic': 'wrapper.describe_config'},
         {'topic': 'wrapper.describe_node'},
         {'topic': 'wrapper.describe_build'}
         ]
    to_send = b''
    for p in d:
        p['compat'] = ['aido2']
        # to_send += (json.dumps(p) + '\n').encode('utf-8')
        to_send += cbor2.dumps(p)
    cp = subprocess.run(command, input=to_send, capture_output=True)
    s = cp.stderr.decode('utf-8')
    sys.stderr.write(indent(s.strip(), '|', ' stderr: |') + '\n\n')
    # noinspection PyTypeChecker
    f = BufferedReader(BytesIO(cp.stdout))
    stream = read_cbor_or_json_objects(f)
    # replies arrive in the same order as the queries above
    res = stream.__next__()
    logger.debug(yaml.dump(res))
    pd = cast(ProtocolDescription, object_from_ipce(res['data'], ProtocolDescription))
    res = stream.__next__()
    logger.debug(yaml.dump(res))
    cd = cast(ConfigDescription, object_from_ipce(res['data'], ConfigDescription))
    res = stream.__next__()
    logger.debug(yaml.dump(res))
    nd = cast(NodeDescription, object_from_ipce(res['data'], NodeDescription))
    res = stream.__next__()
    logger.debug(yaml.dump(res))
    bd = cast(BuildDescription, object_from_ipce(res['data'], BuildDescription))
    logger.debug(yaml.dump(res))
    return NodeInfo(pd, nd, bd, cd)
def identify_image2(image) -> NodeInfo:
    """ Identifies a Docker image by running it interactively (`docker run -i`)
        and speaking the meta protocol over stdin/stdout. """
    cmd = ['docker', 'run', '--rm', '-i', image]
    return identify_command(cmd)
# def identify_image(image):
# import docker
# client = docker.from_env()
#
#
# container: Container = client.containers.create(image, detach=True, stdin_open=True)
# print(container)
# # time.sleep(4)
# # attach to the container stdin socket
# container.start()
# # s = container.exec_run()
# s: SocketIO = container.attach_socket(params={'stdin': 1, 'stream': 1, 'stderr': 0, 'stdout': 0})
# s_out: SocketIO = container.attach_socket(params={ 'stream': 1, 'stdout': 1, 'stderr': 0, 'stdin': 0})
# s_stderr: SocketIO = container.attach_socket(params={'stream': 1, 'stdout': 0, 'stderr': 1, 'stdin': 0})
# print(s.__dict__)
# print(s_out.__dict__)
# # send text
# # s.write(j)
# os.write(s._sock.fileno(), j)
# os.close(s._sock.fileno())
# s._sock.close()
# # s.close()
#
# f = os.fdopen(s_out._sock.fileno(), 'rb')
# # there is some garbage: b'\x01\x00\x00\x00\x00\x00\x1e|{
# f.read(8)
#
# for x in read_cbor_or_json_objects(f):
# print(x)
# print(f.read(10))
# # print(os.read(s_out._sock.fileno(), 100))
#
# print(os.read(s_stderr._sock.fileno(), 100))
# # close, stop and disconnect
# s.close()
|
zuper-nodes-z5
|
/zuper-nodes-z5-5.0.9.tar.gz/zuper-nodes-z5-5.0.9/src/zuper_nodes_wrapper/identify.py
|
identify.py
|
import sys

from setuptools import setup

# refuse to install on interpreters older than Python 3.6
if not sys.version_info >= (3, 6, 0):
    msg = "Unsupported version %s" % sys.version
    raise Exception(msg)
def get_version(filename):
    """
    Extracts __version__ from a Python source file without importing it.

    :param filename: path of a file containing a ``__version__ = "..."`` line.
    :return: the version string.
    :raises ValueError: if no ``__version__`` assignment is found.
    """
    import ast
    with open(filename) as f:
        for line in f:
            if line.startswith("__version__"):
                # parse the assignment and evaluate its literal right-hand side;
                # ast.literal_eval replaces the deprecated ast.Str.s attribute
                return ast.literal_eval(ast.parse(line).body[0].value)
    # (the original also had an unreachable `if version is None` check;
    # after a successful break the version is always set)
    raise ValueError("No version found in %r." % filename)
# all zuper packages of the same release line share this suffix
line = "z6"

install_requires = [
    "pyparsing",
    "networkx>=2,<3",
    "termcolor",
    f"zuper-ipce-{line}",
    "base58<2.0,>=1.0.2",
    "cbor2",
    "PyYAML",
    f"zuper-commons-{line}",
    f"zuper-typing-{line}",
]
tests_require = [
    f"compmake-{line}",
    "pydot",
]

# single source of truth for the version is the package __init__
version = get_version(filename="src/zuper_nodes/__init__.py")

setup(
    name=f"zuper-nodes-{line}",
    version=version,
    keywords="",
    package_dir={"": "src"},
    packages=["zuper_nodes", "zuper_nodes_tests", "zuper_nodes_wrapper", "zuper_nodes_wrapper_tests",],
    install_requires=install_requires,
    extras_require={'test': tests_require},
    tests_require=tests_require,
    entry_points={"console_scripts": ["zuper-node-identify=zuper_nodes_wrapper.identify:identify_main",
                                      "node-launch=zuper_nodes_wrapper.launcher:launcher_main",],},
)
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/setup.py
|
setup.py
|
from nose.tools import assert_equal
from zuper_commons.types import check_isinstance
from zuper_nodes import (
ChannelName,
Either,
ExpectInputReceived,
ExpectOutputProduced,
InSequence,
Language,
OneOrMore,
ZeroOrMore,
ZeroOrOne,
)
from zuper_nodes.language_parse import Syntax
def parse_language(s: str) -> Language:
    """Parses a language expression into its Language AST (test helper)."""
    parsed = Syntax.language.parseString(s, parseAll=True)
    return parsed[0]
def expect_parse(expr, s, expected):
    """Parses `s` with grammar element `expr`; if `expected` is not None,
    asserts that the resulting AST equals it."""
    check_isinstance(s, str)
    check_isinstance(expected, (type(None), Language))
    outcome = expr.parseString(s, parseAll=True)[0]
    print(f"Obtained: {outcome}")
    print(f"Expected: {expected}")
    if expected:
        assert_equal(outcome, expected)
# --- parser unit tests: one grammar production each ---


def test_parse_language_01():
    # terminal: "in:<name>"
    s = "in:name"
    e = ExpectInputReceived(ChannelName("name"))
    expect_parse(Syntax.input_received, s, e)
    expect_parse(Syntax.language, s, e)


def test_parse_language_02():
    # terminal: "out:<name>"
    s = "out:name"
    e = ExpectOutputProduced(ChannelName("name"))
    expect_parse(Syntax.output_produced, s, e)


def test_parse_language_03():
    # sequencing with ";"
    s = "out:first ; in:second"
    e = InSequence((ExpectOutputProduced(ChannelName("first")), ExpectInputReceived(ChannelName("second"))))
    expect_parse(Syntax.language, s, e)


def test_parse_language_04():
    # Kleene star
    s = "(out:first)*"
    e = ZeroOrMore(ExpectOutputProduced(ChannelName("first")))
    expect_parse(Syntax.language, s, e)


def test_parse_language_05():
    # optional
    s = "(out:first)?"
    e = ZeroOrOne(ExpectOutputProduced(ChannelName("first")))
    expect_parse(Syntax.language, s, e)


def test_parse_language_06():
    # one-or-more
    s = "(out:first)+"
    e = OneOrMore(ExpectOutputProduced(ChannelName("first")))
    expect_parse(Syntax.language, s, e)


def test_parse_language_07():
    # alternation, with and without parentheses
    s = "out:first | out:second"
    e = Either((ExpectOutputProduced(ChannelName("first")), ExpectOutputProduced(ChannelName("second"))))
    expect_parse(Syntax.language, s, e)
    s2 = "(out:first | out:second)"
    expect_parse(Syntax.language, s2, e)


def test_parse_language_08():
    # larger nested expression; only checks that parsing succeeds
    s = """
        (
            in:next_episode ; (
                out:no_episodes |
                (out:episode_start ;
                    (in:next_image ; (out:image | out:episode_end))*)
            )
        )*
    """
    expect_parse(Syntax.language, s, None)


def test_parse_language_09():
    # opposite() is an involution: applying it twice yields the original
    s = """
        (
            in:next_episode ; (
                out:no_episodes |
                (out:episode_start ;
                    (in:next_image ; (out:image | out:episode_end))*)
            )
        )*
    """
    language = parse_language(s)
    op1 = language.opposite()
    op2 = op1.opposite()
    assert op2 == language
#
# def test_parse_language_08():
# s = """
# (
# in:next_episode ; (
# out:no_episodes |
# (out:episode_start ;
# (in:next_image ; (out:image | out:episode_end))*)
# )
# )*
# """
#
# expect_parse(Syntax.language, s, None)
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes_tests/test_language.py
|
test_language.py
|
import os
from typing import List, Sequence, Union
from networkx.drawing.nx_pydot import write_dot
from zuper_commons.fs import make_sure_dir_exists
from zuper_nodes import ChannelName, Event, InputReceived, Language, logger, OutputProduced
from zuper_nodes.language_parse import language_to_str, parse_language
from zuper_nodes.language_recognize import Enough, LanguageChecker, NeedMore, Unexpected
from zuper_nodes_wrapper.meta_protocol import basic_protocol
def assert_seq(s: Union[str, Language], seq: List[Event], expect: Sequence[type], final: type):
    """
    Feeds the event sequence `seq` into a LanguageChecker for language `s` and
    checks that each push() returns an instance of the corresponding type in
    `expect`, and that finish() returns an instance of `final`.

    Also round-trips the language through language_to_str/parse_language, and
    dumps the checker's automaton to out-tests/language.dot for inspection.
    """
    if isinstance(s, str):
        # normalize whitespace so the round-trip comparison is meaningful
        s = s.replace("\n", " ").strip()
        while "  " in s:
            s = s.replace("  ", " ")
        l = parse_language(s)
    else:  # pragma: no cover
        l = s

    s2 = language_to_str(l)
    print(s)
    print(s2)
    l2 = parse_language(s2)
    assert l == l2, (s, s2)

    pc = LanguageChecker(l)
    logger.info(f"Active start: {pc.get_active_states_names()}")

    dn = "out-tests"
    fn = os.path.join(dn, "language.dot")
    make_sure_dir_exists(fn)
    write_dot(pc.g, fn)
    logger.info(f"Written to {fn}")

    # all except last
    for i, (e, r) in enumerate(zip(seq, expect)):
        logger.info(f"Active before: {pc.get_active_states_names()}")
        logger.info(f"Event {e}")
        res = pc.push(e)
        logger.info(f"Active after: {pc.get_active_states_names()}")
        if not isinstance(res, r):  # pragma: no cover
            msg = f"Input {i} ({e}) response was {type(res).__name__} instead of {r.__name__}"
            msg += f"\n entire sequence: {seq}"
            msg += f"\n language: {l}"
            msg += f"\n language string: {s2}"
            raise Exception(msg)
    res = pc.finish()
    if not isinstance(res, final):  # pragma: no cover
        msg = f"finish response was {type(res).__name__} instead of {final.__name__}"
        msg += f"\n entire sequence: {seq}"
        msg += f"\n language: {l}"
        msg += f"\n language string: {s2}"
        raise Exception(msg)
# --- language-recognizer tests: single events, sequencing, *, +, ?, | ---


def test_proto_out1():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("out:a", seq, (Enough,), Enough)


def test_proto_in1():
    seq = [InputReceived(ChannelName("a"))]
    assert_seq("in:a", seq, (Enough,), Enough)


def test_proto3():
    # direction mismatch: input where output expected
    seq = [InputReceived(ChannelName("a"))]
    assert_seq("out:a", seq, (Unexpected,), Unexpected)


def test_proto4():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("in:a", seq, (Unexpected,), Unexpected)


def test_proto05():
    # channel-name mismatch
    seq = [InputReceived(ChannelName("b"))]
    assert_seq("in:a", seq, (Unexpected,), Unexpected)


def test_proto06():
    seq = [OutputProduced(ChannelName("b"))]
    assert_seq("in:a", seq, (Unexpected,), Unexpected)


def test_proto07():
    seq = [OutputProduced(ChannelName("a")), OutputProduced(ChannelName("b"))]
    assert_seq("out:a ; out:b", seq, (NeedMore, Enough), Enough)


def test_proto08():
    seq = [OutputProduced(ChannelName("a")), OutputProduced(ChannelName("b"))]
    assert_seq("out:a ; out:b ; out:b", seq, (NeedMore, NeedMore), NeedMore)


def test_proto09():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("out:a ; out:b", seq, (NeedMore,), NeedMore)


def test_proto10():
    # extra trailing event is rejected
    seq = [
        OutputProduced(ChannelName("a")),
        OutputProduced(ChannelName("b")),
        OutputProduced(ChannelName("c")),
    ]
    assert_seq("out:a ; out:b", seq, (NeedMore, Enough, Unexpected), Unexpected)


def test_proto_zom_01():
    # zero-or-more accepts the empty sequence
    seq = []
    assert_seq("out:a *", seq, (), Enough)


def test_proto_zom_02():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("out:a *", seq, (Enough,), Enough)


def test_proto_zom_03():
    seq = [OutputProduced(ChannelName("a")), OutputProduced(ChannelName("a"))]
    assert_seq("out:a *", seq, (Enough, Enough), Enough)


def test_proto_either_01():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("out:a | out:b ", seq, (Enough,), Enough)


def test_proto_either_02():
    seq = [OutputProduced(ChannelName("b"))]
    assert_seq("out:a | out:b ", seq, (Enough,), Enough)


def test_proto_either_03():
    seq = [OutputProduced(ChannelName("c"))]
    assert_seq("out:a | out:b | out:c ", seq, (Enough,), Enough)


def test_proto_either_04():
    # alternation over sequences: first branch
    seq = [OutputProduced(ChannelName("a")), OutputProduced(ChannelName("b"))]
    assert_seq("(out:a ; out:b) | (out:b ; out:a) ", seq, (NeedMore, Enough), Enough)


def test_proto_either_05():
    # alternation over sequences: second branch
    seq = [OutputProduced(ChannelName("b")), OutputProduced(ChannelName("a"))]
    assert_seq("(out:a ; out:b) | (out:b ; out:a) ", seq, (NeedMore, Enough,), Enough)


def test_proto_oom_01():
    # one-or-more rejects the empty sequence
    seq = []
    assert_seq("out:a +", seq, (), NeedMore)


def test_proto_oom_02():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("out:a +", seq, (Enough,), Enough)


def test_proto_oom_03():
    seq = [OutputProduced(ChannelName("a")), OutputProduced(ChannelName("a"))]
    assert_seq("out:a +", seq, (Enough, Enough), Enough)


def test_proto_zoom_01():
    # zero-or-one accepts the empty sequence
    seq = []
    assert_seq("out:a ?", seq, (), Enough)


def test_proto_zoom_02():
    seq = [OutputProduced(ChannelName("a"))]
    assert_seq("out:a ?", seq, (Enough,), Enough)


def test_proto_zoom_03():
    # ... but not two occurrences
    seq = [OutputProduced(ChannelName("a")), OutputProduced(ChannelName("a"))]
    assert_seq("out:a ?", seq, (Enough, Unexpected), Unexpected)


def test_protocol_complex1():
    l = """
        (
            in:next_episode ; (
                out:no_more_episodes |
                (out:episode_start ;
                    (in:next_image ; (out:image | out:no_more_images))*)
            )
        )*
    """
    seq = [InputReceived(ChannelName("next_episode")), OutputProduced(ChannelName("episode_start"))]
    assert_seq(l, seq, (NeedMore, Enough), Enough)


def test_protocol_complex1_0():
    l = """
        in:next_episode ; (
            out:no_more_episodes |
            (out:episode_start ;
                (in:next_image ; (out:image | out:no_more_images))*)
        )
    """
    seq = [InputReceived(ChannelName("next_episode")), OutputProduced(ChannelName("no_more_episodes"))]
    assert_seq(l, seq, (NeedMore, Enough), Enough)


def test_protocol_complex1_1():
    l = """
        in:next_episode ; (
            out:no_more_episodes |
            (out:episode_start ;
                (in:next_image ; (out:image | out:no_more_images))*)
        )
    """
    seq = [InputReceived(ChannelName("next_episode")), OutputProduced(ChannelName("episode_start"))]
    assert_seq(l, seq, (NeedMore, Enough), Enough)


def test_protocol_complex1_2():
    l = """
        in:next_episode ; (
            out:no_more_episodes |
            (out:episode_start ;
                (in:next_image ; (out:image | out:no_more_images))*)
        )
    """
    seq = [
        InputReceived(ChannelName("next_episode")),
        OutputProduced(ChannelName("episode_start")),
        InputReceived(ChannelName("next_image")),
        OutputProduced(ChannelName("image")),
    ]
    assert_seq(l, seq, (NeedMore, Enough), Enough)


def test_protocol_complex1_3():
    # starting mid-protocol is rejected
    l = """
        (
            in:next_episode ; (
                out:no_more_episodes |
                (out:episode_start ;
                    (in:next_image ; (out:image | out:no_more_images))*)
            )
        )*
    """
    seq = [
        InputReceived(ChannelName("next_image")),
    ]
    assert_seq(l, seq, (Unexpected,), Unexpected)


def test_protocol_complex1_3b():
    l = """
        (
            in:next_episode ; (
                out:no_more_episodes |
                (out:episode_start ;
                    (in:next_image ; (out:image | out:no_more_images))*)
            )
        )*
    """
    seq = [
        InputReceived(ChannelName("next_image")),
    ]
    assert_seq(l, seq, (Unexpected,), Unexpected)


def test_protocol_complex1_3c():
    # same property holds for progressively simpler variants of the language
    l = """
        (
            in:next_episode ; (
                (out:episode_start ;
                    (in:next_image)*)
            )
        )*
    """
    seq = [
        InputReceived(ChannelName("next_image")),
    ]
    assert_seq(l, seq, (Unexpected,), Unexpected)


def test_protocol_complex1_3e():
    l = """
        (
            in:next_episode ; (
                (out:episode_start ;
                    (in:next_image)*)
            )
        )
    """
    seq = [
        InputReceived(ChannelName("next_image")),
    ]
    assert_seq(l, seq, (Unexpected,), Unexpected)


def test_protocol_complex1_3d():
    l = """
        (
            in:next_episode ; (
                (out:episode_start ;
                    (in:next_image))
            )
        )*
    """
    seq = [
        InputReceived(ChannelName("next_image")),
    ]
    assert_seq(l, seq, (Unexpected,), Unexpected)


def test_protocol_complex1_3v():
    l0 = """
        out:episode_start ;
        (in:next_image ; (out:image | out:no_more_images))*
    """
    seq = [OutputProduced(ChannelName("episode_start"))]
    assert_seq(l0, seq, (Enough,), Enough)


def test_basic_protocol1():
    # the built-in meta protocol accepts a set_config input as a prefix
    l0 = basic_protocol.language
    seq = [InputReceived(ChannelName("set_config"))]
    assert_seq(l0, seq, (NeedMore,), NeedMore)
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes_tests/test_protocol.py
|
test_protocol.py
|
from dataclasses import dataclass
from nose.tools import assert_raises
from zuper_nodes import check_compatible_protocol, IncompatibleProtocol, InteractionProtocol
from zuper_nodes.language import opposite, particularize_no_check
from . import logger
@dataclass
class Empty:
    """ A dataclass with no fields. NOTE(review): unused in this module. """
    pass
# --- protocol-compatibility (subtyping) tests ---


def test_subprotocol_obs():
    # this is the story of the duckiebot
    protocol_agent = InteractionProtocol(
        description="",
        inputs={"observations": object},
        outputs={"commands": object},
        language="""
            (in:observations ; out:commands)*
        """,
    )

    @dataclass
    class Commands2018:
        u: int

    @dataclass
    class Commands2020:
        u: int
        # v: float

    @dataclass
    class Obs2018:
        camera: float

    @dataclass
    class Obs2020:
        camera: float
        odometry: float

    protocol_agent_2018 = particularize_no_check(
        protocol_agent, inputs={"observations": Obs2018}, outputs={"commands": Commands2018}
    )
    protocol_agent_2020 = particularize_no_check(
        protocol_agent, inputs={"observations": Obs2020}, outputs={"commands": Commands2020}
    )
    # logger.info(protocol_agent_2018=protocol_agent_2018, protocol_agent_2020=protocol_agent_2020)
    # Every agent2018 is an agent2020
    check_compatible_protocol(protocol_agent_2018, protocol_agent_2020)
    assert_raises(IncompatibleProtocol, check_compatible_protocol, protocol_agent_2020, protocol_agent_2018)


def test_subprotocol_cmds():
    # this is the story of the duckiebot
    @dataclass
    class Commands2018:
        u: int

    @dataclass
    class Commands2020:
        u: int
        v: float

    @dataclass
    class Obs2018:
        camera: float

    @dataclass
    class Obs2020:
        camera: float

    protocol_agent = InteractionProtocol(
        description="",
        inputs={"observations": object},
        outputs={"commands": object},
        language="""
            (in:observations ; out:commands)*
        """,
    )
    protocol_agent_2018 = particularize_no_check(
        protocol_agent, inputs={"observations": Obs2018}, outputs={"commands": Commands2018}
    )
    protocol_agent_2020 = particularize_no_check(
        protocol_agent, inputs={"observations": Obs2020}, outputs={"commands": Commands2020}
    )
    # logger.info(protocol_agent_2018=protocol_agent_2018, protocol_agent_2020=protocol_agent_2020)
    # Every agent2020 is an agent2018
    check_compatible_protocol(protocol_agent_2020, protocol_agent_2018)
    assert_raises(IncompatibleProtocol, check_compatible_protocol, protocol_agent_2018, protocol_agent_2020)
    # taking the opposite protocol flips the direction of compatibility
    protocol_agent_2020_op = opposite(protocol_agent_2020)
    protocol_agent_2018_op = opposite(protocol_agent_2018)
    check_compatible_protocol(protocol_agent_2018_op, protocol_agent_2020_op)
    assert_raises(
        IncompatibleProtocol, check_compatible_protocol, protocol_agent_2020_op, protocol_agent_2018_op
    )


def test_subprotocol_channels_inputs():
    # a protocol with extra output channels is a subprotocol
    protocol_agent_2018 = InteractionProtocol(
        description="",
        inputs={"observations": object},
        outputs={"commands": object},
        language="""
            (in:observations ; out:commands)*
        """,
    )
    protocol_agent_2020 = InteractionProtocol(
        description="",
        inputs={"observations": object},
        outputs={"commands": object, "extra": int},
        language="""
            (in:observations ; out:commands)*
        """,
    )
    # Every agent2020 is an agent2018
    check_compatible_protocol(protocol_agent_2020, protocol_agent_2018)
    assert_raises(IncompatibleProtocol, check_compatible_protocol, protocol_agent_2018, protocol_agent_2020)
    protocol_agent_2020_op = opposite(protocol_agent_2020)
    protocol_agent_2018_op = opposite(protocol_agent_2018)
    check_compatible_protocol(protocol_agent_2018_op, protocol_agent_2020_op)
    assert_raises(
        IncompatibleProtocol, check_compatible_protocol, protocol_agent_2020_op, protocol_agent_2018_op
    )


def test_subprotocol_channels_outputs():
    # a protocol requiring fewer input channels is a subprotocol
    protocol_agent_2018 = InteractionProtocol(
        description="",
        inputs={"observations": object, "extra": int},
        outputs={"commands": object},
        language="""
            (in:observations ; out:commands)*
        """,
    )
    protocol_agent_2020 = InteractionProtocol(
        description="",
        inputs={"observations": object},
        outputs={"commands": object},
        language="""
            (in:observations ; out:commands)*
        """,
    )
    # Every agent2020 is an agent2018
    check_compatible_protocol(protocol_agent_2020, protocol_agent_2018)
    assert_raises(IncompatibleProtocol, check_compatible_protocol, protocol_agent_2018, protocol_agent_2020)
    protocol_agent_2020_op = opposite(protocol_agent_2020)
    protocol_agent_2018_op = opposite(protocol_agent_2018)
    check_compatible_protocol(protocol_agent_2018_op, protocol_agent_2020_op)
    assert_raises(
        IncompatibleProtocol, check_compatible_protocol, protocol_agent_2020_op, protocol_agent_2018_op
    )
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes_tests/test_subprotocol.py
|
test_subprotocol.py
|
from zuper_commons.logs import ZLogger

# package logger for the test suite
logger = ZLogger(__name__)

from . import test_language, test_protocol
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes_tests/__init__.py
|
__init__.py
|
import pyparsing
from pyparsing import (
Suppress,
Literal,
Keyword,
ParserElement,
pyparsing_common,
opAssoc,
)
try:
from pyparsing import operatorPrecedence
except ImportError: # pragma: no cover
from pyparsing import infixNotation as operatorPrecedence
from .language import (
ExpectInputReceived,
ExpectOutputProduced,
InSequence,
ZeroOrMore,
ZeroOrOne,
Either,
Language,
OneOrMore,
)
__all__ = [
"parse_language",
"language_to_str",
"Syntax",
]
# packrat memoization speeds up pyparsing considerably on this grammar
ParserElement.enablePackrat()

# short aliases used when building the grammar below
S = Suppress
L = Literal
K = Keyword
def parse_language(s: str) -> Language:
    """Parses `s` into a Language AST; wraps parse errors with the input text."""
    try:
        parsed = Syntax.language.parseString(s, parseAll=True)
    except pyparsing.ParseException as e:
        msg = f"Cannot parse the language:\n\n{s}"
        raise Exception(msg) from e
    return parsed[0]
def language_to_str(l: Language):
    """Serializes a Language AST back to its concrete syntax."""

    def paren_if_compound(s):
        # parenthesize sub-expressions that contain binary operators
        return "(" + s + ")" if (";" in s or "|" in s) else s

    if isinstance(l, ExpectInputReceived):
        return f"in:{l.channel}"
    if isinstance(l, ExpectOutputProduced):
        return f"out:{l.channel}"
    if isinstance(l, InSequence):
        return " ; ".join(paren_if_compound(language_to_str(part)) for part in l.ls)
    if isinstance(l, Either):
        return " | ".join(paren_if_compound(language_to_str(part)) for part in l.ls)
    if isinstance(l, ZeroOrMore):
        return "(" + language_to_str(l.l) + ")" + "*"
    if isinstance(l, OneOrMore):
        return "(" + language_to_str(l.l) + ")" + "+"
    if isinstance(l, ZeroOrOne):
        return "(" + language_to_str(l.l) + ")" + "?"
    raise NotImplementedError(type(l))
def on_input_received(s, loc, tokens):
    """Parse action: `in:<channel>` -> ExpectInputReceived node."""
    return ExpectInputReceived(tokens[0])


def on_output_produced(s, loc, tokens):
    """Parse action: `out:<channel>` -> ExpectOutputProduced node."""
    return ExpectOutputProduced(tokens[0])


def on_in_sequence(tokens):
    """Parse action for infix `;`: fold operands into an InSequence node."""
    return InSequence(tuple(tokens[0]))


def on_either(tokens):
    """Parse action for infix `|`: fold operands into an Either node."""
    return Either(tuple(tokens[0]))


def on_zero_or_one(tokens):
    """Parse action for postfix `?`."""
    return ZeroOrOne(tokens[0][0])


def on_zero_or_more(tokens):
    """Parse action for postfix `*`."""
    return ZeroOrMore(tokens[0][0])


def on_one_or_more(tokens):
    """Parse action for postfix `+`."""
    return OneOrMore(tokens[0][0])
class Syntax:
    """pyparsing grammar for the interaction language.

    Atoms are `in:<identifier>` and `out:<identifier>`; they combine with
    postfix `*`, `+`, `?` and infix `;` (sequence) and `|` (choice).
    """

    # The `in:` / `out:` markers are suppressed; only the channel name is kept.
    input_received = S(K("in") + L(":")) + pyparsing_common.identifier
    output_produced = S(K("out") + L(":")) + pyparsing_common.identifier
    basic = input_received | output_produced

    # Precedence: postfix repetition binds tighter than `;`, which binds
    # tighter than `|`.  Each operator's parse action builds the AST node.
    language = operatorPrecedence(
        basic,
        [
            (S(L("*")), 1, opAssoc.LEFT, on_zero_or_more),
            (S(L("+")), 1, opAssoc.LEFT, on_one_or_more),
            (S(L("?")), 1, opAssoc.LEFT, on_zero_or_one),
            (S(L(";")), 2, opAssoc.LEFT, on_in_sequence),
            (S(L("|")), 2, opAssoc.LEFT, on_either),
        ],
    )

    # Attach AST-building actions to the atomic parsers.
    input_received.setParseAction(on_input_received)
    output_produced.setParseAction(on_output_produced)
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes/language_parse.py
|
language_parse.py
|
from zuper_commons.types import ZException
from zuper_typing import can_be_used_as2
from .language import InteractionProtocol
__all__ = ["IncompatibleProtocol", "check_compatible_protocol"]
class IncompatibleProtocol(ZException):
    """Raised when one protocol cannot be used in place of another."""
    pass
def check_compatible_protocol(p1: InteractionProtocol, p2: InteractionProtocol):
    """
    Checks that p1 is a subprotocol of p2, that is, we can use a p1-node
    wherever a p2-node fits.
    :raises: IncompatibleProtocol
    """
    try:
        p1_in, p2_in = set(p1.inputs), set(p2.inputs)
        p1_out, p2_out = set(p1.outputs), set(p2.outputs)

        # p1 must not require inputs that p2 does not provide.
        extra_inputs = p1_in - p2_in
        if extra_inputs:
            raise IncompatibleProtocol(
                "P1 needs more inputs.",
                p1_inputs=sorted(p1.inputs),
                p2_inputs=sorted(p2.inputs),
                p1_needs_more=extra_inputs,
            )

        # p1 must produce every output that p2 promises.
        missing_outputs = p2_out - p1_out
        if missing_outputs:
            raise IncompatibleProtocol(
                "P1 has missing outputs.",
                p1_outputs=sorted(p1.outputs),
                p2_outputs=sorted(p2.outputs),
                p1_missing_output=missing_outputs,
            )

        # Inputs: whatever p2 would feed in (v2) must be acceptable where
        # p1 expects v1.
        for k in p1_in & p2_in:
            v1, v2 = p1.inputs[k], p2.inputs[k]
            r = can_be_used_as2(v2, v1)
            if not r:
                raise IncompatibleProtocol(
                    f'For input "{k}", cannot use type v2 as v1',
                    k=k,
                    v1=v1,
                    v2=v2,
                    r=r,
                    p1_inputs=p1.inputs,
                    p2_inputs=p2.inputs,
                )

        # Outputs: what p1 produces (v1) must be usable where p2's consumers
        # expect v2.
        for k in p1_out & p2_out:
            v1, v2 = p1.outputs[k], p2.outputs[k]
            r = can_be_used_as2(v1, v2)
            if not r:
                raise IncompatibleProtocol(
                    f'For output "{k}", cannot use type v1 as v2.',
                    k=k,
                    v1=v1,
                    v2=v2,
                    r=r,
                    p1_outputs=p1.outputs,
                    p2_outputs=p2.outputs,
                )
        # XXX: to finish
    except IncompatibleProtocol as e:
        msg = "Cannot say that p1 is a sub-protocol of p2"
        raise IncompatibleProtocol(msg, p1=p1, p2=p2) from e
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes/compatibility.py
|
compatibility.py
|
from dataclasses import dataclass
from typing import Union, Tuple, Optional, Set
from .language import (
Language,
OutputProduced,
InputReceived,
Event,
ExpectInputReceived,
ExpectOutputProduced,
InSequence,
ZeroOrMore,
Either,
OneOrMore,
ZeroOrOne,
)
from zuper_commons.text import indent
__all__ = ["Enough", "Unexpected", "Always", "LanguageChecker", "NeedMore"]
class Result:
    """Base class for the outcome of feeding events to a LanguageChecker."""
    pass


@dataclass
class Enough(Result):
    """The event sequence seen so far is a complete word of the language."""
    pass


@dataclass
class Unexpected(Result):
    """The event sequence cannot be extended to a word of the language."""
    msg: str

    def __repr__(self):
        return "Unexpected:" + indent(self.msg, " ")


@dataclass
class NeedMore(Result):
    """More events are required before the sequence can be accepted."""
    pass
import networkx as nx
NodeName = Tuple[str, ...]
class Always:
    """Edge marker for epsilon transitions: followed without consuming an event."""
    pass
def get_nfa(
    g: Optional[nx.DiGraph],
    start_node: NodeName,
    accept_node: NodeName,
    l: Language,
    prefix: Tuple[str, ...] = (),
):
    """Recursively add to graph `g` an NFA fragment recognizing language `l`.

    The fragment connects `start_node` to `accept_node`; each edge carries an
    `event_match` attribute that is either a simple-event expectation or
    Always() (an epsilon transition).  `prefix` keeps intermediate node names
    unique across recursive calls.
    """
    # assert start_node != accept_node
    if not start_node in g:
        g.add_node(start_node, label="/".join(start_node))
    if not accept_node in g:
        g.add_node(accept_node, label="/".join(accept_node))
    if isinstance(l, ExpectOutputProduced):
        # Single edge consuming one output event.
        g.add_edge(start_node, accept_node, event_match=l, label=f"out/{l.channel}")
    elif isinstance(l, ExpectInputReceived):
        # Single edge consuming one input event.
        g.add_edge(start_node, accept_node, event_match=l, label=f"in/{l.channel}")
    elif isinstance(l, InSequence):
        # Chain the sub-fragments through fresh intermediate nodes, then
        # close the chain into accept_node with an epsilon edge.
        current = start_node
        for i, li in enumerate(l.ls):
            # if i == len(l.ls) - 1:
            #     n = accept_node
            # else:
            n = prefix + (f"after{i}",)
            g.add_node(n)
            # logger.debug(f'sequence {i} start {current} to {n}')
            get_nfa(g, start_node=current, accept_node=n, prefix=prefix + (f"{i}",), l=li)
            current = n
        g.add_edge(current, accept_node, event_match=Always(), label="always")
    elif isinstance(l, ZeroOrMore):
        # Epsilon straight to accept (zero repetitions), plus a self-loop on
        # accept_node for each further repetition.
        # logger.debug(f'zeroormore {start_node} -> {accept_node}')
        g.add_edge(start_node, accept_node, event_match=Always(), label="always")
        get_nfa(
            g, start_node=accept_node, accept_node=accept_node, l=l.l, prefix=prefix + ("zero_or_more",),
        )
    elif isinstance(l, OneOrMore):
        # One mandatory pass start->accept, then a self-loop on accept.
        # start to accept
        get_nfa(
            g, start_node=start_node, accept_node=accept_node, l=l.l, prefix=prefix + ("one_or_more", "1"),
        )
        # accept to accept
        get_nfa(
            g, start_node=accept_node, accept_node=accept_node, l=l.l, prefix=prefix + ("one_or_more", "2"),
        )
    elif isinstance(l, ZeroOrOne):
        # Either skip via epsilon, or take the sub-fragment once.
        g.add_edge(start_node, accept_node, event_match=Always(), label="always")
        get_nfa(
            g, start_node=start_node, accept_node=accept_node, l=l.l, prefix=prefix + ("zero_or_one",),
        )
    elif isinstance(l, Either):
        # Parallel alternative fragments between the same pair of nodes.
        for i, li in enumerate(l.ls):
            get_nfa(
                g, start_node=start_node, accept_node=accept_node, l=li, prefix=prefix + (f"either{i}",),
            )
    else:
        assert False, type(l)
def event_matches(l: Language, event: Event):
    """Decide whether a concrete event satisfies an edge's expectation.

    Always-edges never match a concrete event: they are epsilon transitions,
    followed separately by LanguageChecker._evolve_empty().
    """
    if isinstance(l, ExpectInputReceived):
        return isinstance(event, InputReceived) and event.channel == l.channel
    if isinstance(l, ExpectOutputProduced):
        return isinstance(event, OutputProduced) and event.channel == l.channel
    if isinstance(l, Always):
        return False
    raise NotImplementedError(l)
# Distinguished node names for the NFA's start and accept states.
START = ("start",)
ACCEPT = ("accept",)
class LanguageChecker:
    """Incremental recognizer for a Language, backed by an NFA.

    Maintains the set of currently `active` NFA states; push() advances the
    set on each observed event and reports whether the sequence so far is
    accepted, needs more events, or cannot be extended to a valid word.
    """

    g: nx.DiGraph
    active: Set[NodeName]

    def __init__(self, language: Language):
        self.g = nx.MultiDiGraph()
        self.start_node = START
        self.accept_node = ACCEPT
        get_nfa(
            g=self.g, l=language, start_node=self.start_node, accept_node=self.accept_node, prefix=(),
        )
        # for (a, b, data) in self.g.out_edges(data=True):
        #     print(f'{a} -> {b} {data["event_match"]}')
        # Give every internal node a short display label (S2, S3, ...);
        # start/accept keep their names.
        a = 2
        for n in self.g:
            if n not in [START, ACCEPT]:
                # noinspection PyUnresolvedReferences
                self.g.nodes[n]["label"] = f"S{a}"
                a += 1
            elif n == START:
                # noinspection PyUnresolvedReferences
                self.g.nodes[n]["label"] = "start"
            elif n == ACCEPT:
                # noinspection PyUnresolvedReferences
                self.g.nodes[n]["label"] = "accept"
        self.active = {self.start_node}
        # logger.debug(f'active {self.active}')
        self._evolve_empty()

    def _evolve_empty(self):
        """Follow epsilon (Always) edges until the active set stabilizes."""
        while True:
            now_active = set()
            for node in self.active:
                nalways = 0
                nother = 0
                for (_, neighbor, data) in self.g.out_edges([node], data=True):
                    # print(f'-> {neighbor} {data["event_match"]}')
                    if isinstance(data["event_match"], Always):
                        now_active.add(neighbor)
                        nalways += 1
                    else:
                        nother += 1
                # Keep the node itself when it has non-epsilon edges (or no
                # epsilon edges at all): it may still consume an event.
                if nother or (nalways == 0):
                    now_active.add(node)
            if self.active == now_active:
                break
            self.active = now_active

    def push(self, event) -> Result:
        """Consume one event, advance the active set, and return the verdict."""
        now_active = set()
        # print(f'push: active is {self.active}')
        # print(f'push: considering {event}')
        for node in self.active:
            for (_, neighbor, data) in self.g.out_edges([node], data=True):
                if event_matches(data["event_match"], event):
                    # print(f'now activating {neighbor}')
                    now_active.add(neighbor)
                # else:
                #     print(f"event_match {event} does not match {data['event_match']}")
        #
        # if not now_active:
        #     return Unexpected('')
        self.active = now_active
        # print(f'push: now active is {self.active}')
        self._evolve_empty()
        # print(f'push: now active is {self.active}')
        return self.finish()

    def finish(self) -> Union[NeedMore, Enough, Unexpected]:
        """Report the verdict for the events consumed so far."""
        # print(f'finish: active is {self.active}')
        if not self.active:
            return Unexpected("no active")
        if self.accept_node in self.active:
            return Enough()
        return NeedMore()

    def get_active_states_names(self):
        """Return the display labels of the currently active NFA states."""
        return [self.g.nodes[_]["label"] for _ in self.active]

    def get_expected_events(self) -> Set:
        """Return the set of event expectations that could be consumed next."""
        events = set()
        for state in self.active:
            for (_, neighbor, data) in self.g.out_edges([state], data=True):
                em = data["event_match"]
                if not isinstance(em, Always):
                    events.add(em)
        return events
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes/language_recognize.py
|
language_recognize.py
|
import socket
import time
from dataclasses import dataclass, field
from typing import Dict, Optional
import numpy as np
__all__ = [
"AIDONodesException",
"ProtocolViolation",
"ExternalProtocolViolation",
"InternalProtocolViolation",
"DecodingError",
"EncodingError",
"Timestamp",
"timestamp_from_seconds",
"TimeSpec",
"local_time",
"TimingInfo",
# "EnvironmentError",
"NotConforming",
"ExternalTimeout",
"ExternalNodeDidNotUnderstand",
"RemoteNodeAborted",
"InternalProblem",
]
class AIDONodesException(Exception):
    """Root of the exception hierarchy for this package."""
    pass


class ProtocolViolation(AIDONodesException):
    """Some party did not follow the interaction protocol."""
    pass


class ExternalProtocolViolation(ProtocolViolation):
    """The protocol violation originated from the remote node."""
    pass


class ExternalNodeDidNotUnderstand(ExternalProtocolViolation):
    """The remote node reported it did not understand a message."""
    pass


class RemoteNodeAborted(ExternalProtocolViolation):
    """The remote node aborted the interaction."""
    pass


class ExternalTimeout(ExternalProtocolViolation):
    """Timed out waiting on the remote node."""
    pass


class InternalProblem(AIDONodesException):
    """A failure inside this node, not a protocol issue."""
    pass


class InternalProtocolViolation(ProtocolViolation):
    """The protocol violation originated from this node itself."""
    pass


class DecodingError(AIDONodesException):
    """Could not decode an incoming message."""
    pass


class EncodingError(AIDONodesException):
    """Could not encode an outgoing message."""
    pass


class NotConforming(AIDONodesException):
    """ The node is not conforming to the protocol. """

    pass
#
#
# class EnvironmentError(AIDONodesException):
# """ Things such as files not existing. """
#
# pass
@dataclass
class Timestamp:
    """A time instant split into whole seconds and sub-second microseconds."""

    s: int  # whole seconds
    us: int  # microseconds within the second; expected range [0, 1_000_000)


def timestamp_from_seconds(f: float) -> Timestamp:
    """Convert a float number of seconds into a Timestamp.

    Bug fix: the fractional part was previously multiplied by 1e9
    (nanoseconds) while being stored in the `us` (microseconds) field,
    producing values up to 1e9 in a field whose name implies < 1e6.
    """
    s = int(np.floor(f))
    extra = f - s
    # fraction of a second -> microseconds
    us = int(extra * 1000 * 1000)
    return Timestamp(s, us)
@dataclass
class TimeSpec:
    """A timestamp together with the frame and clock it was measured in."""

    time: Timestamp
    frame: str  # reference frame of the timestamp (e.g. "epoch")
    clock: str  # identifier of the clock that produced it (e.g. a hostname)
    time2: Optional[Timestamp] = None
def local_time() -> TimeSpec:
    """Sample the current wall-clock time on this host.

    The frame is the Unix epoch; the clock is identified by the hostname.
    """
    now = time.time()
    return TimeSpec(
        time=timestamp_from_seconds(now),
        frame="epoch",
        clock=socket.gethostname(),
    )
@dataclass
class TimingInfo:
    """Timing metadata attached to a message."""

    # When each upstream datum was acquired, keyed by name.
    acquired: Optional[Dict[str, TimeSpec]] = field(default_factory=dict)
    # When each processing step happened, keyed by name.
    processed: Optional[Dict[str, TimeSpec]] = field(default_factory=dict)
    # When this message was received.
    received: Optional[TimeSpec] = None
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes/structures.py
|
structures.py
|
__version__ = "6.2.17"

import os

from zuper_commons.logs import ZLogger

import pyparsing

logger = ZLogger(__name__)

# Package root directory, logged at import time to help diagnose installs.
path = os.path.dirname(os.path.dirname(__file__))
logger.debug(f"version {__version__} path {path} pyparsing {pyparsing.__version__}")
from .language import *
from .language_parse import *
from .language_recognize import *
from .structures import *
from .compatibility import *
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes/__init__.py
|
__init__.py
|
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Dict, Iterator, NewType, Optional, Tuple, TYPE_CHECKING
__all__ = [
"InteractionProtocol",
"particularize",
"opposite",
"ChannelName",
"Either",
"ExpectInputReceived",
"ExpectOutputProduced",
"InSequence",
"Language",
"ZeroOrOne",
"OneOrMore",
"ZeroOrMore",
"OutputProduced",
"InputReceived",
"Event",
"particularize_no_check",
]
# At type-checking time ChannelName is a distinct NewType for extra safety;
# at runtime it is a plain str so no wrapper call is involved.
if TYPE_CHECKING:
    ChannelName = NewType("ChannelName", str)
else:
    ChannelName = str
class Event:
    """Base class for observable events on a node's channels."""
    pass
@dataclass(frozen=True, unsafe_hash=True)
class InputReceived(Event):
    """Event: a message arrived on the given input channel."""

    channel: ChannelName


@dataclass(frozen=True, unsafe_hash=True)
class OutputProduced(Event):
    """Event: a message was emitted on the given output channel."""

    channel: ChannelName
# Language over events
class Language(metaclass=ABCMeta):
    """Abstract base for expressions of the language over channel events."""

    @abstractmethod
    def collect_simple_events(self) -> Iterator[Event]:
        """Yield every atomic event mentioned anywhere in this expression."""
        ...

    @abstractmethod
    def opposite(self) -> "Language":
        """Return the dual expression with inputs and outputs swapped."""
        ...
@dataclass(frozen=True, unsafe_hash=True)
class ExpectInputReceived(Language):
    """Atomic expression: expect one message on an input channel."""

    channel: ChannelName

    def collect_simple_events(self):
        yield InputReceived(self.channel)

    def opposite(self) -> "Language":
        # Dual: what one side receives, the other side produces.
        return ExpectOutputProduced(self.channel)


@dataclass(frozen=True, unsafe_hash=True)
class ExpectOutputProduced(Language):
    """Atomic expression: expect one message on an output channel."""

    channel: ChannelName

    def collect_simple_events(self):
        yield OutputProduced(self.channel)

    def opposite(self) -> "Language":
        # Dual: what one side produces, the other side receives.
        return ExpectInputReceived(self.channel)
@dataclass(frozen=True, unsafe_hash=True)
class InSequence(Language):
    """Sequential composition: each sub-language in order (`;`)."""

    ls: Tuple[Language, ...]

    def collect_simple_events(self):
        for l in self.ls:
            yield from l.collect_simple_events()

    def opposite(self) -> "Language":
        ls = tuple(_.opposite() for _ in self.ls)
        return InSequence(ls)
@dataclass(frozen=True, unsafe_hash=True)
class ZeroOrOne(Language):
    """Optional occurrence of a sub-language (`?`)."""

    l: Language

    def collect_simple_events(self):
        yield from self.l.collect_simple_events()

    def opposite(self) -> "Language":
        return ZeroOrOne(self.l.opposite())


@dataclass(frozen=True, unsafe_hash=True)
class ZeroOrMore(Language):
    """Any number of repetitions of a sub-language, including none (`*`)."""

    l: Language

    def collect_simple_events(self):
        yield from self.l.collect_simple_events()

    def opposite(self) -> "Language":
        return ZeroOrMore(self.l.opposite())


@dataclass(frozen=True, unsafe_hash=True)
class OneOrMore(Language):
    """At least one repetition of a sub-language (`+`)."""

    l: Language

    def collect_simple_events(self):
        yield from self.l.collect_simple_events()

    def opposite(self) -> "Language":
        return OneOrMore(self.l.opposite())
@dataclass(frozen=True, unsafe_hash=True)
class Either(Language):
    """Choice between alternative sub-languages (`|`)."""

    ls: Tuple[Language, ...]

    def collect_simple_events(self):
        for l in self.ls:
            yield from l.collect_simple_events()

    def opposite(self) -> "Language":
        ls = tuple(_.opposite() for _ in self.ls)
        return Either(ls)
# Interaction protocol
@dataclass(unsafe_hash=True)
class InteractionProtocol:
    """Declares a node's channels, their types, and the allowed event order.

    On construction, the language string is parsed, every channel it mentions
    is checked against the declared inputs/outputs, and the string is
    normalized to the canonical rendering of the parsed AST.
    """

    # Description
    description: str
    # Type for each input or output
    inputs: Dict[ChannelName, type]
    outputs: Dict[ChannelName, type]
    # The interaction language
    language: str

    # interaction: Language = None

    def __post_init__(self):
        from .language_parse import parse_language, language_to_str

        # Parsed AST of the language string, kept for later use.
        self.interaction = parse_language(self.language)

        # Every channel referenced by the language must be declared.
        simple_events = list(self.interaction.collect_simple_events())
        for e in simple_events:
            if isinstance(e, InputReceived):
                if e.channel not in self.inputs:  # pragma: no cover
                    msg = f'Could not find input channel "{e.channel}" among {sorted(self.inputs)}.'
                    raise ValueError(msg)
            if isinstance(e, OutputProduced):
                if e.channel not in self.outputs:  # pragma: no cover
                    msg = f'Could not find output channel "{e.channel}" among {sorted(self.outputs)}.'
                    raise ValueError(msg)

        # Canonicalize the language string (round-trip through the AST).
        self.language = language_to_str(self.interaction)
def opposite(ip: InteractionProtocol) -> InteractionProtocol:
    """Return the complementary protocol, as seen from the other side.

    Inputs and outputs swap roles, and every in:/out: atom in the language
    is flipped to its dual.
    """
    from .language_parse import language_to_str, parse_language

    flipped = parse_language(ip.language).opposite()
    return InteractionProtocol(
        outputs=ip.inputs,  # what this side consumed, the peer produces
        inputs=ip.outputs,  # and vice versa
        language=language_to_str(flipped),
        description=ip.description,
    )
def particularize(
    ip: InteractionProtocol,
    description: Optional[str] = None,
    inputs: Optional[Dict[ChannelName, type]] = None,
    outputs: Optional[Dict[ChannelName, type]] = None,
) -> InteractionProtocol:
    """Refine a protocol by overriding channel types (and the description).

    Unlike particularize_no_check(), the result is validated: the refined
    protocol must remain usable wherever the original is expected.

    :raises IncompatibleProtocol: if the overrides break substitutability.
    """
    # Delegate construction to the unchecked variant instead of duplicating
    # the merge logic, then verify the refinement is a sub-protocol.
    protocol2 = particularize_no_check(ip, description=description, inputs=inputs, outputs=outputs)

    from .compatibility import check_compatible_protocol

    check_compatible_protocol(protocol2, ip)
    return protocol2
def particularize_no_check(
    ip: InteractionProtocol,
    description: Optional[str] = None,
    inputs: Optional[Dict[ChannelName, type]] = None,
    outputs: Optional[Dict[ChannelName, type]] = None,
) -> InteractionProtocol:
    """Refine a protocol by overriding channel types, without any validation."""
    merged_inputs = {**ip.inputs, **(inputs or {})}
    merged_outputs = {**ip.outputs, **(outputs or {})}
    return InteractionProtocol(
        description or ip.description,
        merged_inputs,
        merged_outputs,
        ip.language,
    )
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes/language.py
|
language.py
|
# TopicName = NewType('TopicName' ,str)
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes_wrapper/types.py
|
types.py
|
# Environment variables used to configure a node wrapper.
ENV_NAME = "AIDONODE_NAME"
ENV_DATA_IN = "AIDONODE_DATA_IN"
ENV_DATA_OUT = "AIDONODE_DATA_OUT"
ENV_META_IN = "AIDONODE_META_IN"
ENV_META_OUT = "AIDONODE_META_OUT"
ENV_TRANSLATE = "AIDONODE_TRANSLATE"
# ENV_ENCODING = "AIDONODE_ENCODING"
ENV_CONFIG = "AIDONODE_CONFIG"
ENV_AIDO_REQUIRE_GPU = "AIDO_REQUIRE_GPU"
# ENV_ENCODING_JSON = 'json'
# ENV_ENCODING_CBOR = 'cbor'
# ENV_ENCODING_VALID = [ENV_ENCODING_JSON, ENV_ENCODING_CBOR]

# All environment variables recognized by the wrapper.
KNOWN = [
    ENV_DATA_IN,
    ENV_DATA_OUT,
    ENV_CONFIG,
    ENV_META_IN,
    ENV_META_OUT,
    ENV_NAME,
    ENV_TRANSLATE,
    ENV_AIDO_REQUIRE_GPU,
    # ENV_ENCODING,
]

TAG_Z2 = "z2"
ATT_CONFIG = "config"
TOPIC_ABORTED = "aborted"

# Message field-name constants.
FIELD_COMPAT = "compat"
CUR_PROTOCOL = "z2"
FIELD_DATA = "data"
FIELD_TOPIC = "topic"
FIELD_TIMING = "timing"
FIELD_CONTROL = "control"

# Values used in the control field.
CTRL_CAPABILITIES = "capabilities"
CTRL_UNDERSTOOD = "understood"
CTRL_NOT_UNDERSTOOD = "not-understood"
CTRL_OVER = "over"
CTRL_ABORTED = "aborted"

CAPABILITY_PROTOCOL_REFLECTION = "protocol-reflection"
|
zuper-nodes-z6
|
/zuper-nodes-z6-6.2.17.tar.gz/zuper-nodes-z6-6.2.17/src/zuper_nodes_wrapper/constants.py
|
constants.py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.