# ---- zero_3rdparty/dict_utils.py ----
from __future__ import annotations
import logging
from collections.abc import Mapping
from typing import Any, TypeVar
from zero_3rdparty.error import BaseError
from zero_3rdparty.id_creator import simple_id
logger = logging.getLogger(__name__)
def update_no_overwrite(source: dict[str, object], updates: dict[str, object]) -> None:
"""
Warning:
Will modify both source and updates
>>> start = {"a": 1}
>>> update_no_overwrite(start, {"a": 2})
>>> start
{'a': 1, 'a_1': 2}
>>> update_no_overwrite(start, {"a": 3})
>>> start
{'a': 1, 'a_1': 2, 'a_2': 3}
>>> update_no_overwrite(start, {"b": 4})
>>> start
{'a': 1, 'a_1': 2, 'a_2': 3, 'b': 4}
>>> [update_no_overwrite(start, {"c": i}) for i in range(20)]
[None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None]
>>> "c_9" in start
True
>>> "c_10" in start
False
"""
existing = source.keys() & updates.keys()
for key in existing:
value = updates.pop(key)
if value == source[key]:
continue
for i in range(1, 10):
new_key = f"{key}_{i}"
if new_key not in source:
break
else:
logger.warning(f"many of the same key: {key}")
random_suffix = simple_id(length=5)
new_key = f"{key}_{random_suffix}"
updates[new_key] = value
source.update(updates)
KT = TypeVar("KT")
VT = TypeVar("VT")
def rename_keys(source: dict[str, VT], renames: dict[str, str]) -> dict[str, VT]:
"""
>>> rename_keys({"a.2": 1, "b": 2}, {"a.2": "a"})
{'a': 1, 'b': 2}
"""
new_dict = {}
for key, value in source.items():
new_key = renames.get(key, key)
new_dict[new_key] = value
return new_dict
def pop_latest(d: dict[KT, VT]) -> VT:
"""
>>> pop_latest({"a": 1, "b": 2})
2
>>> d = {"a": 1, "b": 2}
>>> d["ok"] = "yes"
>>> pop_latest(d)
'yes'
"""
_, value = d.popitem()
return value
class MergeDictError(BaseError):
def __init__(self, path: str):
self.path: str = path
def merge(
a: dict,
b: dict,
path: list[str] | None = None,
allow_overwrite: bool = False,
allow_new: bool = True,
) -> None:
"""merges b into a https://stackoverflow.com/questions/7204805/how-to-
merge-dictionaries-of-dictionaries/7205107#7205107.
>>> a = {1:{"a":"A"},2:{"b":"B"}}
>>> merge(a, {2:{"c":"C"},3:{"d":"D"}})
>>> a
{1: {'a': 'A'}, 2: {'b': 'B', 'c': 'C'}, 3: {'d': 'D'}}
>>> merge(a, {1: "OVERWRITE"}, allow_overwrite=False)
Traceback (most recent call last):
...
zero_3rdparty.dict_utils.MergeDictError: MergeDictError(path='1')
>>> merge(a, {1: "OVERWRITE"}, allow_overwrite=True)
>>> a
{1: 'OVERWRITE', 2: {'b': 'B', 'c': 'C'}, 3: {'d': 'D'}}
>>> before_no_new = dict(a="old")
>>> merge(before_no_new, dict(a="new", b="ignored"), allow_overwrite=True, allow_new=False)
>>> before_no_new
{'a': 'new'}
"""
if path is None:
path = []
for key, value in b.items():
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(
a[key],
b[key],
path + [str(key)],
allow_overwrite=allow_overwrite,
allow_new=allow_new,
)
elif a[key] != value:
if allow_overwrite:
a[key] = value
else:
raise MergeDictError(path=".".join(path + [str(key)]))
elif allow_new:
a[key] = b[key]
def select_existing(existing_vars: dict, new_vars: dict) -> dict:
"""
>>> select_existing(dict(a=1), dict(a=2, b=2))
{'a': 2}
>>> select_existing(dict(a=1, b=dict(c=1)), dict(a=2, b=2))
{'a': 2, 'b': 2}
>>> select_existing(dict(a=1, b=dict(c=1)), dict(a=2, b=dict(c=2)))
{'a': 2, 'b': {'c': 2}}
"""
new_d = {}
for key, value in new_vars.items():
old = existing_vars.get(key)
if not old:
continue
if isinstance(old, dict) and isinstance(value, dict):
new_value = select_existing(old, value)
new_d[key] = new_value
continue
new_d[key] = value
return new_d
def sort_keys(some_dict: dict[KT, VT]) -> dict[KT, VT]:
"""
>>> sort_keys(dict(b=2, a=1, c=3))
{'a': 1, 'b': 2, 'c': 3}
>>> sort_keys(dict(b=2, a=1, c=dict(d=4, a=2)))
{'a': 1, 'b': 2, 'c': {'a': 2, 'd': 4}}
"""
def add_sorted_value(value: VT):
return sort_keys(value) if isinstance(value, dict) else value
return {key: add_sorted_value(some_dict[key]) for key in sorted(some_dict.keys())} # type: ignore
def select_values(some_container: dict | list, allowed_values: tuple[type, ...]):
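    """Recursively keep only the values whose type is in ``allowed_values``,
    dropping other leaves and empty containers.

    Illustrative doctest (added; not from the original source):
    >>> select_values({"a": 1, "b": None, "c": [1, "x", None]}, (int, str))
    {'a': 1, 'c': [1, 'x']}
    """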
def ok_value(value: Any):
if isinstance(value, (dict, list)):
return bool(value)
return isinstance(value, allowed_values)
def unpack(value: Any):
return unpack_list_or_dict(value) if isinstance(value, (dict, list)) else value
def unpack_list_or_dict(some_dict_or_list: dict | list):
if isinstance(some_dict_or_list, dict):
return {
key: unpack(value)
for key, value in some_dict_or_list.items()
if ok_value(value)
}
else:
return [unpack(value) for value in some_dict_or_list if ok_value(value)]
return unpack_list_or_dict(some_container)
def as_case_insensitive(d: Mapping[str, VT]) -> dict[str, VT]:
"""
>>> as_case_insensitive(dict(a=1, B=2, cD=3))
{'a': 1, 'A': 1, 'B': 2, 'b': 2, 'cD': 3, 'cd': 3, 'CD': 3}
"""
new: dict[str, VT] = {}
def add_env(key: str, value: VT) -> None:
new[key] = value
new[key.lower()] = value
new[key.upper()] = value
for key, value in d.items():
add_env(key, value)
    return new


# ---- zero_3rdparty/closable_queue.py ----
from __future__ import annotations
import logging
from collections.abc import Iterable
from contextlib import suppress
from functools import partial
from queue import Empty, Queue
from typing import Generic, TypeVar
logger = logging.getLogger(__name__)
QueueType = TypeVar("QueueType")
_empty = object()
class QueueIsClosed(Exception):
pass
def _raise_queue_is_closed(item: object, *, queue: ClosableQueue, **kwargs):
if item is not ClosableQueue.SENTINEL:
raise QueueIsClosed
Queue.put(queue, item, **kwargs)
class ClosableQueue(Queue[QueueType], Generic[QueueType]):
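    """A Queue that can be closed; iteration stops at an internal sentinel.

    Illustrative usage (added; a minimal sketch, not from the original source):

        q: ClosableQueue[int] = ClosableQueue()
        q.put(1)
        q.put(2)
        q.close()
        assert list(q) == [1, 2]  # iteration ends at the sentinel
        q.put(3)                  # raises QueueIsClosed
    """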
SENTINEL = object()
__QUEUES: list[ClosableQueue] = []
def __init__(self, maxsize: int = 0): # 0 == infinite
super().__init__(maxsize=maxsize)
self.__QUEUES.append(self)
def close(self):
self.put(self.SENTINEL)
self.put = partial(_raise_queue_is_closed, queue=self)
with suppress(Exception):
self.__QUEUES.remove(self)
def close_safely(self):
with suppress(QueueIsClosed):
self.close()
def __iter__(self) -> Iterable[QueueType]:
while True:
item = self.get()
try:
if item is self.SENTINEL:
self.put(item) # ensure next iterator will finish immediately
return # Cause the thread to exit
yield item
finally:
self.task_done()
def has_next(self):
"""
        Warning: side effect: the first item is popped and re-queued, so it moves to the end of the queue.
"""
next = self.pop()
if next is _empty:
return False
self.put(next)
return True
def pop(self, default=_empty):
try:
next = self.get_nowait()
except Empty:
return default
else:
if next is self.SENTINEL:
self.put(self.SENTINEL) # ensure next iterator will finish immediately
return next
def iter_non_blocking(self) -> Iterable[QueueType]:
next_or_sentinel = partial(self.pop, self.SENTINEL)
return iter(next_or_sentinel, self.SENTINEL)
@classmethod
def close_all(cls):
if cls.__QUEUES:
logger.info("closing all queues")
for q in list(cls.__QUEUES): # avoid modification during iteration
q.close()
def get_nowait(self) -> QueueType:
item = super().get_nowait()
if item is self.SENTINEL:
raise QueueIsClosed
        return item


# ---- zero_3rdparty/cache_ttl.py ----
from __future__ import annotations
from collections import defaultdict
from collections.abc import Hashable
from functools import wraps
from inspect import signature
from time import monotonic
from typing import Any, Callable, TypeVar
from typing_extensions import ParamSpec
T = TypeVar("T")
P = ParamSpec("P")
_sentinel = object()
def _wrap_func(func, seconds):
expire_result = (0, _sentinel)
@wraps(func)
def inner(*args, **kwargs):
nonlocal expire_result
now_seconds = monotonic()
expire, call_result = expire_result
if now_seconds < expire and call_result is not _sentinel:
return call_result
call_result = func(*args, **kwargs)
expire_result = (now_seconds + seconds, call_result)
return call_result
def clear():
nonlocal expire_result
expire_result = (0, _sentinel)
inner.clear = clear
return inner
def _wrap_method(
seconds: float, instance_key: Callable[[T], Hashable], meth: Callable[P, Any]
):
expire_times: dict[Hashable, tuple[float, Any]] = defaultdict(
lambda: (0, _sentinel)
)
@wraps(meth)
def inner(self, *args, **kwargs):
now_seconds = monotonic()
key = instance_key(self)
expire, call_result = expire_times[key]
if now_seconds < expire and call_result is not _sentinel:
return call_result
call_result = meth(self, *args, **kwargs)
expire_times[key] = now_seconds + seconds, call_result
return call_result
def clear():
nonlocal expire_times
keys = list(expire_times.keys())
for key in keys:
expire_times[key] = (0, _sentinel)
inner.clear = clear # type: ignore
return inner
def cache_ttl(seconds: float | int) -> Callable[[Callable[P, T]], Callable[P, T]]:
"""simple decorator if you want to cache the results of a call ignoring arguments
Warning:
1. Only caches a 'single value'
2. Expects it to be a method if 'self' is in parameters
'"""
assert isinstance(seconds, (float, int)), "ttl seconds must be int/float"
def decorator(func: Callable[P, T]) -> Callable[P, T]:
if "self" in signature(func).parameters:
return _wrap_method(seconds, id, func)
return _wrap_func(func, seconds)
return decorator
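
# Illustrative usage of cache_ttl (added; a minimal sketch, not from the
# original source; fetch_config is hypothetical):
#
#     @cache_ttl(seconds=0.5)
#     def fetch_config() -> dict:
#         ...  # expensive call
#
#     fetch_config()             # computes and caches
#     fetch_config()             # returns the cached value within 0.5s
#     clear_cache(fetch_config)  # clear_cache is defined below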
def clear_cache(func):
    func.clear()


# ---- zero_3rdparty/object_name.py ----
from __future__ import annotations
import logging
import sys
from collections.abc import Awaitable as ColAwaitable
from collections.abc import Iterable
from functools import lru_cache, partial
from inspect import Parameter, currentframe, isclass, signature
from pathlib import Path
from types import FrameType
from typing import Callable, TypeVar, cast, get_type_hints
from zero_3rdparty.iter_utils import first
logger = logging.getLogger(__name__)
def _name(obj: type[object] | object) -> str:
"""Get object qualified name."""
if not hasattr(obj, "__name__") and hasattr(obj, "__class__"):
obj = obj.__class__
name = getattr(obj, "__qualname__", cast(type[object], obj).__name__)
module_name = getattr(obj, "__module__", "")
if not module_name and isinstance(obj, ColAwaitable):
module_name = obj.cr_frame.f_globals["__name__"] # type: ignore
return ".".join((module_name or "__module__", name))
def short_name(obj: type[object] | object) -> str:
_name = as_name(obj)
if "." in _name:
return _name.rsplit(".", maxsplit=1)[1]
return _name
def as_name(obj: type[object] | object) -> str:
"""Get non-qualified name of obj, resolve real name of ``__main__``.
    Examples:
>>> class A:
... pass
>>> as_name(A).endswith('object_name.A')
True
>>> from functools import partial
>>> partial_name = as_name(partial(A, 'lol'))
>>> "partial" in partial_name
True
>>> partial_name.endswith("object_name.A args: ('lol',), kwargs: {}")
True
>>> async def b(s: str):
... pass
>>> as_name(b).endswith('object_name.b')
True
"""
name_ = _name(obj)
if name_ == "functools.partial":
if isinstance(obj, partial):
return (
f"partial: {as_name(obj.func)} args: {obj.args}, kwargs: {obj.keywords}"
)
parts = name_.split(".")
if parts[0] == "__main__":
return ".".join([_detect_main_name()] + parts[1:])
return name_
@lru_cache(maxsize=1)
def _detect_main_name() -> str: # pragma: no cover
try:
filename = sys.modules["__main__"].__file__
assert filename
except (AttributeError, KeyError, AssertionError): # ipython/REPL
return "__main__"
else:
path = Path(filename).absolute()
node = path.parent
seen = []
while node:
if (node / "__init__.py").exists():
seen.append(node.stem)
node = node.parent
else:
break
return ".".join(seen + [path.stem])
def func_arg_names(
func: Callable, skip_self: bool = True, skip_kwargs: bool = True
) -> list[str]:
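    """Illustrative doctest (added; not from the original source):

    >>> def f(self, a, b=1, **extra): ...
    >>> func_arg_names(f)
    ['a', 'b']
    """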
def filter(param: Parameter) -> bool:
if skip_self and param.name == "self":
return False
if skip_kwargs and param.kind == param.VAR_KEYWORD:
return False
return True
return [
param.name for param in signature(func).parameters.values() if filter(param)
]
def func_arg_types(func: Callable) -> list[type]:
param_types = [
value for _name, value in get_type_hints(func).items() if _name != "return"
]
func_name = as_name(func)
assert len(param_types) == len(
func_arg_names(func, skip_self=True, skip_kwargs=True)
), f"missing type hints on {func_name}"
return param_types
def call_signature(func, args=None, kwargs=None):
args = args or tuple()
kwargs = kwargs or {}
key_value_str = ",".join(f"{k}={v}" for k, v in kwargs.items())
function_name = as_name(func)
return f"{function_name}({args}, {key_value_str})"
def func_arg_name_of_type(
func: Callable, arg_type: type, strict: bool = True
) -> str | None:
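    """Return the name of the first parameter annotated with ``arg_type``.

    Illustrative doctest (added; not from the original source):
    >>> def handler(event: str, count: int): ...
    >>> func_arg_name_of_type(handler, int)
    'count'
    """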
for name, value in get_type_hints(func).items():
if value is arg_type or (not strict and is_subclass(value, arg_type)):
return name
return None
def is_subclass(maybe_class, classes) -> bool:
return isclass(maybe_class) and issubclass(maybe_class, classes)
T = TypeVar("T")
def func_args_of_instance(func: Callable, arg_type: type[T]) -> Iterable[tuple[str, T]]:
for name, value in get_type_hints(func).items():
if isinstance(value, arg_type):
yield name, value
def func_args_of_instance_or_type(
func: Callable, arg_type: type[T]
) -> Iterable[tuple[str, T | type[T]]]:
for name, value in get_type_hints(func).items():
if isinstance(value, arg_type):
yield name, value
elif is_subclass(value, arg_type):
yield name, value
def func_return_type(func: Callable) -> type | None:
return get_type_hints(func).get("return", None)
def func_default_instances(
func: Callable, default_type: type[T]
) -> Iterable[tuple[str, T]]:
for name, parameter in signature(func).parameters.items():
if isinstance(parameter.default, default_type):
yield name, parameter.default
def func_default_instances_or_classes(
func: Callable, default_type: type[T]
) -> Iterable[tuple[str, T | type[T]]]:
for name, parameter in signature(func).parameters.items():
default = parameter.default
if isinstance(default, default_type):
yield name, default
elif isclass(default) and issubclass(default, default_type):
yield name, default
def unpack_optional_or_assume_class(maybe_optional) -> type | None:
args = getattr(maybe_optional, "__args__", [])
if not isclass(maybe_optional) and args and isclass(args[0]):
return args[0]
if isclass(maybe_optional):
return maybe_optional
return None
def unpack_first_arg(function: Callable) -> type:
maybe_optional: type = first(func_arg_types(function))
unpacked = unpack_optional_or_assume_class(maybe_optional)
assert unpacked is not None, f"unable to find cls for {function}"
return unpacked
def as_caller_name(frames_back: int = 2, with_line_no: bool = False) -> str:
frame: FrameType | None = currentframe()
for _ in range(frames_back):
if frame is None:
return ""
frame = frame.f_back
if frame is None:
return ""
code = frame.f_code
if self := frame.f_locals.get("self"):
name = f"{self.__class__.__name__}.{code.co_name}"
else:
name = code.co_name
if with_line_no:
return f"{name}.{frame.f_lineno}"
return name
def caller_module_and_name() -> tuple[str, str]:
code: FrameType = currentframe().f_back.f_back # type: ignore
module = code.f_globals["__name__"]
if self := code.f_locals.get("self"):
return module, f"{self.__class__.__name__}.{code.f_code.co_name}"
return module, code.f_code.co_name
def caller_module_name_line_no_path() -> tuple[str, str, int, str]:
code: FrameType = currentframe().f_back.f_back # type: ignore
module = code.f_globals["__name__"]
path = code.f_globals["__file__"]
if self := code.f_locals.get("self"):
return (
module,
f"{self.__class__.__name__}.{code.f_code.co_name}",
code.f_lineno,
path,
)
    return module, code.f_code.co_name, code.f_lineno, path


# ---- zero_3rdparty/error.py ----
from __future__ import annotations
import logging
import traceback
from asyncio import TimeoutError as AsyncTimeoutError
from concurrent.futures import TimeoutError as ConcTimeoutError
from functools import partial, singledispatch
from types import TracebackType
from typing import TYPE_CHECKING
from typing_extensions import TypeAlias
from zero_3rdparty.enum_utils import StrEnum
logger = logging.getLogger(__name__)
ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType]
class KwargsError(Exception):
"""Used when you want you do not want to define an init for the error."""
if TYPE_CHECKING:
def __getattr__(self, item):
return self.__dict__[item]
def __init__(self, **kwargs):
as_str = ",".join(
f"{key}={value!r}" if isinstance(key, str) else f"{key!r}={value!r}"
for key, value in kwargs.items()
)
cls_name = self.__class__.__name__
self.__dict__.update(kwargs)
super().__init__(f"{cls_name}({as_str})")
def __repr__(self):
return str(self)
class Code(StrEnum):
OK = "OK"
UNKNOWN = "UNKNOWN"
INTERNAL = "INTERNAL"
INVALID_ARGUMENT = "INVALID_ARGUMENT"
NOT_FOUND = "NOT_FOUND"
ALREADY_EXISTS = "ALREADY_EXISTS"
PERMISSION_DENIED = "PERMISSION_DENIED"
UNAUTHENTICATED = "UNAUTHENTICATED"
OUT_OF_RANGE = "OUT_OF_RANGE"
UNIMPLEMENTED = "UNIMPLEMENTED"
TIMEOUT = "TIMEOUT"
@classmethod
def is_crash(cls, code: str):
return is_crash(code)
@classmethod
def is_ok(cls, code: str):
return is_ok(code)
@classmethod
def is_timeout(cls, code: str):
return is_timeout(code)
@classmethod
def is_error(cls, code: str):
return is_error(code)
# use public sets to support extending codes
OK_CODES = {Code.OK}
TIMEOUT_CODES = {Code.TIMEOUT}
CRASH_CODES = {Code.UNKNOWN, Code.INTERNAL, Code.UNIMPLEMENTED}
ERROR_CODES = set(Code) - CRASH_CODES - {Code.OK} - {Code.TIMEOUT}
_all_codes = OK_CODES | TIMEOUT_CODES | CRASH_CODES | ERROR_CODES
_missing_category = set(Code) - _all_codes
assert _missing_category == set(), f"missing category for codes: {_missing_category}"
@singledispatch
def as_error_code(error: object) -> Code:
"""Use register on this method if having a code attribute is not enough."""
raise NotImplementedError
@as_error_code.register(BaseException)
def _base_error_default(error: BaseException) -> Code:
return getattr(error, "code", Code.UNKNOWN) # type: ignore
@as_error_code.register(Code)
def _identity(error: Code):
return error
@as_error_code.register(TimeoutError)
@as_error_code.register(ConcTimeoutError)
@as_error_code.register(AsyncTimeoutError)
def _timeout(error: TimeoutError) -> Code:
return Code.TIMEOUT # type: ignore
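
# Illustrative usage (added; a minimal sketch, not from the original source):
# an exception exposing a ``code`` attribute is classified by the default
# handler; is_error/is_crash are defined below.
#
#     class MissingUser(Exception):
#         code = Code.NOT_FOUND
#
#     assert is_error(MissingUser())
#     assert not is_crash(MissingUser())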
def is_crash(code_or_error: str | BaseException):
code = as_error_code(code_or_error)
return code in CRASH_CODES
def is_ok(code_or_error: str | BaseException):
code = as_error_code(code_or_error)
return code in OK_CODES
def is_timeout(code_or_error: str | BaseException):
code = as_error_code(code_or_error)
return code in TIMEOUT_CODES
def is_error(code_or_error: str | BaseException):
code = as_error_code(code_or_error)
return code in ERROR_CODES
class BaseError(Exception):
"""Used when you want to define an init method for the error, e.g., for
accessing fields."""
code = Code.UNKNOWN
msg_template = ""
def __str__(self):
if self.msg_template:
return self.msg_template.format(**self.__dict__)
args = ", ".join(
f"{key}={value!r}"
for key, value in self.__dict__.items()
if not key.startswith("_")
)
return f"{type(self).__name__}({args})"
def __repr__(self):
return str(self)
def __eq__(self, other):
return str(self) == str(other)
def log_error(logger, error, level=logging.ERROR, *, prefix="") -> None:
error_msg = repr(error)
if prefix:
error_msg = f"{prefix}:{error_msg}"
logger.log(level, error_msg)
if traceback__ := get_tb(error):
logger.log(level, "".join(traceback.format_tb(traceback__)))
def get_tb(error: BaseException) -> TracebackType | None:
"""empty string if no traceback."""
return getattr(error, "__traceback__", None)
def as_str_traceback_from_error(error: Exception) -> str:
return as_str_traceback(getattr(error, "__traceback__", ""))
def as_str_traceback(tb: TracebackType | str | None) -> str:
if isinstance(tb, str):
return tb
elif tb:
return "".join(traceback.format_tb(tb))
return ""
def error_and_traceback(error: BaseException) -> str:
lines = [repr(error)]
if tb_str := as_str_traceback(get_tb(error)):
lines.append(tb_str)
return "\n".join(lines)
def log_error_callable(logger, prefix=""):
    return partial(log_error, logger, prefix=prefix)


# ---- zero_3rdparty/logging_utils.py ----
from __future__ import annotations
import logging
from logging import config
from zero_3rdparty.env_reader import log_format_console, log_max_msg_length
from zero_3rdparty.run_env import running_in_container_environment
logger = logging.getLogger(__name__)
class ReplaceLineBreaks(logging.Filter):
def filter(self, record):
if (msg := getattr(record, "msg", None)) and isinstance(msg, str):
record.msg = record.msg.replace("\n", "\\n\t")
return super().filter(record)
class LimitMessageLength(logging.Filter):
def __init__(self, length=1000, name=""):
super().__init__(name)
self.length = length
def filter(self, record: logging.LogRecord) -> bool:
if (
(msg := getattr(record, "msg", None))
and isinstance(msg, str)
and len(msg) > self.length
):
record.msg = record.msg[: self.length]
return super().filter(record)
def limit_message_length(logger: logging.Logger | None = None) -> None:
max_length = log_max_msg_length()
filter = LimitMessageLength(length=max_length)
if logger is None:
logger = logging.getLogger()
logger.warning(f"adding limit message length={max_length} to handlers!")
for handler in logger.handlers:
handler.addFilter(filter)
else:
logger.addFilter(filter)
def avoid_linebreaks(logger: logging.Logger | None = None):
replace_line_breaks_filter = ReplaceLineBreaks()
if logger is None:
logger = logging.getLogger()
logger.warning("adding replace line breaks to handlers!")
for handler in logger.handlers:
handler.addFilter(replace_line_breaks_filter)
else:
logger.addFilter(replace_line_breaks_filter)
DATE_FMT_SHORT = "%H:%M:%S"
DATE_FMT_LONG = "%Y-%m-%dT%H:%M:%S"
def default_handler() -> dict:
return {
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
"level": logging.INFO,
}
def setup_logging(
handler_dict: dict | None = None, disable_stream_handler: bool = False
):
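    """Configure root logging via ``logging.config.dictConfig``.

    Illustrative usage (added; a minimal sketch, not from the original source;
    the file name below is hypothetical):

        setup_logging()  # stdout handler at INFO using log_format_console()
        setup_logging(handler_dict={
            "class": "logging.FileHandler",
            "filename": "app.log",
            "level": logging.INFO,
        })
    """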
handlers = {} if disable_stream_handler else {"stream": default_handler()}
if handler_dict:
handlers["default"] = handler_dict
assert handlers, "no logging handlers are configured!"
for handler in handlers.values():
handler["formatter"] = "default"
config.dictConfig(
{
"version": 1,
"formatters": {
"default": {
"class": "logging.Formatter",
"format": log_format_console(),
"datefmt": DATE_FMT_LONG,
}
},
"handlers": handlers,
"root": {"handlers": list(handlers.keys()), "level": logging.INFO},
"disable_existing_loggers": False,
}
)
if running_in_container_environment():
avoid_linebreaks()
        limit_message_length()


# ---- zero_3rdparty/iter_utils.py ----
from __future__ import annotations
import inspect
from collections import ChainMap, defaultdict
from collections.abc import Generator
from functools import singledispatch
from itertools import chain, tee
from types import ModuleType
from typing import (
Any,
AsyncIterable,
Callable,
Dict,
Iterable,
List,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
T = TypeVar("T")
async def first_async(async_iter: AsyncIterable[T], default=None) -> Optional[T]:
async for t in async_iter:
return t
return default
def want_set(maybe_set: object) -> set:
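    """Illustrative doctests (added; not from the original source):

    >>> want_set(None)
    set()
    >>> sorted(want_set([1, 2, 2]))
    [1, 2]
    """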
return maybe_set if isinstance(maybe_set, set) else set(want_list(maybe_set))
@singledispatch
def want_list(maybe_list: object) -> list:
"""
>>> want_list((1, 2, 3))
[1, 2, 3]
"""
return [maybe_list]
@want_list.register
def _already_list(maybe_list: list) -> list:
return maybe_list
@want_list.register
def _empty_list_on_none(maybe_list: None) -> list:
return []
@want_list.register
def _exhaust_generator(maybe_list: Generator):
return list(maybe_list)
@want_list.register
def _convert_tuple(maybe_list: tuple) -> list:
return list(maybe_list)
def unique_instance_iter(iterable: Iterable[T]) -> Iterable[T]:
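    """Yield each instance at most once, deduplicating by ``id()``.

    Illustrative doctest (added; not from the original source):
    >>> a = {"x": 1}
    >>> list(unique_instance_iter([a, a, {"x": 1}]))
    [{'x': 1}, {'x': 1}]
    """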
seen_ids = set()
for instance in iterable:
instance_id = id(instance)
if instance_id in seen_ids:
continue
yield instance
seen_ids.add(instance_id)
def flat_map(iterable: Iterable[Iterable[T]]) -> Iterable[T]:
"""
>>> list(flat_map([[1, 2, 3], [4,5,6]]))
[1, 2, 3, 4, 5, 6]
>>> list(flat_map([{1: 2, 3:4}.values(), {5:6, 7:8}.values()]))
[2, 4, 6, 8]
>>> dict(flat_map([{1: 2, 3:4}.items(), {5:6, 7:8}.items()]))
{1: 2, 3: 4, 5: 6, 7: 8}
"""
return chain.from_iterable(iterable)
def first(iterable: Iterable[Any], first_type: Type[T] | None = None) -> T:
"""
>>> first(['a', 'b', 2], int)
2
>>> first(['a', 'b', 2], float)
Traceback (most recent call last):
...
StopIteration
>>> first(['a', 'b', 2])
'a'
"""
if first_type is None:
return next(iter(iterable)) # type: ignore
return next(instance for instance in filter_on_type(iterable, first_type)) # type: ignore
def first_or_none(
iterable: Iterable[Any],
first_type: Type[T] | None = None,
*,
condition: Callable[[T], bool] | None = None,
default: Optional[T] = None,
) -> Optional[T]:
"""
>>> first_or_none(['a', 'b', 2], float)
>>> first_or_none(['a', 'b', 2], int)
2
>>> first_or_none([1,2,3], condition=lambda a: a < 0)
>>> first_or_none([1,2,3], condition=lambda a: a > 2)
3
"""
if condition:
return next((instance for instance in iterable if condition(instance)), default)
if first_type:
return next(
(instance for instance in filter_on_type(iterable, first_type)), default
)
return next((instance for instance in iterable), default)
def filter_on_type(iterable: Iterable[T], t: Type[T]) -> Iterable[T]:
"""
>>> list(filter_on_type(['a', 'b', 2, 3.0, 4], str))
['a', 'b']
>>> list(filter_on_type(['a', 'b', 2, 3.0, 4], float))
[3.0]
>>> list(filter_on_type(['a', 'b', 2, 3.0, 4], int))
[2, 4]
>>> list(filter_on_type(['a', 'b', 2, 3.0, 4], bool))
[]
"""
for i in iterable:
if isinstance(i, t):
yield i
def public_dict(
cls: Union[Type, ModuleType], recursive: bool = False
) -> Dict[str, Any]:
"""
Args:
recursive: go up the chain of base classes
"""
if not inspect.isclass(cls) and not inspect.ismodule(cls):
cls = type(cls)
if recursive and cls is not object:
maps = [public_dict(parent) for parent in cls.__mro__]
return dict(ChainMap(*maps))
return {
name: value for name, value in vars(cls).items() if not name.startswith("_")
}
def public_values(cls: Union[Type, ModuleType], sorted_=True) -> List[Any]:
public_vars = public_dict(cls)
if sorted_:
sorted_kv = sorted(public_vars.items(), key=lambda kv: kv[0])
return [kv[1] for kv in sorted_kv]
return list(public_vars.values())
def cls_bases(cls: Type) -> List[str]:
return [b.__name__ for b in cls.__bases__]
_missing = object()
@singledispatch
def select_attrs(
instance: object, attrs: Iterable[str], skip_none: bool = True
) -> Dict[str, object]:
if skip_none:
return {
attr_name: attr_value
for attr_name in attrs
if (attr_value := getattr(instance, attr_name, None))
}
return {
attr_name: getattr(instance, attr_name)
for attr_name in attrs
if getattr(instance, attr_name, _missing) is not _missing
}
@select_attrs.register
def _select_attrs_from_dict(
instance: dict, attrs: Iterable[str], skip_none: bool = True
) -> Dict[str, object]:
if skip_none:
return {
attr_name: value
for attr_name in attrs
if (value := instance.get(attr_name, None))
}
return {attr_name: instance[attr_name] for attr_name in attrs}
KT = TypeVar("KT")
VT = TypeVar("VT")
def key_equal_value_to_dict(key_values: List[str]) -> Dict[str, str]:
"""
>>> key_equal_value_to_dict(['a=b', 'b=c=d', 'c=lol'])
{'a': 'b', 'b': 'c=d', 'c': 'lol'}
"""
return dict(
name_equal_value.split("=", maxsplit=1) for name_equal_value in key_values
)
def key_values(
dict_object: Dict[KT, VT],
key_filter: Callable[[KT], bool] = lambda _: True,
value_filter: Callable[[VT], bool] = lambda _: True,
) -> Iterable[str]:
return (
f"{key}={value!r}" if isinstance(key, str) else f"{key!r}={value!r}"
for key, value in dict_object.items()
if key_filter(key) and value_filter(value)
)
def transpose(d: Dict[KT, VT]) -> Dict[VT, KT]:
"""
>>> transpose(dict(a=1, b=2))
{1: 'a', 2: 'b'}
"""
return dict(zip(d.values(), d.keys()))
def partition(
iterable: Iterable[T], pred: Optional[Callable[[T], bool]] = None
) -> Tuple[List[T], List[T]]:
"""From more_iterutils Returns a 2-tuple of iterables derived from the
input iterable. The first yields the items that have ``pred(item) ==
False``. The second yields the items that have ``pred(item) == True``.
>>> is_odd = lambda x: x % 2 != 0
>>> iterable = range(10)
>>> even_items, odd_items = partition(iterable, is_odd)
>>> list(even_items), list(odd_items)
([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
If *pred* is None, :func:`bool` is used.
>>> iterable = [0, 1, False, True, '', ' ']
>>> false_items, true_items = partition(iterable, None)
>>> list(false_items), list(true_items)
([0, False, ''], [1, True, ' '])
"""
if pred is None:
pred = bool
evaluations = ((pred(x), x) for x in iterable)
t1, t2 = tee(evaluations)
return [x for (cond, x) in t1 if not cond], [x for (cond, x) in t2 if cond]
def last(iterable: Iterable[T]) -> Optional[T]:
"""
>>> last([1, 2, 3])
3
>>> last([])
>>> last((1, 2, 3))
3
>>> last(range(1, 4))
3
"""
value = None
for value in iterable:
continue
return value
def group_by_once(
    iterable: Iterable[VT], *, key: Callable[[VT], KT]
) -> dict[KT, list[VT]]:
"""
>>> example = ["a", "b", "aa", "c"]
>>> from itertools import groupby
>>> [(key, list(iterable)) for key, iterable in groupby(example, key=len)]
[(1, ['a', 'b']), (2, ['aa']), (1, ['c'])]
>>> list(group_by_once(example, key=len).items())
[(1, ['a', 'b', 'c']), (2, ['aa'])]
"""
full = defaultdict(list)
for instance in iterable:
full[key(instance)].append(instance)
return {**full}
@singledispatch
def _unpack(raw: object, allowed_falsy: set[object] | None):
return raw
@_unpack.register
def _unpack_list(raw: list, allowed_falsy: Optional[set[object]]):
return [_unpack(each_raw, allowed_falsy) for each_raw in raw]
@_unpack.register
def _unpack_dict(raw: dict, allowed_falsy: Optional[set[object]]):
return ignore_falsy_recurse(**raw, allowed_falsy=allowed_falsy)
_allowed_falsy = {False, 0}
def ignore_falsy_recurse(allowed_falsy: Optional[set[Any]] = None, **kwargs) -> dict:
"""Ignores empty dictionaries or lists and None values.
Warning:
Keeps False & 0
>>> ignore_falsy_recurse(a=0, b="ok", c=None)
{'a': 0, 'b': 'ok'}
>>> ignore_falsy_recurse(a=[{"name": "e", "age": None}, {"people": []}, {}], b="ok", c=None)
{'a': [{'name': 'e'}, {}, {}], 'b': 'ok'}
>>> ignore_falsy_recurse(a=[{"name": "e", "age": None}, {"people": [{"name": "nested", "lastname": ""}]}, {}], b="ok", c=None)
{'a': [{'name': 'e'}, {'people': [{'name': 'nested'}]}, {}], 'b': 'ok'}
"""
allowed_falsy = allowed_falsy or _allowed_falsy
return {
key: _unpack(value, allowed_falsy) # type: ignore
for key, value in kwargs.items()
if value or (not isinstance(value, (dict, list)) and value in allowed_falsy)
}
def ignore_falsy(**kwargs) -> dict:
"""
Warning: Also removes 0 or False
>>> ignore_falsy(a=0, b="ok", c=None)
{'b': 'ok'}
"""
return {key: value for key, value in kwargs.items() if value}
def ignore_none(**kwargs) -> dict:
return {key: value for key, value in kwargs.items() if value is not None}
def iter_slices(sequence: Sequence[T], max: int = 100) -> Iterable[Sequence[T]]:
for start in range(0, len(sequence), max):
        yield sequence[start : start + max]


# ---- zero_3rdparty/file_utils.py ----
from __future__ import annotations
import logging
import os
import shutil
from logging import Logger
from pathlib import Path
from typing import Iterable
from zero_3rdparty.run_env import running_in_container_environment
PathLike = os.PathLike
logger = logging.getLogger(__name__)
def filepath_in_same_dir(file_path: str, *other_filename: str) -> str:
"""
>>> filepath_in_same_dir(__file__, 'id_creator.py').endswith('id_creator.py')
True
"""
return os.path.join(os.path.dirname(file_path), *other_filename)
def abspath_current_dir(file: os.PathLike) -> str:
return abspath_dir(file, dirs_up=0)
def abspath_dir(file: os.PathLike, dirs_up: int = 0) -> str:
parents = list(Path(file).parents)
return str(parents[dirs_up])
def rm_tree_logged(file_path: str, logger: Logger, ignore_errors: bool = True) -> None:
logger.info(f"remove dir: {file_path}")
if ignore_errors:
def log_error(*args):
logger.warning(f"error deleting: {file_path}, {args}")
shutil.rmtree(file_path, ignore_errors=False, onerror=log_error)
else:
shutil.rmtree(file_path, ignore_errors=False)
def stem_name(
path: os.PathLike, include_parent: bool = False, join_parent: str = "/"
) -> str:
"""
>>> Path("docker-compose.dec.yaml").stem # notice how there is still .dec
'docker-compose.dec'
>>> stem_name('dump/docker-compose.dec.yaml')
'docker-compose'
>>> stem_name('dump/docker-compose.dec.yaml', include_parent=True)
'dump/docker-compose'
"""
path = Path(path)
name = path.name.replace("".join(path.suffixes), "")
if include_parent:
name = f"{path.parent.name}{join_parent}{name}"
return name
def clean_dir(
path: Path, expected_parents: int = 2, recreate: bool = True, ignore_errors=True
) -> None:
if not running_in_container_environment():
assert (
len(Path(path).parents) > expected_parents
), f"rm root by accident {path}?"
rm_tree_logged(str(path), logger, ignore_errors=ignore_errors)
if recreate:
path.mkdir(parents=True, exist_ok=True)
def join_if_not_absolute(base_path: os.PathLike, relative: str) -> str:
if relative.startswith(os.path.sep):
return relative
return os.path.join(base_path, relative)
def copy(
src: os.PathLike, dest: os.PathLike, clean_dest: bool = False, ensure_parents=True
) -> None:
logger.info(f"cp {src} {dest}")
dest = Path(dest)
if ensure_parents:
dest.parent.mkdir(parents=True, exist_ok=True)
if Path(src).is_dir():
if clean_dest and dest.exists():
clean_dir(dest, recreate=False)
shutil.copytree(src, dest)
else:
if dest.exists():
dest.unlink()
shutil.copy(src, dest)
def ensure_parents_write_text(path: os.PathLike, text: str, log: bool = False) -> None:
path = Path(path)
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(text)
if log:
logger.info(f"writing to {path}, text={text}")
def file_modified_time(path: os.PathLike) -> float:
return os.path.getmtime(path)
IMG_EXTENSIONS = (".jpeg", ".gif", ".png")
def is_image_file(path: os.PathLike) -> bool:
"""
>>> is_image_file("profile.png")
True
>>> is_image_file("profile.txt")
False
>>> is_image_file("profile")
False
"""
return Path(path).suffix.endswith(IMG_EXTENSIONS)
def iter_paths(base_dir: Path, *globs: str, rglob=True) -> Iterable[Path]:
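    """Yield paths under ``base_dir`` matching any of the glob patterns.

    Illustrative usage (added; a minimal sketch, not from the original source;
    the directory and patterns are hypothetical):

        for path in iter_paths(Path("src"), "*.py", "*.toml"):
            print(path)
    """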
search_func = base_dir.rglob if rglob else base_dir.glob
for glob in globs:
yield from search_func(glob)
def iter_paths_and_relative(
base_dir: Path, *globs: str, rglob=True
) -> Iterable[tuple[Path, str]]:
for path in iter_paths(base_dir, *globs, rglob=rglob):
        yield path, str(path.relative_to(base_dir))


# ---- zero_3rdparty/future.py ----
from __future__ import annotations
import logging
from asyncio import CancelledError as _AsyncCancelledError
from asyncio import Future as AsyncFuture
from asyncio import TimeoutError as _AsyncTimeoutError
from asyncio import gather, wrap_future
from collections.abc import Iterable
from concurrent.futures import CancelledError as _CancelledError
from concurrent.futures import Future as _ConcFuture
from concurrent.futures import TimeoutError as _ConcTimeoutError
from contextlib import suppress
from functools import wraps
from typing import Any, Callable, TypeVar, Union
from typing_extensions import TypeAlias
logger = logging.getLogger(__name__)
ResultT = TypeVar("ResultT")
ConcFuture = _ConcFuture
ConcCancelledError = _CancelledError
ConcTimeoutError = _ConcTimeoutError
AsyncTimeoutError = _AsyncTimeoutError
AsyncCancelledError = _AsyncCancelledError
Future: TypeAlias = Union[ConcFuture[ResultT], AsyncFuture[ResultT]]
def gather_conc_futures(futures: Iterable[ConcFuture]) -> AsyncFuture:
return gather(*[wrap_future(f) for f in futures])
def chain_future(
complete_first: Future, complete_after: Future, only_on_error: bool = False
) -> None:
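    """Complete ``complete_after`` with the result (or error) of ``complete_first``.

    Illustrative usage (added; a minimal sketch, not from the original source):

        first: ConcFuture[str] = ConcFuture()
        after: ConcFuture[str] = ConcFuture()
        chain_future(first, after)
        first.set_result("ok")
        assert after.result(timeout=1) == "ok"
    """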
def copy_result(future: Future):
assert future is complete_first
if complete_after.done():
logger.info(f"complete_after future already done: {complete_after}")
return
if error := complete_first.exception():
safe_complete(complete_after, error=error)
else:
if not only_on_error:
safe_complete(complete_after, result=complete_first.result())
complete_first.add_done_callback(copy_result)
def add_done_callback(
future: Future,
call: Callable,
*,
_only_on_ok: bool = False,
_only_on_error: bool = False,
_include_error: bool = False,
_include_error_name: str = "error",
**callback_kwargs,
) -> None:
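    """Invoke ``call(**callback_kwargs)`` once ``future`` completes.

    Illustrative usage (added; a minimal sketch, not from the original source):

        fut: ConcFuture[int] = ConcFuture()
        add_done_callback(fut, lambda **kw: print(kw), _include_error=True)
        fut.set_result(2)  # prints {'error': None}
    """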
assert not (_only_on_error and _only_on_ok), "only_on_xx is mutually exclusive"
@wraps(call)
def on_complete(f: Future):
error = f.exception()
if _only_on_ok and error:
return
elif _only_on_error and not error:
return
if _include_error:
callback_kwargs[_include_error_name] = error
call(**callback_kwargs)
future.add_done_callback(on_complete)
def add_done_callback_ignore_errors(
future: Future,
call: Callable,
*errors: type[Exception],
_only_on_ok: bool = False,
_only_on_error: bool = False,
**callback_kwargs,
) -> None:
assert not (_only_on_error and _only_on_ok), "only_on_xx is mutually exclusive"
@wraps(call)
def on_complete(f: Future):
if _only_on_ok and f.exception():
return
elif _only_on_error and not f.exception():
return
try:
call(**callback_kwargs)
except Exception as e:
if not isinstance(e, errors):
raise e
future.add_done_callback(on_complete)
def safe_complete(
future: Future,
error: BaseException | None = None,
result: object | None = None,
):
if error:
# asyncio.CancelledError(BaseException)
assert isinstance(error, BaseException), f"not an error: {error!r}"
if future.done():
logger.warning(
f"future already complete: {future}, error={error}, result={result}"
)
return
if error:
future.set_exception(error)
else:
future.set_result(result)
def safe_error(future: Future) -> BaseException | None:
if not future.done():
return None
try:
return future.exception()
except (
ConcCancelledError,
ConcTimeoutError,
AsyncTimeoutError,
AsyncCancelledError,
) as e:
return e
def safe_result(future: Future) -> Any:
if not future.done():
logger.warning("cannot get result, not done")
return None
with suppress(BaseException):
return future.result()
def as_incomplete_future(future: Future | None, fut_type: type = ConcFuture) -> Future:
if future and not future.done():
return future
return fut_type()
def safe_cancel(future: Future | None, reason: str = "") -> None:
if future and not future.done():
future.cancel()
return None
def safe_wait(future: Future[ResultT], timeout: float | None = None) -> ResultT | None:
    if not future:
        logger.warning("no future to wait for")
        return None
try:
if isinstance(future, ConcFuture):
return future.result(timeout)
return future.result()
except Exception as e:
logger.exception(e)
return None
def on_error_ignore(*error_t: type[BaseException]) -> Callable[[BaseException], None]:
def on_error(error: BaseException):
if isinstance(error, error_t):
logger.info(f"ignored error: {error!r}")
return
logger.exception(error)
    return on_error


# ---- zero_3rdparty/id_creator.py ----
import string
from random import choices
from time import time
from uuid import uuid4
def unique_id(length: int, character_class: str) -> str:
"""Returns a unique id of length `length`.
A unique id is a string of length 1 or greater composed of
characters from a character_class.
For example, a unique id of length 6 composed of characters from the
character class [0-9a-f] might be the following:
a1fb32
    Different calls to `unique_id` are expected to return unequal unique ids. While we expect this
    to be true in most business-relevant cases, it must be understood that this is not guaranteed
    and that, while generally improbable, a call to `unique_id` might return the exact same unique
    id as the preceding call.
    Different combinations of `length` and `character_class` modify the chance of a collision.
    For example, two successive calls of `unique_id` with a single-element `character_class` are
    guaranteed to return the same unique id independently of the used length. Thus `unique_id`
    would guarantee a period of 1.
More generally, two successive calls of `unique_id` with a `length` of 1 and a
`character_class` composed of n characters have a probability of 1/n of returning the same
unique id. Thus `unique_id` would have a maximum period of n that would not be guaranteed.
A `length` of 5 with the character class [0-9a-f] would provide a maximum non guaranteed
period of about 500000.
A `character_class` should not contain any repeated character. Having characters that are
repeated will increase the chance of a collision.
`unique_id` should not directly be used in other parts of the codebase.
Args:
length: The length of the requested unique id. Must be a positive integer.
character_class: The characters allowed in the unique id. Must contain at least one character.
Raises:
AssertionError when `length` is 0 or less.
AssertionError when `character_class` is empty.
Returns:
        A random string of length `length` composed only of characters contained in `character_class`.
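
    Illustrative doctest (added; not from the original source):
    >>> uid = unique_id(6, "0123456789abcdef")
    >>> len(uid)
    6
    >>> set(uid) <= set("0123456789abcdef")
    True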
"""
assert length > 0, "length must be a positive natural!"
assert character_class, "character_class should contain at least one character!"
return "".join(choices(character_class, k=length))
"""
A string containing all the characters that can be used in GET query parameters
without being escaped.
See https://www.456bereastreet.com/archive/201008/what_characters_are_allowed_unencoded_in_query_strings/
for information about what characters are allowed.
"""
url_query_safe_characters: str = (
string.ascii_letters + string.digits + "-_.!$&'()*+,;=:@?"
)
def url_safe_unique_id(length: int = 32) -> str:
"""Returns a unique id composed of characters that can be used in a GET
query parameter.
    The `length` default value is a leftover from a previous iteration of this function and has no
    particular meaning.
@see `url_query_safe_characters` to learn about the characters that are allowed.
@see `unique_id` to learn about what a unique id is.
Args:
length: The length of the requested unique id.
Raises:
AssertionError when `length` is 0 or less.
Returns:
A unique id of length `length` composed of character that can be used to form a GET query
parameter without being escaped.
"""
return unique_id(length, url_query_safe_characters)
string_or_digit = string.ascii_letters + string.digits
def simple_id(length: int = 10) -> str:
"""
>>> len(simple_id())
10
>>> simple_id() != simple_id()
True
"""
return "".join(choices(string_or_digit, k=length))
def uuid4_hex():
return uuid4().hex
def ms_time_and_random(random_length: int = 5, separator: str = "-") -> str:
"""
>>> ms_time_and_random() #doctest:+SKIP
'1620244384.258-k7d3N'
"""
return f"{time():.3f}{separator}{simple_id(random_length)}"
def as_ms_time_and_random(ms_time: str, separator: str = "-") -> tuple[float, str]:
"""
>>> as_ms_time_and_random('1620244384.258-k7d3N')
(1620244384.258, 'k7d3N')
>>> as_ms_time_and_random('1620244384.258-k7d3N', separator="+")
Traceback (most recent call last):
...
ValueError: not enough values to unpack (expected 2, got 1)
>>> as_ms_time_and_random('1620244384K.258-k7d3N')
Traceback (most recent call last):
...
ValueError: could not convert string to float: '1620244384K.258'
"""
time_str, random_str = ms_time.split(separator, maxsplit=1)
    return float(time_str), random_str


# ---- zero_3rdparty/dependency.py ----
"""Simple dependency registry: rather than a full dependency-injection framework,
choosing to simplify by providing a smaller set of methods."""
from __future__ import annotations
import logging
from collections import defaultdict
from contextlib import suppress
from dataclasses import dataclass
from typing import Any, Callable, Generic, TypeVar, Union, cast
from typing_extensions import TypeAlias
from zero_3rdparty.error import BaseError
from zero_3rdparty.iter_utils import first_or_none, public_dict
from zero_3rdparty.object_name import as_name
logger = logging.getLogger(__name__)
T = TypeVar("T")
@dataclass
class Provider(Generic[T]):
provider: Callable[[], T]
ProviderOrInstance: TypeAlias = Union[T, Provider[T]]
_dependencies: dict[type, ProviderOrInstance] = {}
_infer_instances: list[Any] = []
class DependencyNotSet(BaseError):
def __init__(self, cls: type):
self.cls = cls
def instance(cls: type[T]) -> T:
if _instance := _dependencies.get(cls):
return _instance.provider() if isinstance(_instance, Provider) else _instance
raise DependencyNotSet(cls)
def instance_or_inferred(cls: type[T]) -> T:
try:
return instance(cls)
except DependencyNotSet as e:
inferable = _infer_instances + list(_dependencies.values())
if found := first_or_none(inferable, cls):
_dependencies[cls] = found
return found
raise e
def instance_or_none(cls: type[T]) -> T | None:
with suppress(DependencyNotSet):
return instance(cls)
return None
class ReBindingError(BaseError):
def __init__(self, classes: list[type]):
self.classes = classes
def get_dependencies() -> dict[type, Provider[T] | T]:
return _dependencies
def bind_infer_instances(instances: list[Any], clear_first: bool = False):
global _infer_instances
if clear_first:
_infer_instances.clear()
_infer_instances.extend(instances)
def bind_instances(
instances: dict[type[T], Provider | T],
clear_first: bool = False,
allow_re_binding: bool = False,
):
if clear_first:
_dependencies.clear()
if not allow_re_binding:
if re_bindings := [cls for cls in instances if cls in _dependencies]:
raise ReBindingError(re_bindings)
_dependencies.update(instances)
@dataclass
class _InjectDescriptor(Generic[T]):
cls: type[T]
def __get__(self, _instance, owner) -> T | _InjectDescriptor[T]:
with suppress(DependencyNotSet):
return instance(self.cls)
if _instance is not None:
raise AttributeError
return self
def as_dependency_cls(maybe_dependency: Any):
if isinstance(maybe_dependency, _InjectDescriptor):
return maybe_dependency.cls
def dependency(cls: type[T]) -> T:
return cast(T, _InjectDescriptor(cls))
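
# Illustrative usage (added; a minimal sketch, not from the original source;
# Clock and Service are hypothetical):
#
#     class Clock: ...
#
#     class Service:
#         clock: Clock = dependency(Clock)
#
#     bind_instances({Clock: Clock()})
#     Service().clock  # -> the bound Clock instance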
class MissingDependencies(BaseError):
def __init__(self, missing_dependencies: dict[type, list[str]]):
self.missing_dependencies = missing_dependencies
def _as_member_dependencies(member: Any) -> list[tuple[str, type[T]]]:
"""
Tip:
cannot use inspect.getmembers since it will ignore dependencies
due to AttributeError raised in resolve_dependency
"""
member_type = type(member)
return [
(name, cls)
for name, dependency_property in public_dict(
member_type, recursive=True
).items()
if (cls := as_dependency_cls(dependency_property))
]
def validate_dependencies(instances: list[Any], allow_binding: bool = True) -> None:
"""Raises MissingDependencies."""
missing_dependencies: dict[type, list[str]] = defaultdict(list)
for each_instance in instances:
for prop_name, cls in _as_member_dependencies(each_instance): # type: ignore
if cls not in _dependencies:
if allow_binding and (
inferred_instance := first_or_none(instances, cls)
):
logger.info(f"binding by inferring {cls} to {inferred_instance}")
bind_instances({cls: inferred_instance})
else:
prop_path = f"{as_name(each_instance)}.{prop_name}"
missing_dependencies[cls].append(prop_path)
if missing_dependencies:
        raise MissingDependencies(missing_dependencies)


# ---- (next file: path not shown in this extract) ----
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import logging
from django.conf import settings
from django.core.urlresolvers import reverse, resolve
from django.core.exceptions import ImproperlyConfigured
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.http import Http404
from django.template.loader import render_to_string
from django.forms import models as model_forms
from django.views.generic import View
from django.views.generic import TemplateView as DjangoTemplateView
from django.views.generic import CreateView as DjangoCreateView
from django.views.generic import DetailView as DjangoDetailView
from django.views.generic import ListView as DjangoListView
from django.views.generic import UpdateView as DjangoUpdateView
from django.views.generic import DeleteView as DjangoDeleteView
from django.views.generic.base import TemplateResponseMixin
from django.views.generic.detail import SingleObjectMixin
from django.views.generic.edit import FormMixin
from django.views.generic.edit import ProcessFormView
from django.views.generic.edit import BaseFormView
from django.utils import simplejson as json
from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponse
from django.shortcuts import redirect
MIMES = {
'html': 'text/html',
'xhtml': 'text/html',
'json': 'application/json',
'atom': 'application/atom+xml',
'xml': 'application/xml'
}
class TemplateView(DjangoTemplateView):
def get_context_data(self, **kwargs):
return kwargs
class MultipleFormatResponseMixin(TemplateResponseMixin):
"""
    View class that can render its response in multiple formats.
"""
    #: Response format
    format = None
    #: Default format
    default_format = 'html'
    #: Templates per format
    templates = {
        'html': 'page.example.html',
    }
    #: Whether a redirect is required.
    redirect = None
    #: The URL to redirect to.
redirect_url = None
def get_format(self):
"""
        Returns the format in which the page should be rendered.
"""
if self.format is not None:
return self.format
if self.request.is_ajax() and 'format' not in self.kwargs:
self.format = 'json'
else:
self.format = self.kwargs.get('format', self.default_format)
        # TODO: this is not very coherent.
if self.format in self.templates or self.format == 'json':
return self.format
raise ImproperlyConfigured(u'Format not allowed: %s ' % self.format)
def get_template_names(self):
"""
        Returns the template to use to render the page.
"""
format = self.get_format()
if format not in self.templates:
            raise ImproperlyConfigured('%s has to define a template for '
'format %s' % (self.__class__.__name__, format))
else:
template = self.templates.get(format)
return [template]
def get_redirect_url(self, **kwargs):
"""
        Returns the URL to redirect to.
"""
if self.redirect_url is not None:
return self.redirect_url
        raise ImproperlyConfigured('%s has to define a redirect url'
% self.__class__.__name__)
def get_json_response(self, context):
"""
        Builds the view response in JSON format.
"""
if 'form' in context:
del context['form']
json_data = json.dumps(context)
return HttpResponse(json_data, 'application/json')
def render_to_response(self, context, **response_kwargs):
"""
        Returns the response to the view call.
"""
self.format = self.get_format()
if self.format == 'json':
if self.redirect:
context['redirect'] = True
context['redirect_url'] = self.get_redirect_url()
else:
context['redirect'] = False
return self.get_json_response(context)
elif self.format == 'html':
if self.redirect:
return redirect(self.get_redirect_url())
        response_kwargs['mimetype'] = MIMES.get(self.format, 'text/html')
return super(MultipleFormatResponseMixin, self).render_to_response(context, **response_kwargs)
class BaseViewMixin(object):
"""
    Base view class for all the project's view classes.
"""
    #: View name
    view_name = None
    #: Human-readable page title
    title = None
    #: App name
app_name = None
def get_view_name(self):
"""
        Returns the name of the view.
"""
if self.view_name is not None:
return self.view_name
        raise ImproperlyConfigured('%s has to define view_name'
% self.__class__.__name__)
def get_title(self):
"""
        Returns the page title.
"""
if self.title is not None:
return self.title
try:
return self.get_view_name()
        except ImproperlyConfigured:
            raise ImproperlyConfigured('%s has to define title or view_name'
% self.__class__.__name__)
def get_app_name(self):
"""
        Returns the app name.
"""
if self.app_name is not None:
return self.app_name
        raise ImproperlyConfigured('%s has to define an app_name'
% self.__class__.__name__)
def get_context_data(self, **context):
"""
        Adds the view's basic variables to the context.
"""
context = super(BaseViewMixin, self).get_context_data(**context)
context.update({
'app_name': self.get_app_name(),
'view_name': self.get_view_name(),
'title': self.get_title(),
})
if self.get_format() != 'json':
context.update({
'request': self.request,
'user': self.request.user
})
return context
class BaseView(MultipleFormatResponseMixin, BaseViewMixin, TemplateView):
"""
    Base class for developer-defined views that are not part of the CRUD.
"""
pass
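
# Illustrative subclass (added; a minimal sketch, not from the original source;
# the app, view, and template names are hypothetical):
#
#     class AboutView(BaseView):
#         app_name = 'pages'
#         view_name = 'about'
#         templates = {'html': 'page.about.html'}
#
# The same view answers Ajax requests as JSON and regular requests as HTML.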
class ActionResponseMixin(MultipleFormatResponseMixin):
"""
    Mixin for handling actions.
"""
    #: Message shown when the action completes successfully.
    success_message = _(u'La acción se realizó exitosamente')
    #: Whether the success message should be shown.
    show_success_message = True
    #: Message shown when the action does not complete.
    fail_message = _(u'La acción no pudo realizarse')
    #: Whether the failure message should be shown.
    show_fail_message = True
    #: Whether to redirect on success or failure. Ajax only; otherwise it always redirects.
    redirect = False
    #: The URL to redirect to when the action succeeds.
    success_url = None
    #: The URL to redirect to when the action fails.
    fail_url = None
    #: Whether the action completed successfully or not.
    success = None
def get_context_data(self, success=True, **context):
"""
        Returns the action context.
"""
self.success = success
context.update({
'success': success,
'message': self.get_message(success),
'redirect': self.redirect
})
if self.get_format() == 'json' and 'request' in context:
del context['request']
if self.redirect and self.request.method == 'POST':
context['redirect_url'] = self.get_redirect_url(success)
return context
def get_message(self, success):
"""
        Returns the appropriate message depending on whether the action
        succeeded or failed.
"""
message = self.success_message if success else self.fail_message
return unicode(message)
    def get_success_redirect_url(self):
"""
        Returns the URL to redirect to on success.
"""
if self.success_url is not None:
return self.success_url
self.success_url = reverse('home')
return self.success_url
    def get_fail_redirect_url(self):
"""
        Returns the URL to redirect to on failure.
"""
if self.fail_url is not None:
return self.fail_url
self.fail_url = reverse('error')
return self.fail_url
def get_redirect_url(self, success=None):
"""
        Returns the appropriate URL to redirect to.
"""
if success is None:
if self.redirect_url is not None:
return self.redirect_url
else:
                raise ImproperlyConfigured(u'The success variable has to be True or False')
if success:
return self.get_success_redirect_url()
return self.get_fail_redirect_url()
def action_response(self, success=True, message=None, redirect=None,
redirect_url=None, **kwargs):
"""
        Returns the action response.
"""
self.success = success
if message is not None:
if success:
self.success_message = message
else:
self.fail_message = message
if redirect is not None:
self.redirect = redirect
if redirect_url is not None:
self.redirect_url = redirect_url
context = self.get_context_data(success=success, **kwargs)
return self.render_to_response(context)
def fail_response(self, message=None, redirect=None, redirect_url=None,
**kwargs):
"""
        Returns a failed-action response.
"""
return self.action_response(success=False,
message=message,
redirect=redirect,
redirect_url=redirect_url,
**kwargs)
def success_response(self, message=None, redirect=None, redirect_url=None,
**kwargs):
"""
        Returns a successful-action response.
"""
return self.action_response(success=True,
message=message,
redirect=redirect,
redirect_url=redirect_url,
**kwargs)
class LoginRequiredMixin(ActionResponseMixin):
"""
    Mixin for all views that require an authenticated user.
"""
def dispatch(self, request, *args, **kwargs):
"""
        Dispatches to the appropriate method after checking that the user is
        authenticated; if not, redirects to the login page.
"""
self.request = request
self.args = args
self.kwargs = kwargs
if not request.user.is_authenticated():
url = '%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, request.get_full_path())
return self.fail_response(message=_(u'Necesitas identificarte para continuar.'),
redirect=True,
redirect_url=url)
return super(LoginRequiredMixin, self).dispatch(request, **kwargs)
class OwnerRequiredMixin(LoginRequiredMixin, SingleObjectMixin):
"""
    Mixin that verifies a user's ownership of an object.
"""
def get_object(self):
if hasattr(self, 'object') and self.object is not None:
return self.object
else:
return super(OwnerRequiredMixin, self).get_object()
def is_owner(self, content, user):
return content.user_id == user.id
def dispatch(self, request, *args, **kwargs):
"""
Despacha la peticiΓ³n al mΓ©todo apropiado vericando la propiedad del
usuario sobre el objeto.
"""
self.request = request
self.args = args
self.kwargs = kwargs
if not request.user.is_authenticated():
            return super(OwnerRequiredMixin, self).dispatch(request, *args, **kwargs)
try:
self.object = self.get_object()
except Http404:
            return self.fail_response(_(u'The content does not exist'), redirect=True,
                                      redirect_url=reverse('error'))
if not self.is_owner(self.object, self.request.user):
            return self.fail_response(_(u'You are not the owner'), redirect=True,
                                      redirect_url=reverse('error'))
        return super(OwnerRequiredMixin, self).dispatch(request, *args, **kwargs)
class BaseFormMixin(ActionResponseMixin):
"""
Mixin para vistar que procesan formularios.
"""
template_form = 'forms/form.html'
def get_form_class(self):
if self.form_class is not None:
return self.form_class
else:
            raise ImproperlyConfigured('%s has to define a form_class'
                                       % self.__class__.__name__)
def get_context_data(self, *args, **kwargs):
context = super(BaseFormMixin, self).get_context_data(*args, **kwargs)
if self.get_format() == 'json':
context['form_html'] = render_to_string(self.template_form, context)
return context
def get_success_url(self):
return self.get_success_redirect_url()
def form_valid(self, form):
if self.get_format() == 'html':
self.redirect = True
self.redirect_url = self.get_success_url()
return self.success_response(form=form)
def form_invalid(self, form):
self.fail_message = self.error_to_string(form)
return self.fail_response(form=form)
@classmethod
def error_to_string(cls, form):
"""
Retorna los mensajes de error de un formulario en una misma cadena.
"""
errors = []
for key, error in form.errors.items():
errors.append('%s: %s' % (key, '. '.join(error)))
message = '\n'.join(errors)
return message
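
# Example (illustrative; error order may vary): a form with errors
#   {'name': [u'This field is required.']}
# is rendered by error_to_string as u'name: This field is required.'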
class FormView(BaseViewMixin, BaseFormMixin, BaseFormView):
"""
Vista base para procesar un formulario
"""
templates = {
'html': 'page.form.html'
}
class ModelFormMixin(BaseFormMixin, SingleObjectMixin):
"""
Vista para mostrar y procesar formularios para crear objetos.
"""
template_object = None
def get_form_kwargs(self):
kwargs = super(ModelFormMixin, self).get_form_kwargs()
kwargs.update({'instance': self.object})
return kwargs
def get_form_class(self):
if self.form_class:
return self.form_class
else:
if self.model is not None:
# If a model has been explicitly provided, use it
model = self.model
elif hasattr(self, 'object') and self.object is not None:
# If this view is operating on a single object, use
# the class of that object
model = self.object.__class__
else:
# Try to get a queryset and extract the model class
# from that
model = self.get_queryset().model
return model_forms.modelform_factory(model)
def get_success_redirect_url(self):
try:
url = self.object.get_absolute_url()
except AttributeError:
url = super(ModelFormMixin, self).get_success_redirect_url()
return url
def form_valid(self, form):
self.object = form.save()
return super(ModelFormMixin, self).form_valid(form)
def get_template_object(self):
if self.template_object is not None:
return self.template_object
else:
            raise ImproperlyConfigured('%s has to define a template_object'
                                       % self.__class__.__name__)
def get_context_data(self, success=True, **kwargs):
context = super(ModelFormMixin, self).get_context_data(success=success, **kwargs)
if hasattr(self, 'object'):
context['object'] = self.object
if self.get_format() == 'json' and self.object is not None:
            # Render the object's html.
            # We add the request and the user to the context, then remove
            # them afterwards so it stays json-serializable.
context['user'] = self.request.user
context['request'] = self.request
context['object'] = render_to_string(self.get_template_object(), context)
del context['user']
del context['request']
context['pk'] = self.object.pk
return context
class CreateView(ModelFormMixin, FormView):
"""
Clase base de las vistas para crear objetos.
"""
def get(self, request, *args, **kwargs):
self.object = None
return super(CreateView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = None
return super(CreateView, self).post(request, *args, **kwargs)
class UpdateView(ModelFormMixin, FormView):
"""
Clase base para actualizar los datos de un objeto.
"""
def get(self, request, *args, **kwargs):
self.object = self.get_object()
return super(UpdateView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
return super(UpdateView, self).post(request, *args, **kwargs)
class ActionView(ActionResponseMixin, TemplateView):
"""
Vista para manejar acciones sobre objetos.
"""
#: El mensaje que se muestra para confirmar la acciΓ³n
confirm_message = _(u'Deseas continuar con esta acciΓ³n?')
#: Si se necesita una confirmaciΓ³n para continuar con la acciΓ³n. Solo en AJAX
confirm = True
templates = {
'html': 'page.confirm.html'
}
def action(self, request, **kwargs):
raise NotImplementedError
def get_context_data(self, success=True, **context):
"""
Retorna el contexto de la acciΓ³n
"""
context.update({
'confirm_message': unicode(self.confirm_message),
'confirm': True,
})
if self.get_format() != 'json':
context['request'] = self.request
return super(ActionView, self).get_context_data(success=success, **context)
def post(self, request, **kwargs):
"""
Ejecuta la acciΓ³n.
"""
# Ejecutamos la acciΓ³n
success = self.action(request, **kwargs)
format = self.get_format()
if format == 'html':
self.redirect = True
self.redirect_url = self.get_redirect_url(success)
if success:
return self.success_response()
else:
return self.fail_response()
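
# Usage sketch (hypothetical subclass; the publish logic is illustrative
# only, not part of this module, and assumes `from datetime import datetime`):
#
#   class PublishView(OwnerRequiredMixin, ActionView):
#       success_message = _(u'Content published.')
#       fail_message = _(u'The content could not be published.')
#
#       def action(self, request, **kwargs):
#           content = self.get_object()
#           content.published_at = datetime.now()
#           content.save()
#           return True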
class ListView(BaseViewMixin, MultipleFormatResponseMixin, DjangoListView):
"""
Clase base de las vistas que muestran una lista de objetos.
"""
pass
class DetailView(BaseViewMixin, MultipleFormatResponseMixin, DjangoDetailView):
"""
Clase base de as vistas que muestran un objeto a detalle.
"""
pass
class DeleteView(OwnerRequiredMixin, ActionView):
"""
Clase base para eliminar un objeto.
"""
def action(self, request, **kwargs):
"""
Elimina el objeto.
"""
try:
content = self.get_object()
content.delete()
return True
except Http404:
            self.fail_message = _(u'The content does not exist')
return False
class FlashView(BaseView):
"""
Vista que muestra un mensaje para el usuario
"""
view_name = 'flash-view'
app_name = 'common'
templates = {
'html': 'flash_view.html'
    }

# end of common/views.py (package zero-common 0.1.10)
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import copy
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.core.exceptions import ImproperlyConfigured
from django.template.loader import render_to_string
SITE_FROM_EMAIL = getattr(settings, 'SITE_FROM_EMAIL', None)
class Mailer(object):
"""
Clase encargada de enviar emails.
"""
def __init__(self, subject, plain_template, html_template, **kwargs):
self.subject = subject
self.kwargs = kwargs
self.plain_template = plain_template
self.html_template = html_template
def send(self, emails, from_email=None):
"""
Envia el email.
"""
if isinstance(emails, basestring):
emails = [emails]
        # Work out the address the email is sent from
from_email = from_email or SITE_FROM_EMAIL
        if from_email is None:
            raise ImproperlyConfigured(u'A from-address for the site needs '
                                       u'to be defined.')
        # Render the plain and html messages
message = self._render(self.plain_template, **self.kwargs)
html_message = self._render(self.html_template, **self.kwargs)
        # Send the mail
return self.send_mail(emails, self.subject, message=message,
from_email=from_email,
html_message=html_message)
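
    # Usage sketch (hypothetical templates and addresses, not part of this
    # module; extra keyword arguments end up in the template context):
    #
    #   mailer = Mailer(subject=u'Welcome',
    #                   plain_template='mail/welcome.txt',
    #                   html_template='mail/welcome.html',
    #                   username=u'jose')
    #   mailer.send(['[email protected]'], from_email='[email protected]')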
@classmethod
def send_mail(cls, emails, subject, message, from_email=None, html_message=None):
"""
Envia el email a todos los destinatarios.
"""
mail = EmailMultiAlternatives(subject, message, from_email, emails)
mail.attach_alternative(html_message, "text/html")
return mail.send()
@classmethod
def _render(cls, template_name, **kwargs):
"""
Renderiza el mensaje.
"""
        # Build the context
from common.context_processors import basic
kwargs['STATIC_URL'] = settings.STATIC_URL
context = copy.copy(basic())
context.update(kwargs)
        # Render the message
message = render_to_string(template_name, context)
        return message

# end of common/mail.py (package zero-common 0.1.10)
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
from time import time
from django.db import models
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
class SlugifyMixin(models.Model):
"""
AΓ±ade la propiedad
"""
#: Slug para usarse en la url y mostrar el contenido
slug = models.SlugField(_('slug'))
#: Nombre de la propiedad para ser utilizada como base
field_name = 'name'
class Meta:
abstract = True
def save(self, *args, **kwargs):
field = getattr(self, self.field_name)
slug = slugify(field)
model = self.__class__
query = model.objects.filter(slug=slug)
if not query.exists():
self.slug = slug
else:
content = query.get()
if self.id is None:
self.slug = "%s-%s" % (slug, int(time()))
elif self.id == content.id:
self.slug = slug
else:
self.slug = "%s-%s" % (self.slug, self.id)
return super(SlugifyMixin, self).save(*args, **kwargs)
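
# Example (illustrative): saving two different contents named u'My post'
# yields slugs 'my-post' and 'my-post-<timestamp>'; editing a content whose
# base slug collides with another content's appends the object's own id.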
class Content(SlugifyMixin):
"""
Modelo base con atributos primarios para los contenidos que son visibles
en el sitio.
"""
    #: Name of the content
    name = models.CharField(_('Name'), max_length=255)
    #: The user who owns the content
    user = models.ForeignKey(User, verbose_name=_(u'Owner'))
    #: Creation date of the content
    created_at = models.DateTimeField(_(u'Creation date'),
                                      auto_now_add=True)
    #: Date of the content's last update
    updated_at = models.DateTimeField(_(u'Last modification date'),
                                      auto_now=True)
    #: Publication date of the content
    published_at = models.DateTimeField(_(u'Publication date'),
                                        auto_now_add=True)
class Meta:
abstract = True
def __unicode__(self):
        return self.name

# end of common/models.py (package zero-common 0.1.10)
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import re
import base64
import logging
from django.db import models
from django.core import exceptions
from south.modelsinspector import add_introspection_rules
from django.forms import RegexField
try:
import cPickle as pickle
except ImportError:
import pickle
class DictField(models.TextField):
"""
Campo para manejar y almacenar datos de tipo diccionario.
"""
__metaclass__ = models.SubfieldBase
description = "Campo que almacena datos de tipo diccionario"
def to_python(self, value):
"""
Convierte el valor guardado en la base de datos a un diccionario.
"""
if value is None:
return {}
if isinstance(value, dict):
return value
try:
res = pickle.loads(base64.b64decode(value))
except EOFError:
res = {}
return res
def get_db_prep_save(self, value, **kwargs):
"""
Serializa el valor y lo codifica para almacenarlo en la base de datos.
"""
if value is None:
value = {}
return base64.b64encode(pickle.dumps(value, protocol=-1))
def validate(self, value, model_instance):
if not isinstance(value, dict):
raise exceptions.ValidationError("No es un diccionario %s" % value)
class ListField(models.TextField):
"""
Campo para manejar y almacenar datos de tipo lista.
"""
__metaclass__ = models.SubfieldBase
description = "Campo que almacena datos de tipo diccionario"
def to_python(self, value):
"""
Convierte los datos en una lista.
"""
if value is None:
return []
if isinstance(value, list):
return value
try:
res = pickle.loads(base64.b64decode(value))
except EOFError:
res = []
return res
def get_db_prep_save(self, value, **kwargs):
"""
Serializa y codifica la lista para almacenarlo en la base de datos.
"""
if value is None:
value = []
return base64.b64encode(pickle.dumps(value, protocol=-1))
def validate(self, value, model_instance):
if not isinstance(value, list):
            raise exceptions.ValidationError(u'Not a list: %s' % value)
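
# Usage sketch (hypothetical model, not part of this module). Values are
# pickled and base64-encoded before hitting the database, so anything stored
# must be picklable:
#
#   class Profile(models.Model):
#       preferences = DictField(default={})
#       tags = ListField(default=[])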
HEX_COLOR = re.compile(r'^#(?:[0-9a-fA-F]{3}){1,2}$')
class ColorField(models.CharField):
"""
Campo para almacenar un color en formato hexadecimal.
"""
__metaclass__ = models.SubfieldBase
description = "Un color en formato hexadecimal"
    def __init__(self, *args, **kwargs):
        if 'max_length' not in kwargs:
            kwargs['max_length'] = 7
        super(ColorField, self).__init__(*args, **kwargs)
def validate(self, value, model_instance):
        if not isinstance(value, basestring) or not HEX_COLOR.match(value):
            raise exceptions.ValidationError(u'Not a valid color.')
def formfield(self, **kwargs):
defaults = {
'max_length': 7,
'min_length': 4,
'form_class': RegexField,
'regex': HEX_COLOR
}
defaults.update(kwargs)
return super(ColorField, self).formfield(**defaults)
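
# Usage sketch (hypothetical model):
#
#   class Theme(models.Model):
#       background = ColorField(default='#ffffff')   # '#fff' is accepted too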
# Register the custom fields so South can handle migrations.
add_introspection_rules([], ["^common\.fields\.DictField"])
add_introspection_rules([], ["^common\.fields\.ListField"])
add_introspection_rules([], ["^common\.fields\.ColorField"]) | zero-common | /zero-common-0.1.10.tar.gz/zero-common-0.1.10/common/fields.py | fields.py |
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import re
import time
import logging
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.hashcompat import sha_constructor
from django.utils.safestring import SafeUnicode
from django.template import TemplateSyntaxError
from django.template.loader_tags import ExtendsNode
from django.template.defaultfilters import urlize
from django.template.defaulttags import kwarg_re
from django.template.defaulttags import URLNode
register = template.Library()
current_site = Site.objects.get_current()
logger = logging.getLogger('project.simple')
@register.filter(name="urlize_blank")
def urlize_blank(value, autoescape=None):
"""
Convierte en un enlace <a> las cadenas que tienen un formato de url.
A diferencia del template tag por defecto de django, este convierte a
enlaces que se abren en una nueva ventana y se aΓ±ade el atributo
rel="nofollow"
"""
result = urlize(value, autoescape=autoescape)
return result.replace('<a ', '<a target="_blank" rel="nofollow" ')
@register.filter(name='add_class')
def add_class(value, klass):
    """
    Adds a css class to a rendered <label> tag.
    """
    value = value.replace('label', 'label class="%s"' % klass)
    return SafeUnicode(value)
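
# Template usage (hypothetical form field in the context):
#   {{ field.label_tag|add_class:"control-label" }}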
@register.filter
def hash(dictionary, key):
"""
Retorna el objecto que tienen como clave *key* del diccionario *dictionary*
"""
if dictionary:
try:
return dictionary[str(key)]
except KeyError:
pass
return None
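
# Template usage (hypothetical context variables):
#   {{ scores|hash:user.id }}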
@register.tag
def urlfull(parser, token):
bits = token.split_contents()
if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
viewname = bits[1]
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits):
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLFullNode(viewname, args, kwargs, asvar)
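
# Template usage (hypothetical view name and argument):
#   {% urlfull 'content-detail' slug=object.slug %}
#   renders something like http://www.<domain>/contents/<slug>/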
class URLFullNode(URLNode):
"""
Nodo para mostrar la url completa mΓ‘s el dominio:
http://www.yourdomain.com/path
"""
def render(self, context):
url = super(URLFullNode, self).render(context)
return "http://www.%s%s" % (current_site.domain, url) | zero-common | /zero-common-0.1.10.tar.gz/zero-common-0.1.10/common/templatetags/base.py | base.py |
!function(e){"use strict";e(function(){e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()},e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e(document).on("click.alert.data-api",t,n.prototype.close)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")},e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e(document).on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=n,this.options.slide&&this.slide(this.options.slide),this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},to:function(t){var n=this.$element.find(".item.active"),r=n.parent().children(),i=r.index(n),s=this;if(t>r.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){s.to(t)}):i==t?this.pause().cycle():this.slide(t>i?"next":"prev",e(r[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle()),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var 
r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f;this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u](),f=e.Event("slide",{relatedTarget:i[0]});if(i.hasClass("active"))return;if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}},e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e(document).on("click.carousel.data-api","[data-slide]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=e.extend({},i.data(),n.data());i.carousel(s),t.preventDefault()})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning)return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning)return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}},e.fn.collapse=function(n){return this.each(function(){var r=e(this),i=r.data("collapse"),s=typeof n=="object"&&n;i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e(document).on("click.collapse.data-api","[data-toggle=collapse]",function(t){var n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})}(window.jQuery),!function(e){"use 
strict";function r(){e(t).each(function(){i(e(this)).removeClass("open")})}function i(t){var n=t.attr("data-target"),r;return n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=e(n),r.length||(r=t.parent()),r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||(s.toggleClass("open"),n.focus()),!1},keydown:function(t){var n,r,s,o,u,a;if(!/(38|40|27)/.test(t.keyCode))return;n=e(this),t.preventDefault(),t.stopPropagation();if(n.is(".disabled, :disabled"))return;o=i(n),u=o.hasClass("open");if(!u||u&&t.keyCode==27)return n.click();r=e("[role=menu] li:not(.divider) a",o);if(!r.length)return;a=r.index(r.filter(":focus")),t.keyCode==38&&a>0&&a--,t.keyCode==40&&a<r.length-1&&a++,~a||(a=0),r.eq(a).focus()}},e.fn.dropdown=function(t){return this.each(function(){var r=e(this),i=r.data("dropdown");i||r.data("dropdown",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.dropdown.Constructor=n,e(document).on("click.dropdown.data-api touchstart.dropdown.data-api",r).on("click.dropdown touchstart.dropdown.data-api",".dropdown form",function(e){e.stopPropagation()}).on("click.dropdown.data-api touchstart.dropdown.data-api",t,n.prototype.toggle).on("keydown.dropdown.data-api touchstart.dropdown.data-api",t+", [role=menu]",n.prototype.keydown)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=n,this.$element=e(t).delegate('[data-dismiss="modal"]',"click.dismiss.modal",e.proxy(this.hide,this)),this.options.remote&&this.$element.find(".modal-body").load(this.options.remote)};t.prototype={constructor:t,toggle:function(){return this[this.isShown?"hide":"show"]()},show:function(){var t=this,n=e.Event("show");this.$element.trigger(n);if(this.isShown||n.isDefaultPrevented())return;this.isShown=!0,this.escape(),this.backdrop(function(){var n=e.support.transition&&t.$element.hasClass("fade");t.$element.parent().length||t.$element.appendTo(document.body),t.$element.show(),n&&t.$element[0].offsetWidth,t.$element.addClass("in").attr("aria-hidden",!1),t.enforceFocus(),n?t.$element.one(e.support.transition.end,function(){t.$element.focus().trigger("shown")}):t.$element.focus().trigger("shown")})},hide:function(t){t&&t.preventDefault();var n=this;t=e.Event("hide"),this.$element.trigger(t);if(!this.isShown||t.isDefaultPrevented())return;this.isShown=!1,this.escape(),e(document).off("focusin.modal"),this.$element.removeClass("in").attr("aria-hidden",!0),e.support.transition&&this.$element.hasClass("fade")?this.hideWithTransition():this.hideModal()},enforceFocus:function(){var t=this;e(document).on("focusin.modal",function(e){t.$element[0]!==e.target&&!t.$element.has(e.target).length&&t.$element.focus()})},escape:function(){var e=this;this.isShown&&this.options.keyboard?this.$element.on("keyup.dismiss.modal",function(t){t.which==27&&e.hide()}):this.isShown||this.$element.off("keyup.dismiss.modal")},hideWithTransition:function(){var t=this,n=setTimeout(function(){t.$element.off(e.support.transition.end),t.hideModal()},500);this.$element.one(e.support.transition.end,function(){clearTimeout(n),t.hideModal()})},hideModal:function(e){this.$element.hide().trigger("hidden"),this.backdrop()},removeBackdrop:function(){this.$backdrop.remove(),this.$backdrop=null},backdrop:function(t){var 
n=this,r=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var i=e.support.transition&&r;this.$backdrop=e('<div class="modal-backdrop '+r+'" />').appendTo(document.body),this.$backdrop.click(this.options.backdrop=="static"?e.proxy(this.$element[0].focus,this.$element[0]):e.proxy(this.hide,this)),i&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),i?this.$backdrop.one(e.support.transition.end,t):t()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),e.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(e.support.transition.end,e.proxy(this.removeBackdrop,this)):this.removeBackdrop()):t&&t()}},e.fn.modal=function(n){return this.each(function(){var r=e(this),i=r.data("modal"),s=e.extend({},e.fn.modal.defaults,r.data(),typeof n=="object"&&n);i||r.data("modal",i=new t(this,s)),typeof n=="string"?i[n]():s.show&&i.show()})},e.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},e.fn.modal.Constructor=t,e(document).on("click.modal.data-api",'[data-toggle="modal"]',function(t){var n=e(this),r=n.attr("href"),i=e(n.attr("data-target")||r&&r.replace(/.*(?=#[^\s]+$)/,"")),s=i.data("modal")?"toggle":e.extend({remote:!/#/.test(r)&&r},i.data(),n.data());t.preventDefault(),i.modal(s).one("hide",function(){n.focus()})})}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("tooltip",e,t)};t.prototype={constructor:t,init:function(t,n,r){var i,s;this.type=t,this.$element=e(n),this.options=this.getOptions(r),this.enabled=!0,this.options.trigger=="click"?this.$element.on("click."+this.type,this.options.selector,e.proxy(this.toggle,this)):this.options.trigger!="manual"&&(i=this.options.trigger=="hover"?"mouseenter":"focus",s=this.options.trigger=="hover"?"mouseleave":"blur",this.$element.on(i+"."+this.type,this.options.selector,e.proxy(this.enter,this)),this.$element.on(s+"."+this.type,this.options.selector,e.proxy(this.leave,this))),this.options.selector?this._options=e.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(t){return t=e.extend({},e.fn[this.type].defaults,t,this.$element.data()),t.delay&&typeof t.delay=="number"&&(t.delay={show:t.delay,hide:t.delay}),t},enter:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);if(!n.options.delay||!n.options.delay.show)return n.show();clearTimeout(this.timeout),n.hoverState="in",this.timeout=setTimeout(function(){n.hoverState=="in"&&n.show()},n.options.delay.show)},leave:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!n.options.delay||!n.options.delay.hide)return n.hide();n.hoverState="out",this.timeout=setTimeout(function(){n.hoverState=="out"&&n.hide()},n.options.delay.hide)},show:function(){var e,t,n,r,i,s,o;if(this.hasContent()&&this.enabled){e=this.tip(),this.setContent(),this.options.animation&&e.addClass("fade"),s=typeof this.options.placement=="function"?this.options.placement.call(this,e[0],this.$element[0]):this.options.placement,t=/in/.test(s),e.detach().css({top:0,left:0,display:"block"}).insertAfter(this.$element),n=this.getPosition(t),r=e[0].offsetWidth,i=e[0].offsetHeight;switch(t?s.split(" ")[1]:s){case"bottom":o={top:n.top+n.height,left:n.left+n.width/2-r/2};break;case"top":o={top:n.top-i,left:n.left+n.width/2-r/2};break;case"left":o={top:n.top+n.height/2-i/2,left:n.left-r};break;case"right":o={top:n.top+n.height/2-i/2,left:n.left+n.width}}e.offset(o).addClass(s).addClass("in")}},setContent:function(){var 
e=this.tip(),t=this.getTitle();e.find(".tooltip-inner")[this.options.html?"html":"text"](t),e.removeClass("fade in top bottom left right")},hide:function(){function r(){var t=setTimeout(function(){n.off(e.support.transition.end).detach()},500);n.one(e.support.transition.end,function(){clearTimeout(t),n.detach()})}var t=this,n=this.tip();return n.removeClass("in"),e.support.transition&&this.$tip.hasClass("fade")?r():n.detach(),this},fixTitle:function(){var e=this.$element;(e.attr("title")||typeof e.attr("data-original-title")!="string")&&e.attr("data-original-title",e.attr("title")||"").removeAttr("title")},hasContent:function(){return this.getTitle()},getPosition:function(t){return e.extend({},t?{top:0,left:0}:this.$element.offset(),{width:this.$element[0].offsetWidth,height:this.$element[0].offsetHeight})},getTitle:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-original-title")||(typeof n.title=="function"?n.title.call(t[0]):n.title),e},tip:function(){return this.$tip=this.$tip||e(this.options.template)},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);n[n.tip().hasClass("in")?"hide":"show"]()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}},e.fn.tooltip=function(n){return this.each(function(){var r=e(this),i=r.data("tooltip"),s=typeof n=="object"&&n;i||r.data("tooltip",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.tooltip.Constructor=t,e.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover",title:"",delay:0,html:!1}}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("popover",e,t)};t.prototype=e.extend({},e.fn.tooltip.Constructor.prototype,{constructor:t,setContent:function(){var e=this.tip(),t=this.getTitle(),n=this.getContent();e.find(".popover-title")[this.options.html?"html":"text"](t),e.find(".popover-content > *")[this.options.html?"html":"text"](n),e.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-content")||(typeof n.content=="function"?n.content.call(t[0]):n.content),e},tip:function(){return this.$tip||(this.$tip=e(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}}),e.fn.popover=function(n){return this.each(function(){var r=e(this),i=r.data("popover"),s=typeof n=="object"&&n;i||r.data("popover",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.popover.Constructor=t,e.fn.popover.defaults=e.extend({},e.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'<div class="popover"><div class="arrow"></div><div class="popover-inner"><h3 class="popover-title"></h3><div class="popover-content"><p></p></div></div></div>'})}(window.jQuery),!function(e){"use strict";function t(t,n){var r=e.proxy(this.process,this),i=e(t).is("body")?e(window):e(t),s;this.options=e.extend({},e.fn.scrollspy.defaults,n),this.$scrollElement=i.on("scroll.scroll-spy.data-api",r),this.selector=(this.options.target||(s=e(t).attr("href"))&&s.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > 
a",this.$body=e("body"),this.refresh(),this.process()}t.prototype={constructor:t,refresh:function(){var t=this,n;this.offsets=e([]),this.targets=e([]),n=this.$body.find(this.selector).map(function(){var t=e(this),n=t.data("target")||t.attr("href"),r=/^#\w/.test(n)&&e(n);return r&&r.length&&[[r.position().top,n]]||null}).sort(function(e,t){return e[0]-t[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},process:function(){var e=this.$scrollElement.scrollTop()+this.options.offset,t=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,n=t-this.$scrollElement.height(),r=this.offsets,i=this.targets,s=this.activeTarget,o;if(e>=n)return s!=(o=i.last()[0])&&this.activate(o);for(o=r.length;o--;)s!=i[o]&&e>=r[o]&&(!r[o+1]||e<=r[o+1])&&this.activate(i[o])},activate:function(t){var n,r;this.activeTarget=t,e(this.selector).parent(".active").removeClass("active"),r=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',n=e(r).parent("li").addClass("active"),n.parent(".dropdown-menu").length&&(n=n.closest("li.dropdown").addClass("active")),n.trigger("activate")}},e.fn.scrollspy=function(n){return this.each(function(){var r=e(this),i=r.data("scrollspy"),s=typeof n=="object"&&n;i||r.data("scrollspy",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.scrollspy.Constructor=t,e.fn.scrollspy.defaults={offset:10},e(window).on("load",function(){e('[data-spy="scroll"]').each(function(){var t=e(this);t.scrollspy(t.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t){this.element=e(t)};t.prototype={constructor:t,show:function(){var t=this.element,n=t.closest("ul:not(.dropdown-menu)"),r=t.attr("data-target"),i,s,o;r||(r=t.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,""));if(t.parent("li").hasClass("active"))return;i=n.find(".active:last a")[0],o=e.Event("show",{relatedTarget:i}),t.trigger(o);if(o.isDefaultPrevented())return;s=e(r),this.activate(t.parent("li"),n),this.activate(s,s.parent(),function(){t.trigger({type:"shown",relatedTarget:i})})},activate:function(t,n,r){function o(){i.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),t.addClass("active"),s?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu")&&t.closest("li.dropdown").addClass("active"),r&&r()}var i=n.find("> .active"),s=r&&e.support.transition&&i.hasClass("fade");s?i.one(e.support.transition.end,o):o(),i.removeClass("in")}},e.fn.tab=function(n){return this.each(function(){var r=e(this),i=r.data("tab");i||r.data("tab",i=new t(this)),typeof n=="string"&&i[n]()})},e.fn.tab.Constructor=t,e(document).on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(t){t.preventDefault(),e(this).tab("show")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.typeahead.defaults,n),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.$menu=e(this.options.menu).appendTo("body"),this.source=this.options.source,this.shown=!1,this.listen()};t.prototype={constructor:t,select:function(){var e=this.$menu.find(".active").attr("data-value");return this.$element.val(this.updater(e)).change(),this.hide()},updater:function(e){return e},show:function(){var t=e.extend({},this.$element.offset(),{height:this.$element[0].offsetHeight});return 
this.$menu.css({top:t.top+t.height,left:t.left}),this.$menu.show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(t){var n;return this.query=this.$element.val(),!this.query||this.query.length<this.options.minLength?this.shown?this.hide():this:(n=e.isFunction(this.source)?this.source(this.query,e.proxy(this.process,this)):this.source,n?this.process(n):this)},process:function(t){var n=this;return t=e.grep(t,function(e){return n.matcher(e)}),t=this.sorter(t),t.length?this.render(t.slice(0,this.options.items)).show():this.shown?this.hide():this},matcher:function(e){return~e.toLowerCase().indexOf(this.query.toLowerCase())},sorter:function(e){var t=[],n=[],r=[],i;while(i=e.shift())i.toLowerCase().indexOf(this.query.toLowerCase())?~i.indexOf(this.query)?n.push(i):r.push(i):t.push(i);return t.concat(n,r)},highlighter:function(e){var t=this.query.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&");return e.replace(new RegExp("("+t+")","ig"),function(e,t){return"<strong>"+t+"</strong>"})},render:function(t){var n=this;return t=e(t).map(function(t,r){return t=e(n.options.item).attr("data-value",r),t.find("a").html(n.highlighter(r)),t[0]}),t.first().addClass("active"),this.$menu.html(t),this},next:function(t){var n=this.$menu.find(".active").removeClass("active"),r=n.next();r.length||(r=e(this.$menu.find("li")[0])),r.addClass("active")},prev:function(e){var t=this.$menu.find(".active").removeClass("active"),n=t.prev();n.length||(n=this.$menu.find("li").last()),n.addClass("active")},listen:function(){this.$element.on("blur",e.proxy(this.blur,this)).on("keypress",e.proxy(this.keypress,this)).on("keyup",e.proxy(this.keyup,this)),this.eventSupported("keydown")&&this.$element.on("keydown",e.proxy(this.keydown,this)),this.$menu.on("click",e.proxy(this.click,this)).on("mouseenter","li",e.proxy(this.mouseenter,this))},eventSupported:function(e){var t=e in this.$element;return t||(this.$element.setAttribute(e,"return;"),t=typeof this.$element[e]=="function"),t},move:function(e){if(!this.shown)return;switch(e.keyCode){case 9:case 13:case 27:e.preventDefault();break;case 38:e.preventDefault(),this.prev();break;case 40:e.preventDefault(),this.next()}e.stopPropagation()},keydown:function(t){this.suppressKeyPressRepeat=!~e.inArray(t.keyCode,[40,38,9,13,27]),this.move(t)},keypress:function(e){if(this.suppressKeyPressRepeat)return;this.move(e)},keyup:function(e){switch(e.keyCode){case 40:case 38:case 16:case 17:case 18:break;case 9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}e.stopPropagation(),e.preventDefault()},blur:function(e){var t=this;setTimeout(function(){t.hide()},150)},click:function(e){e.stopPropagation(),e.preventDefault(),this.select()},mouseenter:function(t){this.$menu.find(".active").removeClass("active"),e(t.currentTarget).addClass("active")}},e.fn.typeahead=function(n){return this.each(function(){var r=e(this),i=r.data("typeahead"),s=typeof n=="object"&&n;i||r.data("typeahead",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.typeahead.defaults={source:[],items:8,menu:'<ul class="typeahead dropdown-menu"></ul>',item:'<li><a href="#"></a></li>',minLength:1},e.fn.typeahead.Constructor=t,e(document).on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(t){var n=e(this);if(n.data("typeahead"))return;t.preventDefault(),n.typeahead(n.data())})}(window.jQuery),!function(e){"use strict";var 
t=function(t,n){this.options=e.extend({},e.fn.affix.defaults,n),this.$window=e(window).on("scroll.affix.data-api",e.proxy(this.checkPosition,this)).on("click.affix.data-api",e.proxy(function(){setTimeout(e.proxy(this.checkPosition,this),1)},this)),this.$element=e(t),this.checkPosition()};t.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var t=e(document).height(),n=this.$window.scrollTop(),r=this.$element.offset(),i=this.options.offset,s=i.bottom,o=i.top,u="affix affix-top affix-bottom",a;typeof i!="object"&&(s=o=i),typeof o=="function"&&(o=i.top()),typeof s=="function"&&(s=i.bottom()),a=this.unpin!=null&&n+this.unpin<=r.top?!1:s!=null&&r.top+this.$element.height()>=t-s?"bottom":o!=null&&n<=o?"top":!1;if(this.affixed===a)return;this.affixed=a,this.unpin=a=="bottom"?r.top-n:null,this.$element.removeClass(u).addClass("affix"+(a?"-"+a:""))},e.fn.affix=function(n){return this.each(function(){var r=e(this),i=r.data("affix"),s=typeof n=="object"&&n;i||r.data("affix",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.affix.Constructor=t,e.fn.affix.defaults={offset:0},e(window).on("load",function(){e('[data-spy="affix"]').each(function(){var t=e(this),n=t.data();n.offset=n.offset||{},n.offsetBottom&&(n.offset.bottom=n.offsetBottom),n.offsetTop&&(n.offset.top=n.offsetTop),t.affix(n)})})}(window.jQuery);

// end of common/static/bootstrap/js/bootstrap.min.js (package zero-common 0.1.10)
!function ($) {
"use strict"; // jshint ;_;
/* CSS TRANSITION SUPPORT (http://www.modernizr.com/)
* ======================================================= */
$(function () {
$.support.transition = (function () {
var transitionEnd = (function () {
var el = document.createElement('bootstrap')
, transEndEventNames = {
'WebkitTransition' : 'webkitTransitionEnd'
, 'MozTransition' : 'transitionend'
, 'OTransition' : 'oTransitionEnd otransitionend'
, 'transition' : 'transitionend'
}
, name
for (name in transEndEventNames){
if (el.style[name] !== undefined) {
return transEndEventNames[name]
}
}
}())
return transitionEnd && {
end: transitionEnd
}
})()
})
}(window.jQuery);/* ==========================================================
* bootstrap-alert.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#alerts
* ==========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* ALERT CLASS DEFINITION
* ====================== */
var dismiss = '[data-dismiss="alert"]'
, Alert = function (el) {
$(el).on('click', dismiss, this.close)
}
Alert.prototype.close = function (e) {
var $this = $(this)
, selector = $this.attr('data-target')
, $parent
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
}
$parent = $(selector)
e && e.preventDefault()
$parent.length || ($parent = $this.hasClass('alert') ? $this : $this.parent())
$parent.trigger(e = $.Event('close'))
if (e.isDefaultPrevented()) return
$parent.removeClass('in')
function removeElement() {
$parent
.trigger('closed')
.remove()
}
$.support.transition && $parent.hasClass('fade') ?
$parent.on($.support.transition.end, removeElement) :
removeElement()
}
/* ALERT PLUGIN DEFINITION
* ======================= */
$.fn.alert = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('alert')
if (!data) $this.data('alert', (data = new Alert(this)))
if (typeof option == 'string') data[option].call($this)
})
}
$.fn.alert.Constructor = Alert
/* ALERT DATA-API
* ============== */
$(document).on('click.alert.data-api', dismiss, Alert.prototype.close)
}(window.jQuery);/* ============================================================
* bootstrap-button.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#buttons
* ============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================ */
!function ($) {
"use strict"; // jshint ;_;
/* BUTTON PUBLIC CLASS DEFINITION
* ============================== */
var Button = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, $.fn.button.defaults, options)
}
Button.prototype.setState = function (state) {
var d = 'disabled'
, $el = this.$element
, data = $el.data()
, val = $el.is('input') ? 'val' : 'html'
state = state + 'Text'
data.resetText || $el.data('resetText', $el[val]())
$el[val](data[state] || this.options[state])
// push to event loop to allow forms to submit
setTimeout(function () {
state == 'loadingText' ?
$el.addClass(d).attr(d, d) :
$el.removeClass(d).removeAttr(d)
}, 0)
}
Button.prototype.toggle = function () {
var $parent = this.$element.closest('[data-toggle="buttons-radio"]')
$parent && $parent
.find('.active')
.removeClass('active')
this.$element.toggleClass('active')
}
/* BUTTON PLUGIN DEFINITION
* ======================== */
$.fn.button = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('button')
, options = typeof option == 'object' && option
if (!data) $this.data('button', (data = new Button(this, options)))
if (option == 'toggle') data.toggle()
else if (option) data.setState(option)
})
}
$.fn.button.defaults = {
loadingText: 'loading...'
}
$.fn.button.Constructor = Button
/* BUTTON DATA-API
* =============== */
$(document).on('click.button.data-api', '[data-toggle^=button]', function (e) {
var $btn = $(e.target)
if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
$btn.button('toggle')
})
}(window.jQuery);/* ==========================================================
* bootstrap-carousel.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#carousel
* ==========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* CAROUSEL CLASS DEFINITION
* ========================= */
var Carousel = function (element, options) {
this.$element = $(element)
this.options = options
this.options.slide && this.slide(this.options.slide)
this.options.pause == 'hover' && this.$element
.on('mouseenter', $.proxy(this.pause, this))
.on('mouseleave', $.proxy(this.cycle, this))
}
Carousel.prototype = {
cycle: function (e) {
if (!e) this.paused = false
this.options.interval
&& !this.paused
&& (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
return this
}
, to: function (pos) {
var $active = this.$element.find('.item.active')
, children = $active.parent().children()
, activePos = children.index($active)
, that = this
if (pos > (children.length - 1) || pos < 0) return
if (this.sliding) {
return this.$element.one('slid', function () {
that.to(pos)
})
}
if (activePos == pos) {
return this.pause().cycle()
}
return this.slide(pos > activePos ? 'next' : 'prev', $(children[pos]))
}
, pause: function (e) {
if (!e) this.paused = true
if (this.$element.find('.next, .prev').length && $.support.transition.end) {
this.$element.trigger($.support.transition.end)
this.cycle()
}
clearInterval(this.interval)
this.interval = null
return this
}
, next: function () {
if (this.sliding) return
return this.slide('next')
}
, prev: function () {
if (this.sliding) return
return this.slide('prev')
}
, slide: function (type, next) {
var $active = this.$element.find('.item.active')
, $next = next || $active[type]()
, isCycling = this.interval
, direction = type == 'next' ? 'left' : 'right'
, fallback = type == 'next' ? 'first' : 'last'
, that = this
, e
this.sliding = true
isCycling && this.pause()
$next = $next.length ? $next : this.$element.find('.item')[fallback]()
e = $.Event('slide', {
relatedTarget: $next[0]
})
if ($next.hasClass('active')) return
if ($.support.transition && this.$element.hasClass('slide')) {
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$next.addClass(type)
$next[0].offsetWidth // force reflow
$active.addClass(direction)
$next.addClass(direction)
this.$element.one($.support.transition.end, function () {
$next.removeClass([type, direction].join(' ')).addClass('active')
$active.removeClass(['active', direction].join(' '))
that.sliding = false
setTimeout(function () { that.$element.trigger('slid') }, 0)
})
} else {
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$active.removeClass('active')
$next.addClass('active')
this.sliding = false
this.$element.trigger('slid')
}
isCycling && this.cycle()
return this
}
}
/* CAROUSEL PLUGIN DEFINITION
* ========================== */
$.fn.carousel = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('carousel')
, options = $.extend({}, $.fn.carousel.defaults, typeof option == 'object' && option)
, action = typeof option == 'string' ? option : options.slide
if (!data) $this.data('carousel', (data = new Carousel(this, options)))
if (typeof option == 'number') data.to(option)
else if (action) data[action]()
else if (options.interval) data.cycle()
})
}
$.fn.carousel.defaults = {
interval: 5000
, pause: 'hover'
}
$.fn.carousel.Constructor = Carousel
/* CAROUSEL DATA-API
* ================= */
$(document).on('click.carousel.data-api', '[data-slide]', function (e) {
var $this = $(this), href
, $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) //strip for ie7
, options = $.extend({}, $target.data(), $this.data())
$target.carousel(options)
e.preventDefault()
})
}(window.jQuery);/* =============================================================
* bootstrap-collapse.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#collapse
* =============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================ */
!function ($) {
"use strict"; // jshint ;_;
/* COLLAPSE PUBLIC CLASS DEFINITION
* ================================ */
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, $.fn.collapse.defaults, options)
if (this.options.parent) {
this.$parent = $(this.options.parent)
}
this.options.toggle && this.toggle()
}
Collapse.prototype = {
constructor: Collapse
, dimension: function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
, show: function () {
var dimension
, scroll
, actives
, hasData
if (this.transitioning) return
dimension = this.dimension()
scroll = $.camelCase(['scroll', dimension].join('-'))
actives = this.$parent && this.$parent.find('> .accordion-group > .in')
if (actives && actives.length) {
hasData = actives.data('collapse')
if (hasData && hasData.transitioning) return
actives.collapse('hide')
hasData || actives.data('collapse', null)
}
this.$element[dimension](0)
this.transition('addClass', $.Event('show'), 'shown')
$.support.transition && this.$element[dimension](this.$element[0][scroll])
}
, hide: function () {
var dimension
if (this.transitioning) return
dimension = this.dimension()
this.reset(this.$element[dimension]())
this.transition('removeClass', $.Event('hide'), 'hidden')
this.$element[dimension](0)
}
, reset: function (size) {
var dimension = this.dimension()
this.$element
.removeClass('collapse')
[dimension](size || 'auto')
[0].offsetWidth
this.$element[size !== null ? 'addClass' : 'removeClass']('collapse')
return this
}
, transition: function (method, startEvent, completeEvent) {
var that = this
, complete = function () {
if (startEvent.type == 'show') that.reset()
that.transitioning = 0
that.$element.trigger(completeEvent)
}
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
this.transitioning = 1
this.$element[method]('in')
$.support.transition && this.$element.hasClass('collapse') ?
this.$element.one($.support.transition.end, complete) :
complete()
}
, toggle: function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
}
/* COLLAPSIBLE PLUGIN DEFINITION
* ============================== */
$.fn.collapse = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('collapse')
, options = typeof option == 'object' && option
if (!data) $this.data('collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.collapse.defaults = {
toggle: true
}
$.fn.collapse.Constructor = Collapse
/* COLLAPSIBLE DATA-API
* ==================== */
$(document).on('click.collapse.data-api', '[data-toggle=collapse]', function (e) {
var $this = $(this), href
, target = $this.attr('data-target')
|| e.preventDefault()
|| (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') //strip for ie7
, option = $(target).data('collapse') ? 'toggle' : $this.data()
$this[$(target).hasClass('in') ? 'addClass' : 'removeClass']('collapsed')
$(target).collapse(option)
})
}(window.jQuery);/* ============================================================
* bootstrap-dropdown.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#dropdowns
* ============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================ */
!function ($) {
"use strict"; // jshint ;_;
/* DROPDOWN CLASS DEFINITION
* ========================= */
var toggle = '[data-toggle=dropdown]'
, Dropdown = function (element) {
var $el = $(element).on('click.dropdown.data-api', this.toggle)
$('html').on('click.dropdown.data-api', function () {
$el.parent().removeClass('open')
})
}
Dropdown.prototype = {
constructor: Dropdown
, toggle: function (e) {
var $this = $(this)
, $parent
, isActive
if ($this.is('.disabled, :disabled')) return
$parent = getParent($this)
isActive = $parent.hasClass('open')
clearMenus()
if (!isActive) {
$parent.toggleClass('open')
$this.focus()
}
return false
}
, keydown: function (e) {
var $this
, $items
, $active
, $parent
, isActive
, index
if (!/(38|40|27)/.test(e.keyCode)) return
$this = $(this)
e.preventDefault()
e.stopPropagation()
if ($this.is('.disabled, :disabled')) return
$parent = getParent($this)
isActive = $parent.hasClass('open')
if (!isActive || (isActive && e.keyCode == 27)) return $this.click()
$items = $('[role=menu] li:not(.divider) a', $parent)
if (!$items.length) return
index = $items.index($items.filter(':focus'))
if (e.keyCode == 38 && index > 0) index-- // up
if (e.keyCode == 40 && index < $items.length - 1) index++ // down
if (!~index) index = 0
$items
.eq(index)
.focus()
}
}
function clearMenus() {
$(toggle).each(function () {
getParent($(this)).removeClass('open')
})
}
function getParent($this) {
var selector = $this.attr('data-target')
, $parent
if (!selector) {
selector = $this.attr('href')
selector = selector && /#/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
}
$parent = $(selector)
$parent.length || ($parent = $this.parent())
return $parent
}
/* DROPDOWN PLUGIN DEFINITION
* ========================== */
$.fn.dropdown = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('dropdown')
if (!data) $this.data('dropdown', (data = new Dropdown(this)))
if (typeof option == 'string') data[option].call($this)
})
}
$.fn.dropdown.Constructor = Dropdown
/* APPLY TO STANDARD DROPDOWN ELEMENTS
* =================================== */
$(document)
.on('click.dropdown.data-api touchstart.dropdown.data-api', clearMenus)
.on('click.dropdown touchstart.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
.on('click.dropdown.data-api touchstart.dropdown.data-api' , toggle, Dropdown.prototype.toggle)
.on('keydown.dropdown.data-api touchstart.dropdown.data-api', toggle + ', [role=menu]' , Dropdown.prototype.keydown)
}(window.jQuery);/* =========================================================
* bootstrap-modal.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#modals
* =========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================= */
!function ($) {
"use strict"; // jshint ;_;
/* MODAL CLASS DEFINITION
* ====================== */
var Modal = function (element, options) {
this.options = options
this.$element = $(element)
.delegate('[data-dismiss="modal"]', 'click.dismiss.modal', $.proxy(this.hide, this))
this.options.remote && this.$element.find('.modal-body').load(this.options.remote)
}
Modal.prototype = {
constructor: Modal
, toggle: function () {
return this[!this.isShown ? 'show' : 'hide']()
}
, show: function () {
var that = this
, e = $.Event('show')
this.$element.trigger(e)
if (this.isShown || e.isDefaultPrevented()) return
this.isShown = true
this.escape()
this.backdrop(function () {
var transition = $.support.transition && that.$element.hasClass('fade')
if (!that.$element.parent().length) {
that.$element.appendTo(document.body) //don't move modals dom position
}
that.$element
.show()
if (transition) {
that.$element[0].offsetWidth // force reflow
}
that.$element
.addClass('in')
.attr('aria-hidden', false)
that.enforceFocus()
transition ?
that.$element.one($.support.transition.end, function () { that.$element.focus().trigger('shown') }) :
that.$element.focus().trigger('shown')
})
}
, hide: function (e) {
e && e.preventDefault()
var that = this
e = $.Event('hide')
this.$element.trigger(e)
if (!this.isShown || e.isDefaultPrevented()) return
this.isShown = false
this.escape()
$(document).off('focusin.modal')
this.$element
.removeClass('in')
.attr('aria-hidden', true)
$.support.transition && this.$element.hasClass('fade') ?
this.hideWithTransition() :
this.hideModal()
}
, enforceFocus: function () {
var that = this
$(document).on('focusin.modal', function (e) {
if (that.$element[0] !== e.target && !that.$element.has(e.target).length) {
that.$element.focus()
}
})
}
, escape: function () {
var that = this
if (this.isShown && this.options.keyboard) {
this.$element.on('keyup.dismiss.modal', function ( e ) {
e.which == 27 && that.hide()
})
} else if (!this.isShown) {
this.$element.off('keyup.dismiss.modal')
}
}
, hideWithTransition: function () {
var that = this
, timeout = setTimeout(function () {
that.$element.off($.support.transition.end)
that.hideModal()
}, 500)
this.$element.one($.support.transition.end, function () {
clearTimeout(timeout)
that.hideModal()
})
}
, hideModal: function (that) {
this.$element
.hide()
.trigger('hidden')
this.backdrop()
}
, removeBackdrop: function () {
this.$backdrop.remove()
this.$backdrop = null
}
, backdrop: function (callback) {
var that = this
, animate = this.$element.hasClass('fade') ? 'fade' : ''
if (this.isShown && this.options.backdrop) {
var doAnimate = $.support.transition && animate
this.$backdrop = $('<div class="modal-backdrop ' + animate + '" />')
.appendTo(document.body)
this.$backdrop.click(
this.options.backdrop == 'static' ?
$.proxy(this.$element[0].focus, this.$element[0])
: $.proxy(this.hide, this)
)
if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
this.$backdrop.addClass('in')
doAnimate ?
this.$backdrop.one($.support.transition.end, callback) :
callback()
} else if (!this.isShown && this.$backdrop) {
this.$backdrop.removeClass('in')
$.support.transition && this.$element.hasClass('fade')?
this.$backdrop.one($.support.transition.end, $.proxy(this.removeBackdrop, this)) :
this.removeBackdrop()
} else if (callback) {
callback()
}
}
}
/* MODAL PLUGIN DEFINITION
* ======================= */
$.fn.modal = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('modal')
, options = $.extend({}, $.fn.modal.defaults, $this.data(), typeof option == 'object' && option)
if (!data) $this.data('modal', (data = new Modal(this, options)))
if (typeof option == 'string') data[option]()
else if (options.show) data.show()
})
}
$.fn.modal.defaults = {
backdrop: true
, keyboard: true
, show: true
}
$.fn.modal.Constructor = Modal
/* MODAL DATA-API
* ============== */
$(document).on('click.modal.data-api', '[data-toggle="modal"]', function (e) {
var $this = $(this)
, href = $this.attr('href')
, $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) //strip for ie7
, option = $target.data('modal') ? 'toggle' : $.extend({ remote:!/#/.test(href) && href }, $target.data(), $this.data())
e.preventDefault()
$target
.modal(option)
.one('hide', function () {
$this.focus()
})
})
}(window.jQuery);
/* ===========================================================
* bootstrap-tooltip.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#tooltips
* Inspired by the original jQuery.tipsy by Jason Frame
* ===========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* TOOLTIP PUBLIC CLASS DEFINITION
* =============================== */
var Tooltip = function (element, options) {
this.init('tooltip', element, options)
}
Tooltip.prototype = {
constructor: Tooltip
, init: function (type, element, options) {
var eventIn
, eventOut
this.type = type
this.$element = $(element)
this.options = this.getOptions(options)
this.enabled = true
if (this.options.trigger == 'click') {
this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
} else if (this.options.trigger != 'manual') {
eventIn = this.options.trigger == 'hover' ? 'mouseenter' : 'focus'
eventOut = this.options.trigger == 'hover' ? 'mouseleave' : 'blur'
this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
}
this.options.selector ?
(this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
this.fixTitle()
}
, getOptions: function (options) {
options = $.extend({}, $.fn[this.type].defaults, options, this.$element.data())
if (options.delay && typeof options.delay == 'number') {
options.delay = {
show: options.delay
, hide: options.delay
}
}
return options
}
, enter: function (e) {
var self = $(e.currentTarget)[this.type](this._options).data(this.type)
if (!self.options.delay || !self.options.delay.show) return self.show()
clearTimeout(this.timeout)
self.hoverState = 'in'
this.timeout = setTimeout(function() {
if (self.hoverState == 'in') self.show()
}, self.options.delay.show)
}
, leave: function (e) {
var self = $(e.currentTarget)[this.type](this._options).data(this.type)
if (this.timeout) clearTimeout(this.timeout)
if (!self.options.delay || !self.options.delay.hide) return self.hide()
self.hoverState = 'out'
this.timeout = setTimeout(function() {
if (self.hoverState == 'out') self.hide()
}, self.options.delay.hide)
}
, show: function () {
var $tip
, inside
, pos
, actualWidth
, actualHeight
, placement
, tp
if (this.hasContent() && this.enabled) {
$tip = this.tip()
this.setContent()
if (this.options.animation) {
$tip.addClass('fade')
}
placement = typeof this.options.placement == 'function' ?
this.options.placement.call(this, $tip[0], this.$element[0]) :
this.options.placement
inside = /in/.test(placement)
$tip
.detach()
.css({ top: 0, left: 0, display: 'block' })
.insertAfter(this.$element)
pos = this.getPosition(inside)
actualWidth = $tip[0].offsetWidth
actualHeight = $tip[0].offsetHeight
switch (inside ? placement.split(' ')[1] : placement) {
case 'bottom':
tp = {top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2}
break
case 'top':
tp = {top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2}
break
case 'left':
tp = {top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth}
break
case 'right':
tp = {top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width}
break
}
$tip
.offset(tp)
.addClass(placement)
.addClass('in')
}
}
, setContent: function () {
var $tip = this.tip()
, title = this.getTitle()
$tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
$tip.removeClass('fade in top bottom left right')
}
, hide: function () {
var that = this
, $tip = this.tip()
$tip.removeClass('in')
function removeWithAnimation() {
var timeout = setTimeout(function () {
$tip.off($.support.transition.end).detach()
}, 500)
$tip.one($.support.transition.end, function () {
clearTimeout(timeout)
$tip.detach()
})
}
$.support.transition && this.$tip.hasClass('fade') ?
removeWithAnimation() :
$tip.detach()
return this
}
, fixTitle: function () {
var $e = this.$element
if ($e.attr('title') || typeof($e.attr('data-original-title')) != 'string') {
$e.attr('data-original-title', $e.attr('title') || '').removeAttr('title')
}
}
, hasContent: function () {
return this.getTitle()
}
, getPosition: function (inside) {
return $.extend({}, (inside ? {top: 0, left: 0} : this.$element.offset()), {
width: this.$element[0].offsetWidth
, height: this.$element[0].offsetHeight
})
}
, getTitle: function () {
var title
, $e = this.$element
, o = this.options
title = $e.attr('data-original-title')
|| (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
return title
}
, tip: function () {
return this.$tip = this.$tip || $(this.options.template)
}
, validate: function () {
if (!this.$element[0].parentNode) {
this.hide()
this.$element = null
this.options = null
}
}
, enable: function () {
this.enabled = true
}
, disable: function () {
this.enabled = false
}
, toggleEnabled: function () {
this.enabled = !this.enabled
}
, toggle: function (e) {
var self = $(e.currentTarget)[this.type](this._options).data(this.type)
self[self.tip().hasClass('in') ? 'hide' : 'show']()
}
, destroy: function () {
this.hide().$element.off('.' + this.type).removeData(this.type)
}
}
/* TOOLTIP PLUGIN DEFINITION
* ========================= */
$.fn.tooltip = function ( option ) {
return this.each(function () {
var $this = $(this)
, data = $this.data('tooltip')
, options = typeof option == 'object' && option
if (!data) $this.data('tooltip', (data = new Tooltip(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.tooltip.Constructor = Tooltip
$.fn.tooltip.defaults = {
animation: true
, placement: 'top'
, selector: false
, template: '<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>'
, trigger: 'hover'
, title: ''
, delay: 0
, html: false
}
}(window.jQuery);/* ===========================================================
* bootstrap-popover.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#popovers
* ===========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* POPOVER PUBLIC CLASS DEFINITION
* =============================== */
var Popover = function (element, options) {
this.init('popover', element, options)
}
/* NOTE: POPOVER EXTENDS BOOTSTRAP-TOOLTIP.js
========================================== */
Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype, {
constructor: Popover
, setContent: function () {
var $tip = this.tip()
, title = this.getTitle()
, content = this.getContent()
$tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
$tip.find('.popover-content > *')[this.options.html ? 'html' : 'text'](content)
$tip.removeClass('fade top bottom left right in')
}
, hasContent: function () {
return this.getTitle() || this.getContent()
}
, getContent: function () {
var content
, $e = this.$element
, o = this.options
content = $e.attr('data-content')
|| (typeof o.content == 'function' ? o.content.call($e[0]) : o.content)
return content
}
, tip: function () {
if (!this.$tip) {
this.$tip = $(this.options.template)
}
return this.$tip
}
, destroy: function () {
this.hide().$element.off('.' + this.type).removeData(this.type)
}
})
/* POPOVER PLUGIN DEFINITION
* ======================= */
$.fn.popover = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('popover')
, options = typeof option == 'object' && option
if (!data) $this.data('popover', (data = new Popover(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.popover.Constructor = Popover
$.fn.popover.defaults = $.extend({} , $.fn.tooltip.defaults, {
placement: 'right'
, trigger: 'click'
, content: ''
, template: '<div class="popover"><div class="arrow"></div><div class="popover-inner"><h3 class="popover-title"></h3><div class="popover-content"><p></p></div></div></div>'
})
}(window.jQuery);/* =============================================================
* bootstrap-scrollspy.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#scrollspy
* =============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* SCROLLSPY CLASS DEFINITION
* ========================== */
function ScrollSpy(element, options) {
var process = $.proxy(this.process, this)
, $element = $(element).is('body') ? $(window) : $(element)
, href
this.options = $.extend({}, $.fn.scrollspy.defaults, options)
this.$scrollElement = $element.on('scroll.scroll-spy.data-api', process)
this.selector = (this.options.target
|| ((href = $(element).attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) //strip for ie7
|| '') + ' .nav li > a'
this.$body = $('body')
this.refresh()
this.process()
}
ScrollSpy.prototype = {
constructor: ScrollSpy
, refresh: function () {
var self = this
, $targets
this.offsets = $([])
this.targets = $([])
$targets = this.$body
.find(this.selector)
.map(function () {
var $el = $(this)
, href = $el.data('target') || $el.attr('href')
, $href = /^#\w/.test(href) && $(href)
return ( $href
&& $href.length
&& [[ $href.position().top, href ]] ) || null
})
.sort(function (a, b) { return a[0] - b[0] })
.each(function () {
self.offsets.push(this[0])
self.targets.push(this[1])
})
}
, process: function () {
var scrollTop = this.$scrollElement.scrollTop() + this.options.offset
, scrollHeight = this.$scrollElement[0].scrollHeight || this.$body[0].scrollHeight
, maxScroll = scrollHeight - this.$scrollElement.height()
, offsets = this.offsets
, targets = this.targets
, activeTarget = this.activeTarget
, i
if (scrollTop >= maxScroll) {
return activeTarget != (i = targets.last()[0])
&& this.activate ( i )
}
for (i = offsets.length; i--;) {
activeTarget != targets[i]
&& scrollTop >= offsets[i]
&& (!offsets[i + 1] || scrollTop <= offsets[i + 1])
&& this.activate( targets[i] )
}
}
, activate: function (target) {
var active
, selector
this.activeTarget = target
$(this.selector)
.parent('.active')
.removeClass('active')
selector = this.selector
+ '[data-target="' + target + '"],'
+ this.selector + '[href="' + target + '"]'
active = $(selector)
.parent('li')
.addClass('active')
if (active.parent('.dropdown-menu').length) {
active = active.closest('li.dropdown').addClass('active')
}
active.trigger('activate')
}
}
/* SCROLLSPY PLUGIN DEFINITION
* =========================== */
$.fn.scrollspy = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('scrollspy')
, options = typeof option == 'object' && option
if (!data) $this.data('scrollspy', (data = new ScrollSpy(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.scrollspy.Constructor = ScrollSpy
$.fn.scrollspy.defaults = {
offset: 10
}
/* SCROLLSPY DATA-API
* ================== */
$(window).on('load', function () {
$('[data-spy="scroll"]').each(function () {
var $spy = $(this)
$spy.scrollspy($spy.data())
})
})
}(window.jQuery);/* ========================================================
* bootstrap-tab.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#tabs
* ========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ======================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* TAB CLASS DEFINITION
* ==================== */
var Tab = function (element) {
this.element = $(element)
}
Tab.prototype = {
constructor: Tab
, show: function () {
var $this = this.element
, $ul = $this.closest('ul:not(.dropdown-menu)')
, selector = $this.attr('data-target')
, previous
, $target
, e
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
}
if ( $this.parent('li').hasClass('active') ) return
previous = $ul.find('.active:last a')[0]
e = $.Event('show', {
relatedTarget: previous
})
$this.trigger(e)
if (e.isDefaultPrevented()) return
$target = $(selector)
this.activate($this.parent('li'), $ul)
this.activate($target, $target.parent(), function () {
$this.trigger({
type: 'shown'
, relatedTarget: previous
})
})
}
, activate: function ( element, container, callback) {
var $active = container.find('> .active')
, transition = callback
&& $.support.transition
&& $active.hasClass('fade')
function next() {
$active
.removeClass('active')
.find('> .dropdown-menu > .active')
.removeClass('active')
element.addClass('active')
if (transition) {
element[0].offsetWidth // reflow for transition
element.addClass('in')
} else {
element.removeClass('fade')
}
        if ( element.parent('.dropdown-menu').length ) {
element.closest('li.dropdown').addClass('active')
}
callback && callback()
}
transition ?
$active.one($.support.transition.end, next) :
next()
$active.removeClass('in')
}
}
/* TAB PLUGIN DEFINITION
* ===================== */
$.fn.tab = function ( option ) {
return this.each(function () {
var $this = $(this)
, data = $this.data('tab')
if (!data) $this.data('tab', (data = new Tab(this)))
if (typeof option == 'string') data[option]()
})
}
$.fn.tab.Constructor = Tab
/* TAB DATA-API
* ============ */
$(document).on('click.tab.data-api', '[data-toggle="tab"], [data-toggle="pill"]', function (e) {
e.preventDefault()
$(this).tab('show')
})
}(window.jQuery);/* =============================================================
* bootstrap-typeahead.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#typeahead
* =============================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============================================================ */
!function($){
"use strict"; // jshint ;_;
/* TYPEAHEAD PUBLIC CLASS DEFINITION
* ================================= */
var Typeahead = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, $.fn.typeahead.defaults, options)
this.matcher = this.options.matcher || this.matcher
this.sorter = this.options.sorter || this.sorter
this.highlighter = this.options.highlighter || this.highlighter
this.updater = this.options.updater || this.updater
this.$menu = $(this.options.menu).appendTo('body')
this.source = this.options.source
this.shown = false
this.listen()
}
Typeahead.prototype = {
constructor: Typeahead
, select: function () {
var val = this.$menu.find('.active').attr('data-value')
this.$element
.val(this.updater(val))
.change()
return this.hide()
}
, updater: function (item) {
return item
}
, show: function () {
var pos = $.extend({}, this.$element.offset(), {
height: this.$element[0].offsetHeight
})
this.$menu.css({
top: pos.top + pos.height
, left: pos.left
})
this.$menu.show()
this.shown = true
return this
}
, hide: function () {
this.$menu.hide()
this.shown = false
return this
}
, lookup: function (event) {
var items
this.query = this.$element.val()
if (!this.query || this.query.length < this.options.minLength) {
return this.shown ? this.hide() : this
}
items = $.isFunction(this.source) ? this.source(this.query, $.proxy(this.process, this)) : this.source
return items ? this.process(items) : this
}
, process: function (items) {
var that = this
items = $.grep(items, function (item) {
return that.matcher(item)
})
items = this.sorter(items)
if (!items.length) {
return this.shown ? this.hide() : this
}
return this.render(items.slice(0, this.options.items)).show()
}
, matcher: function (item) {
return ~item.toLowerCase().indexOf(this.query.toLowerCase())
}
, sorter: function (items) {
var beginswith = []
, caseSensitive = []
, caseInsensitive = []
, item
while (item = items.shift()) {
if (!item.toLowerCase().indexOf(this.query.toLowerCase())) beginswith.push(item)
else if (~item.indexOf(this.query)) caseSensitive.push(item)
else caseInsensitive.push(item)
}
return beginswith.concat(caseSensitive, caseInsensitive)
}
, highlighter: function (item) {
var query = this.query.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, '\\$&')
return item.replace(new RegExp('(' + query + ')', 'ig'), function ($1, match) {
return '<strong>' + match + '</strong>'
})
}
, render: function (items) {
var that = this
items = $(items).map(function (i, item) {
i = $(that.options.item).attr('data-value', item)
i.find('a').html(that.highlighter(item))
return i[0]
})
items.first().addClass('active')
this.$menu.html(items)
return this
}
, next: function (event) {
var active = this.$menu.find('.active').removeClass('active')
, next = active.next()
if (!next.length) {
next = $(this.$menu.find('li')[0])
}
next.addClass('active')
}
, prev: function (event) {
var active = this.$menu.find('.active').removeClass('active')
, prev = active.prev()
if (!prev.length) {
prev = this.$menu.find('li').last()
}
prev.addClass('active')
}
, listen: function () {
this.$element
.on('blur', $.proxy(this.blur, this))
.on('keypress', $.proxy(this.keypress, this))
.on('keyup', $.proxy(this.keyup, this))
if (this.eventSupported('keydown')) {
this.$element.on('keydown', $.proxy(this.keydown, this))
}
this.$menu
.on('click', $.proxy(this.click, this))
.on('mouseenter', 'li', $.proxy(this.mouseenter, this))
}
, eventSupported: function(eventName) {
var isSupported = eventName in this.$element
if (!isSupported) {
this.$element.setAttribute(eventName, 'return;')
isSupported = typeof this.$element[eventName] === 'function'
}
return isSupported
}
, move: function (e) {
if (!this.shown) return
switch(e.keyCode) {
case 9: // tab
case 13: // enter
case 27: // escape
e.preventDefault()
break
case 38: // up arrow
e.preventDefault()
this.prev()
break
case 40: // down arrow
e.preventDefault()
this.next()
break
}
e.stopPropagation()
}
, keydown: function (e) {
this.suppressKeyPressRepeat = !~$.inArray(e.keyCode, [40,38,9,13,27])
this.move(e)
}
, keypress: function (e) {
if (this.suppressKeyPressRepeat) return
this.move(e)
}
, keyup: function (e) {
switch(e.keyCode) {
case 40: // down arrow
case 38: // up arrow
case 16: // shift
case 17: // ctrl
case 18: // alt
break
case 9: // tab
case 13: // enter
if (!this.shown) return
this.select()
break
case 27: // escape
if (!this.shown) return
this.hide()
break
default:
this.lookup()
}
e.stopPropagation()
e.preventDefault()
}
, blur: function (e) {
var that = this
setTimeout(function () { that.hide() }, 150)
}
, click: function (e) {
e.stopPropagation()
e.preventDefault()
this.select()
}
, mouseenter: function (e) {
this.$menu.find('.active').removeClass('active')
$(e.currentTarget).addClass('active')
}
}
/* TYPEAHEAD PLUGIN DEFINITION
* =========================== */
$.fn.typeahead = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('typeahead')
, options = typeof option == 'object' && option
if (!data) $this.data('typeahead', (data = new Typeahead(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.typeahead.defaults = {
source: []
, items: 8
, menu: '<ul class="typeahead dropdown-menu"></ul>'
, item: '<li><a href="#"></a></li>'
, minLength: 1
}
$.fn.typeahead.Constructor = Typeahead
/* TYPEAHEAD DATA-API
* ================== */
$(document).on('focus.typeahead.data-api', '[data-provide="typeahead"]', function (e) {
var $this = $(this)
if ($this.data('typeahead')) return
e.preventDefault()
$this.typeahead($this.data())
})
}(window.jQuery);
/* ==========================================================
* bootstrap-affix.js v2.2.1
* http://twitter.github.com/bootstrap/javascript.html#affix
* ==========================================================
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
!function ($) {
"use strict"; // jshint ;_;
/* AFFIX CLASS DEFINITION
* ====================== */
var Affix = function (element, options) {
this.options = $.extend({}, $.fn.affix.defaults, options)
this.$window = $(window)
.on('scroll.affix.data-api', $.proxy(this.checkPosition, this))
.on('click.affix.data-api', $.proxy(function () { setTimeout($.proxy(this.checkPosition, this), 1) }, this))
this.$element = $(element)
this.checkPosition()
}
Affix.prototype.checkPosition = function () {
if (!this.$element.is(':visible')) return
var scrollHeight = $(document).height()
, scrollTop = this.$window.scrollTop()
, position = this.$element.offset()
, offset = this.options.offset
, offsetBottom = offset.bottom
, offsetTop = offset.top
, reset = 'affix affix-top affix-bottom'
, affix
if (typeof offset != 'object') offsetBottom = offsetTop = offset
if (typeof offsetTop == 'function') offsetTop = offset.top()
if (typeof offsetBottom == 'function') offsetBottom = offset.bottom()
affix = this.unpin != null && (scrollTop + this.unpin <= position.top) ?
false : offsetBottom != null && (position.top + this.$element.height() >= scrollHeight - offsetBottom) ?
'bottom' : offsetTop != null && scrollTop <= offsetTop ?
'top' : false
if (this.affixed === affix) return
this.affixed = affix
this.unpin = affix == 'bottom' ? position.top - scrollTop : null
this.$element.removeClass(reset).addClass('affix' + (affix ? '-' + affix : ''))
}
/* AFFIX PLUGIN DEFINITION
* ======================= */
$.fn.affix = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('affix')
, options = typeof option == 'object' && option
if (!data) $this.data('affix', (data = new Affix(this, options)))
if (typeof option == 'string') data[option]()
})
}
$.fn.affix.Constructor = Affix
$.fn.affix.defaults = {
offset: 0
}
/* AFFIX DATA-API
* ============== */
$(window).on('load', function () {
$('[data-spy="affix"]').each(function () {
var $spy = $(this)
, data = $spy.data()
data.offset = data.offset || {}
data.offsetBottom && (data.offset.bottom = data.offsetBottom)
data.offsetTop && (data.offset.top = data.offsetTop)
$spy.affix(data)
})
})
}(window.jQuery); | zero-common | /zero-common-0.1.10.tar.gz/zero-common-0.1.10/common/static/bootstrap/js/bootstrap.js | bootstrap.js |
var utils = {
log_object: function(obj){
for (var field in obj) {
jQuery(document).log(field + ": " + obj[field])
}
},
trim: function(text) {
        /* Removes whitespace at the start and end of a string */
return text.replace(/\s+$/,'').replace(/^\s+/,'');
},
inner_trim: function(text) {
        /* Collapses repeated whitespace inside a string */
return text.replace(/\s+/g,' ')
},
split2array: function(text, character) {
        /* Returns the string split into an array on character */
if (character == undefined) {character = ' '}
return text.split(character)
},
counter: {
words: function(text, character) {
            /* Returns the number of words */
if (character == undefined) {character = ' '}
if (text == undefined) {return 0;}
            return utils.split2array(utils.inner_trim(utils.trim(text)), character).length
},
tags: function(text) {
            /* Returns the number of non-empty tags */
            var tags = utils.split2array(text, ',')
            var count = 0
            for (var tag in tags) {
tag = utils.inner_trim(utils.trim(tags[tag]))
if (tag != '') {
count++
}
}
return count
},
chars: function(text) {
            /* Returns the number of characters */
if (text == undefined) {
return 0;
}
return text.length;
}
},
redirect: function(url) {
// simulates similar behavior as an HTTP redirect
window.location.replace(url);
},
replace_link: function($link, options) {
        /* Replaces a link with a new one */
        // Replace the href
        var href = $link.attr('href');
$link.attr('href', options.new_href);
        // Replace the classes
$link.removeClass(options.old_class);
$link.addClass(options.new_class);
        // Replace the text
$link.html(options.new_html);
}
}
jQuery.noConflict();
(function($) {
/***
* AΓ±ade la opciΓ³n de logging a los elementos jquery
***/
$.fn.log = function (msg, options) {
        var settings = {
            level: 'info'
        }
        if (options) {
            $.extend(settings, options)
        }
try {
if (settings.level == 'info') {
console.log("%s: %o", msg, this);
} else if (settings.level == 'error') {
console.error("%s: %o", msg, this);
}
return this;
} catch(e) {
//alert(msg)
}
return this;
};
})(jQuery);
var messages = {
generate: function(type, options) {
return jQuery("<div class='" + type + " corner' />").html(options.content)
}
};
(function($) {
/* *
* AΓ±ade la clase cuando el puntero esta sobre el elemento y lo elimina
* cuando ya no lo esta.
* */
$.fn.addHover = function(options) {
var settings = { klass: 'hover' }
if (options) {
$.extend(settings, options)
}
return this.hover(
function(){ $(this).addClass(settings.klass); },
function(){ $(this).removeClass(settings.klass); }
)
}
})(jQuery);
(function($) {
/***
     * Increments or decrements a counter defined in an html element. Takes a
     * parameter with the action to perform: *increase* or *decrease*
***/
$.fn.counter = function(action) {
var $counter = this;
        var extra;
        if (action == 'decrease') {
            extra = -1;
        } else if (action == 'increase') {
            extra = 1;
        } else {
            console.error('invalid action')
            extra = 0;
        }
try {
$counter.html(parseInt($counter.html()) + extra)
} catch (e) {
            $counter.log('The element is not a counter', {'level': 'error'});
}
return $counter;
};
})(jQuery);
(function($) {
/***
     * Configures a link to act as an asynchronous action.
**/
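    /* Usage sketch (container and link selectors are hypothetical):
     *   jQuery('#item-list').action('a.delete-item', { check_confirm: true })
     */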
$.fn.action = function(delegated, options) {
var $object = this,
selector = $object.selector;
var show_alert = function($object, $link, data) {
alert(data.message)
}
// Sobrescribimos la configuraciΓ³n
var settings = {
check_confirm: false,
onsuccess: show_alert,
onfail: show_alert,
oncancel: show_alert
};
if (options) {
$.extend(settings, options);
}
$object.delegate(delegated, 'click', function(event) {
event.preventDefault();
var $delegated = $(this),
$object = $delegated.parents(selector),
continue_action = true,
            url_action = $delegated.attr('href');
// Verifica si la acciΓ³n requiere confirmaciΓ³n.
if (settings.check_confirm) {
var confirm_message = "",
confirm_action = false;
$.ajax({
url: url_action,
success: function(json) {
if (json.success) {
confirm_action = json.confirm
confirm_message = json.confirm_message
}
},
async: false,
dataType: 'json'
});
// Solicita la confirmaciΓ³n del usuario.
if (confirm_action) {
continue_action = confirm(confirm_message)
}
}
if (continue_action) {
// Enviamos la peticiΓ³n para realizar la acciΓ³n.
$.post(url_action, function(json) {
if (json.success) {
settings.onsuccess($object, $delegated, json);
} else {
settings.onfail($object, $delegated, json);
}
});
} else {
settings.oncancel($object, $delegated);
}
})
}
})(jQuery);
(function($) {
/***
     * Configures a form to be processed asynchronously.
***/
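    /* Usage sketch (the form selector is hypothetical):
     *   jQuery('form.comment').async_form({ onsuccess: function($form, json) { alert(json.message) } })
     */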
$.fn.async_form = function(options) {
var show_message = function($form, json) {
alert(json.message)
}
        var settings = {
submit_selector: 'button',
onsuccess: show_message,
onfail: show_message
}
if (options) {
$.extend(settings, options)
}
this.submit(function(event) {
event.preventDefault();
var $form = jQuery(this),
$button = $form.find(settings.submit_selector);
jQuery.post(
$form.attr('action'),
$form.serialize(),
function(json) {
if (json.success) {
settings.onsuccess($form, json);
} else {
settings.onfail($form, json);
}
},
'json'
);
});
}
})(jQuery); | zero-common | /zero-common-0.1.10.tar.gz/zero-common-0.1.10/common/static/js/jquery.common.js | jquery.common.js |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models.vgg import vgg16
class L_color(nn.Module):
def __init__(self):
super(L_color, self).__init__()
def forward(self, x):
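        # Color constancy (gray-world) loss: penalise pairwise differences
        # between the mean R, G and B values of the enhanced image.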
b, c, h, w = x.shape
mean_rgb = torch.mean(x, [2, 3], keepdim=True)
mr, mg, mb = torch.split(mean_rgb, 1, dim=1)
Drg = torch.pow(mr - mg, 2)
Drb = torch.pow(mr - mb, 2)
Dgb = torch.pow(mb - mg, 2)
k = torch.pow(torch.pow(Drg, 2) + torch.pow(Drb, 2) + torch.pow(Dgb, 2), 0.5)
return k
class L_spa(nn.Module):
def __init__(self):
super(L_spa, self).__init__()
        # Fixed 3x3 difference kernels, one per neighbouring direction (not trainable)
kernel_left = torch.FloatTensor([[0, 0, 0], [-1, 1, 0], [0, 0, 0]]).cuda().unsqueeze(0).unsqueeze(0)
kernel_right = torch.FloatTensor([[0, 0, 0], [0, 1, -1], [0, 0, 0]]).cuda().unsqueeze(0).unsqueeze(0)
kernel_up = torch.FloatTensor([[0, -1, 0], [0, 1, 0], [0, 0, 0]]).cuda().unsqueeze(0).unsqueeze(0)
kernel_down = torch.FloatTensor([[0, 0, 0], [0, 1, 0], [0, -1, 0]]).cuda().unsqueeze(0).unsqueeze(0)
self.weight_left = nn.Parameter(data=kernel_left, requires_grad=False)
self.weight_right = nn.Parameter(data=kernel_right, requires_grad=False)
self.weight_up = nn.Parameter(data=kernel_up, requires_grad=False)
self.weight_down = nn.Parameter(data=kernel_down, requires_grad=False)
self.pool = nn.AvgPool2d(4)
def forward(self, org, enhance):
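        # Spatial consistency loss: after 4x4 average pooling, compare each
        # region's contrast with its four neighbours in the original and the
        # enhanced image, penalising the squared difference.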
b, c, h, w = org.shape
org_mean = torch.mean(org, 1, keepdim=True)
enhance_mean = torch.mean(enhance, 1, keepdim=True)
org_pool = self.pool(org_mean)
enhance_pool = self.pool(enhance_mean)
weight_diff = torch.max(
torch.FloatTensor([1]).cuda() + 10000 * torch.min(org_pool - torch.FloatTensor([0.3]).cuda(),
torch.FloatTensor([0]).cuda()),
torch.FloatTensor([0.5]).cuda())
E_1 = torch.mul(torch.sign(enhance_pool - torch.FloatTensor([0.5]).cuda()), enhance_pool - org_pool)
D_org_letf = F.conv2d(org_pool, self.weight_left, padding=1)
D_org_right = F.conv2d(org_pool, self.weight_right, padding=1)
D_org_up = F.conv2d(org_pool, self.weight_up, padding=1)
D_org_down = F.conv2d(org_pool, self.weight_down, padding=1)
D_enhance_letf = F.conv2d(enhance_pool, self.weight_left, padding=1)
D_enhance_right = F.conv2d(enhance_pool, self.weight_right, padding=1)
D_enhance_up = F.conv2d(enhance_pool, self.weight_up, padding=1)
D_enhance_down = F.conv2d(enhance_pool, self.weight_down, padding=1)
D_left = torch.pow(D_org_letf - D_enhance_letf, 2)
D_right = torch.pow(D_org_right - D_enhance_right, 2)
D_up = torch.pow(D_org_up - D_enhance_up, 2)
D_down = torch.pow(D_org_down - D_enhance_down, 2)
E = (D_left + D_right + D_up + D_down)
# E = 25*(D_left + D_right + D_up +D_down)
return E
class L_exp(nn.Module):
def __init__(self, patch_size, mean_val):
super(L_exp, self).__init__()
# print(1)
self.pool = nn.AvgPool2d(patch_size)
self.mean_val = mean_val
def forward(self, x):
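        # Exposure control loss: squared distance between the mean intensity
        # of each pooled patch and the target well-exposedness level mean_val.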
b, c, h, w = x.shape
x = torch.mean(x, 1, keepdim=True)
mean = self.pool(x)
d = torch.mean(torch.pow(mean - torch.FloatTensor([self.mean_val]).cuda(), 2))
return d
class L_TV(nn.Module):
def __init__(self, TVLoss_weight=1):
super(L_TV, self).__init__()
self.TVLoss_weight = TVLoss_weight
def forward(self, x):
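        # Total variation loss: penalise horizontal and vertical gradients to
        # keep the curve parameter maps spatially smooth.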
batch_size = x.size()[0]
h_x = x.size()[2]
w_x = x.size()[3]
count_h = (x.size()[2] - 1) * x.size()[3]
count_w = x.size()[2] * (x.size()[3] - 1)
h_tv = torch.pow((x[:, :, 1:, :] - x[:, :, :h_x - 1, :]), 2).sum()
w_tv = torch.pow((x[:, :, :, 1:] - x[:, :, :, :w_x - 1]), 2).sum()
return self.TVLoss_weight * 2 * (h_tv / count_h + w_tv / count_w) / batch_size
class Sa_Loss(nn.Module):
def __init__(self):
super(Sa_Loss, self).__init__()
# print(1)
def forward(self, x):
# self.grad = np.ones(x.shape,dtype=np.float32)
b, c, h, w = x.shape
# x_de = x.cpu().detach().numpy()
r, g, b = torch.split(x, 1, dim=1)
mean_rgb = torch.mean(x, [2, 3], keepdim=True)
mr, mg, mb = torch.split(mean_rgb, 1, dim=1)
Dr = r - mr
Dg = g - mg
Db = b - mb
k = torch.pow(torch.pow(Dr, 2) + torch.pow(Db, 2) + torch.pow(Dg, 2), 0.5)
# print(k)
k = torch.mean(k)
return k
class perception_loss(nn.Module):
def __init__(self):
super(perception_loss, self).__init__()
features = vgg16(pretrained=True).features
self.to_relu_1_2 = nn.Sequential()
self.to_relu_2_2 = nn.Sequential()
self.to_relu_3_3 = nn.Sequential()
self.to_relu_4_3 = nn.Sequential()
for x in range(4):
self.to_relu_1_2.add_module(str(x), features[x])
for x in range(4, 9):
self.to_relu_2_2.add_module(str(x), features[x])
for x in range(9, 16):
self.to_relu_3_3.add_module(str(x), features[x])
for x in range(16, 23):
self.to_relu_4_3.add_module(str(x), features[x])
# don't need the gradients, just want the features
for param in self.parameters():
param.requires_grad = False
def forward(self, x):
h = self.to_relu_1_2(x)
h_relu_1_2 = h
h = self.to_relu_2_2(h)
h_relu_2_2 = h
h = self.to_relu_3_3(h)
h_relu_3_3 = h
h = self.to_relu_4_3(h)
h_relu_4_3 = h
# out = (h_relu_1_2, h_relu_2_2, h_relu_3_3, h_relu_4_3)
return h_relu_4_3 | zero-dce | /zero_dce-1.0.0-py3-none-any.whl/Zero-DCE/Myloss.py | Myloss.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
class enhance_net_nopool(nn.Module):
def __init__(self):
super(enhance_net_nopool, self).__init__()
self.relu = nn.ReLU(inplace=True)
number_f = 32
self.e_conv1 = nn.Conv2d(3, number_f, 3, 1, 1, bias=True)
self.e_conv2 = nn.Conv2d(number_f, number_f, 3, 1, 1, bias=True)
self.e_conv3 = nn.Conv2d(number_f, number_f, 3, 1, 1, bias=True)
self.e_conv4 = nn.Conv2d(number_f, number_f, 3, 1, 1, bias=True)
self.e_conv5 = nn.Conv2d(number_f * 2, number_f, 3, 1, 1, bias=True)
self.e_conv6 = nn.Conv2d(number_f * 2, number_f, 3, 1, 1, bias=True)
self.e_conv7 = nn.Conv2d(number_f * 2, 24, 3, 1, 1, bias=True)
self.maxpool = nn.MaxPool2d(2, stride=2, return_indices=False, ceil_mode=False)
self.upsample = nn.UpsamplingBilinear2d(scale_factor=2)
def forward(self, x):
x1 = self.relu(self.e_conv1(x))
# p1 = self.maxpool(x1)
x2 = self.relu(self.e_conv2(x1))
# p2 = self.maxpool(x2)
x3 = self.relu(self.e_conv3(x2))
# p3 = self.maxpool(x3)
x4 = self.relu(self.e_conv4(x3))
x5 = self.relu(self.e_conv5(torch.cat([x3, x4], 1)))
# x5 = self.upsample(x5)
x6 = self.relu(self.e_conv6(torch.cat([x2, x5], 1)))
        x_r = torch.tanh(self.e_conv7(torch.cat([x1, x6], 1)))
r1, r2, r3, r4, r5, r6, r7, r8 = torch.split(x_r, 3, dim=1)
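        # x_r packs eight 3-channel curve parameter maps; apply the quadratic
        # light-enhancement curve LE(x) = x + r * (x^2 - x) eight times.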
x = x + r1 * (torch.pow(x, 2) - x)
x = x + r2 * (torch.pow(x, 2) - x)
x = x + r3 * (torch.pow(x, 2) - x)
enhance_image_1 = x + r4 * (torch.pow(x, 2) - x)
x = enhance_image_1 + r5 * (torch.pow(enhance_image_1, 2) - enhance_image_1)
x = x + r6 * (torch.pow(x, 2) - x)
x = x + r7 * (torch.pow(x, 2) - x)
enhance_image = x + r8 * (torch.pow(x, 2) - x)
r = torch.cat([r1, r2, r3, r4, r5, r6, r7, r8], 1)
return enhance_image_1, enhance_image, r | zero-dce | /zero_dce-1.0.0-py3-none-any.whl/Zero-DCE/model.py | model.py |
import argparse
import os
import torch
import torch.optim
import Myloss
import dataloader
import model
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
def train(config):
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
DCE_net = model.enhance_net_nopool().cuda()
DCE_net.apply(weights_init)
if config.load_pretrain:
DCE_net.load_state_dict(torch.load(config.pretrain_dir))
train_dataset = dataloader.lowlight_loader(config.lowlight_images_path)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=config.train_batch_size, shuffle=True,
num_workers=config.num_workers, pin_memory=True)
L_color = Myloss.L_color()
L_spa = Myloss.L_spa()
L_exp = Myloss.L_exp(16, 0.6)
L_TV = Myloss.L_TV()
optimizer = torch.optim.Adam(DCE_net.parameters(), lr=config.lr, weight_decay=config.weight_decay)
DCE_net.train()
for epoch in range(config.num_epochs):
for iteration, img_lowlight in enumerate(train_loader):
img_lowlight = img_lowlight.cuda()
enhanced_image_1, enhanced_image, A = DCE_net(img_lowlight)
Loss_TV = 200 * L_TV(A)
loss_spa = torch.mean(L_spa(enhanced_image, img_lowlight))
loss_col = 5 * torch.mean(L_color(enhanced_image))
loss_exp = 10 * torch.mean(L_exp(enhanced_image))
            # Total loss: weighted sum of the four non-reference Zero-DCE losses
            loss = Loss_TV + loss_spa + loss_col + loss_exp
optimizer.zero_grad()
loss.backward()
            torch.nn.utils.clip_grad_norm_(DCE_net.parameters(), config.grad_clip_norm)
optimizer.step()
if ((iteration + 1) % config.display_iter) == 0:
print("Loss at iteration", iteration + 1, ":", loss.item())
if ((iteration + 1) % config.snapshot_iter) == 0:
torch.save(DCE_net.state_dict(), config.snapshots_folder + "Epoch" + str(epoch) + '.pth')
if __name__ == "__main__":
parser = argparse.ArgumentParser()
# Input Parameters
parser.add_argument('--lowlight_images_path', type=str, default="data/train_data/")
parser.add_argument('--lr', type=float, default=0.0001)
parser.add_argument('--weight_decay', type=float, default=0.0001)
parser.add_argument('--grad_clip_norm', type=float, default=0.1)
parser.add_argument('--num_epochs', type=int, default=200)
parser.add_argument('--train_batch_size', type=int, default=8)
parser.add_argument('--val_batch_size', type=int, default=4)
parser.add_argument('--num_workers', type=int, default=4)
parser.add_argument('--display_iter', type=int, default=10)
parser.add_argument('--snapshot_iter', type=int, default=10)
parser.add_argument('--snapshots_folder', type=str, default="snapshots/")
    parser.add_argument('--load_pretrain', action='store_true')
parser.add_argument('--pretrain_dir', type=str, default="snapshots/Epoch99.pth")
config = parser.parse_args()
if not os.path.exists(config.snapshots_folder):
os.mkdir(config.snapshots_folder)
train(config) | zero-dce | /zero_dce-1.0.0-py3-none-any.whl/Zero-DCE/lowlight_train.py | lowlight_train.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models.vgg import vgg16
class L_color(nn.Module):
def __init__(self):
super(L_color, self).__init__()
def forward(self, x):
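        # Color constancy (gray-world) loss: penalise pairwise differences
        # between the mean R, G and B values of the enhanced image.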
b, c, h, w = x.shape
mean_rgb = torch.mean(x, [2, 3], keepdim=True)
mr, mg, mb = torch.split(mean_rgb, 1, dim=1)
Drg = torch.pow(mr - mg, 2)
Drb = torch.pow(mr - mb, 2)
Dgb = torch.pow(mb - mg, 2)
k = torch.pow(torch.pow(Drg, 2) + torch.pow(Drb, 2) + torch.pow(Dgb, 2), 0.5)
return k
class L_spa(nn.Module):
def __init__(self):
super(L_spa, self).__init__()
        # Fixed 3x3 difference kernels, one per neighbouring direction (not trainable)
kernel_left = torch.FloatTensor([[0, 0, 0], [-1, 1, 0], [0, 0, 0]]).cuda().unsqueeze(0).unsqueeze(0)
kernel_right = torch.FloatTensor([[0, 0, 0], [0, 1, -1], [0, 0, 0]]).cuda().unsqueeze(0).unsqueeze(0)
kernel_up = torch.FloatTensor([[0, -1, 0], [0, 1, 0], [0, 0, 0]]).cuda().unsqueeze(0).unsqueeze(0)
kernel_down = torch.FloatTensor([[0, 0, 0], [0, 1, 0], [0, -1, 0]]).cuda().unsqueeze(0).unsqueeze(0)
self.weight_left = nn.Parameter(data=kernel_left, requires_grad=False)
self.weight_right = nn.Parameter(data=kernel_right, requires_grad=False)
self.weight_up = nn.Parameter(data=kernel_up, requires_grad=False)
self.weight_down = nn.Parameter(data=kernel_down, requires_grad=False)
self.pool = nn.AvgPool2d(4)
def forward(self, org, enhance):
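        # Spatial consistency loss: after 4x4 average pooling, compare each
        # region's contrast with its four neighbours in the original and the
        # enhanced image, penalising the squared difference.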
b, c, h, w = org.shape
org_mean = torch.mean(org, 1, keepdim=True)
enhance_mean = torch.mean(enhance, 1, keepdim=True)
org_pool = self.pool(org_mean)
enhance_pool = self.pool(enhance_mean)
weight_diff = torch.max(
torch.FloatTensor([1]).cuda() + 10000 * torch.min(org_pool - torch.FloatTensor([0.3]).cuda(),
torch.FloatTensor([0]).cuda()),
torch.FloatTensor([0.5]).cuda())
E_1 = torch.mul(torch.sign(enhance_pool - torch.FloatTensor([0.5]).cuda()), enhance_pool - org_pool)
D_org_letf = F.conv2d(org_pool, self.weight_left, padding=1)
D_org_right = F.conv2d(org_pool, self.weight_right, padding=1)
D_org_up = F.conv2d(org_pool, self.weight_up, padding=1)
D_org_down = F.conv2d(org_pool, self.weight_down, padding=1)
D_enhance_letf = F.conv2d(enhance_pool, self.weight_left, padding=1)
D_enhance_right = F.conv2d(enhance_pool, self.weight_right, padding=1)
D_enhance_up = F.conv2d(enhance_pool, self.weight_up, padding=1)
D_enhance_down = F.conv2d(enhance_pool, self.weight_down, padding=1)
D_left = torch.pow(D_org_letf - D_enhance_letf, 2)
D_right = torch.pow(D_org_right - D_enhance_right, 2)
D_up = torch.pow(D_org_up - D_enhance_up, 2)
D_down = torch.pow(D_org_down - D_enhance_down, 2)
E = (D_left + D_right + D_up + D_down)
# E = 25*(D_left + D_right + D_up +D_down)
return E
class L_exp(nn.Module):
def __init__(self, patch_size, mean_val):
super(L_exp, self).__init__()
# print(1)
self.pool = nn.AvgPool2d(patch_size)
self.mean_val = mean_val
def forward(self, x):
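        # Exposure control loss: squared distance between the mean intensity
        # of each pooled patch and the target well-exposedness level mean_val.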
b, c, h, w = x.shape
x = torch.mean(x, 1, keepdim=True)
mean = self.pool(x)
d = torch.mean(torch.pow(mean - torch.FloatTensor([self.mean_val]).cuda(), 2))
return d
class L_TV(nn.Module):
def __init__(self, TVLoss_weight=1):
super(L_TV, self).__init__()
self.TVLoss_weight = TVLoss_weight
def forward(self, x):
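        # Total variation loss: penalise horizontal and vertical gradients to
        # keep the curve parameter maps spatially smooth.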
batch_size = x.size()[0]
h_x = x.size()[2]
w_x = x.size()[3]
count_h = (x.size()[2] - 1) * x.size()[3]
count_w = x.size()[2] * (x.size()[3] - 1)
h_tv = torch.pow((x[:, :, 1:, :] - x[:, :, :h_x - 1, :]), 2).sum()
w_tv = torch.pow((x[:, :, :, 1:] - x[:, :, :, :w_x - 1]), 2).sum()
return self.TVLoss_weight * 2 * (h_tv / count_h + w_tv / count_w) / batch_size
class Sa_Loss(nn.Module):
def __init__(self):
super(Sa_Loss, self).__init__()
# print(1)
def forward(self, x):
# self.grad = np.ones(x.shape,dtype=np.float32)
b, c, h, w = x.shape
# x_de = x.cpu().detach().numpy()
r, g, b = torch.split(x, 1, dim=1)
mean_rgb = torch.mean(x, [2, 3], keepdim=True)
mr, mg, mb = torch.split(mean_rgb, 1, dim=1)
Dr = r - mr
Dg = g - mg
Db = b - mb
k = torch.pow(torch.pow(Dr, 2) + torch.pow(Db, 2) + torch.pow(Dg, 2), 0.5)
# print(k)
k = torch.mean(k)
return k
class perception_loss(nn.Module):
def __init__(self):
super(perception_loss, self).__init__()
features = vgg16(pretrained=True).features
self.to_relu_1_2 = nn.Sequential()
self.to_relu_2_2 = nn.Sequential()
self.to_relu_3_3 = nn.Sequential()
self.to_relu_4_3 = nn.Sequential()
for x in range(4):
self.to_relu_1_2.add_module(str(x), features[x])
for x in range(4, 9):
self.to_relu_2_2.add_module(str(x), features[x])
for x in range(9, 16):
self.to_relu_3_3.add_module(str(x), features[x])
for x in range(16, 23):
self.to_relu_4_3.add_module(str(x), features[x])
# don't need the gradients, just want the features
for param in self.parameters():
param.requires_grad = False
def forward(self, x):
h = self.to_relu_1_2(x)
h_relu_1_2 = h
h = self.to_relu_2_2(h)
h_relu_2_2 = h
h = self.to_relu_3_3(h)
h_relu_3_3 = h
h = self.to_relu_4_3(h)
h_relu_4_3 = h
# out = (h_relu_1_2, h_relu_2_2, h_relu_3_3, h_relu_4_3)
return h_relu_4_3 | zero-dce | /zero_dce-1.0.0-py3-none-any.whl/zero_dce/Myloss.py | Myloss.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
class enhance_net_nopool(nn.Module):
def __init__(self):
super(enhance_net_nopool, self).__init__()
self.relu = nn.ReLU(inplace=True)
number_f = 32
self.e_conv1 = nn.Conv2d(3, number_f, 3, 1, 1, bias=True)
self.e_conv2 = nn.Conv2d(number_f, number_f, 3, 1, 1, bias=True)
self.e_conv3 = nn.Conv2d(number_f, number_f, 3, 1, 1, bias=True)
self.e_conv4 = nn.Conv2d(number_f, number_f, 3, 1, 1, bias=True)
self.e_conv5 = nn.Conv2d(number_f * 2, number_f, 3, 1, 1, bias=True)
self.e_conv6 = nn.Conv2d(number_f * 2, number_f, 3, 1, 1, bias=True)
self.e_conv7 = nn.Conv2d(number_f * 2, 24, 3, 1, 1, bias=True)
self.maxpool = nn.MaxPool2d(2, stride=2, return_indices=False, ceil_mode=False)
self.upsample = nn.UpsamplingBilinear2d(scale_factor=2)
def forward(self, x):
x1 = self.relu(self.e_conv1(x))
# p1 = self.maxpool(x1)
x2 = self.relu(self.e_conv2(x1))
# p2 = self.maxpool(x2)
x3 = self.relu(self.e_conv3(x2))
# p3 = self.maxpool(x3)
x4 = self.relu(self.e_conv4(x3))
x5 = self.relu(self.e_conv5(torch.cat([x3, x4], 1)))
# x5 = self.upsample(x5)
x6 = self.relu(self.e_conv6(torch.cat([x2, x5], 1)))
        x_r = torch.tanh(self.e_conv7(torch.cat([x1, x6], 1)))
r1, r2, r3, r4, r5, r6, r7, r8 = torch.split(x_r, 3, dim=1)
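        # x_r packs eight 3-channel curve parameter maps; apply the quadratic
        # light-enhancement curve LE(x) = x + r * (x^2 - x) eight times.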
x = x + r1 * (torch.pow(x, 2) - x)
x = x + r2 * (torch.pow(x, 2) - x)
x = x + r3 * (torch.pow(x, 2) - x)
enhance_image_1 = x + r4 * (torch.pow(x, 2) - x)
x = enhance_image_1 + r5 * (torch.pow(enhance_image_1, 2) - enhance_image_1)
x = x + r6 * (torch.pow(x, 2) - x)
x = x + r7 * (torch.pow(x, 2) - x)
enhance_image = x + r8 * (torch.pow(x, 2) - x)
r = torch.cat([r1, r2, r3, r4, r5, r6, r7, r8], 1)
return enhance_image_1, enhance_image, r | zero-dce | /zero_dce-1.0.0-py3-none-any.whl/zero_dce/model.py | model.py |
import argparse
import os
import torch
import torch.optim
import Myloss
import dataloader
import model
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
def train(config):
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
DCE_net = model.enhance_net_nopool().cuda()
DCE_net.apply(weights_init)
if config.load_pretrain:
DCE_net.load_state_dict(torch.load(config.pretrain_dir))
train_dataset = dataloader.lowlight_loader(config.lowlight_images_path)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=config.train_batch_size, shuffle=True,
num_workers=config.num_workers, pin_memory=True)
L_color = Myloss.L_color()
L_spa = Myloss.L_spa()
L_exp = Myloss.L_exp(16, 0.6)
L_TV = Myloss.L_TV()
optimizer = torch.optim.Adam(DCE_net.parameters(), lr=config.lr, weight_decay=config.weight_decay)
DCE_net.train()
for epoch in range(config.num_epochs):
for iteration, img_lowlight in enumerate(train_loader):
img_lowlight = img_lowlight.cuda()
enhanced_image_1, enhanced_image, A = DCE_net(img_lowlight)
Loss_TV = 200 * L_TV(A)
loss_spa = torch.mean(L_spa(enhanced_image, img_lowlight))
loss_col = 5 * torch.mean(L_color(enhanced_image))
loss_exp = 10 * torch.mean(L_exp(enhanced_image))
            # Total loss: weighted sum of the four non-reference Zero-DCE losses
            loss = Loss_TV + loss_spa + loss_col + loss_exp
optimizer.zero_grad()
loss.backward()
            torch.nn.utils.clip_grad_norm_(DCE_net.parameters(), config.grad_clip_norm)
optimizer.step()
if ((iteration + 1) % config.display_iter) == 0:
print("Loss at iteration", iteration + 1, ":", loss.item())
if ((iteration + 1) % config.snapshot_iter) == 0:
torch.save(DCE_net.state_dict(), config.snapshots_folder + "Epoch" + str(epoch) + '.pth')
if __name__ == "__main__":
parser = argparse.ArgumentParser()
# Input Parameters
parser.add_argument('--lowlight_images_path', type=str, default="data/train_data/")
parser.add_argument('--lr', type=float, default=0.0001)
parser.add_argument('--weight_decay', type=float, default=0.0001)
parser.add_argument('--grad_clip_norm', type=float, default=0.1)
parser.add_argument('--num_epochs', type=int, default=200)
parser.add_argument('--train_batch_size', type=int, default=8)
parser.add_argument('--val_batch_size', type=int, default=4)
parser.add_argument('--num_workers', type=int, default=4)
parser.add_argument('--display_iter', type=int, default=10)
parser.add_argument('--snapshot_iter', type=int, default=10)
parser.add_argument('--snapshots_folder', type=str, default="snapshots/")
    parser.add_argument('--load_pretrain', action='store_true')
parser.add_argument('--pretrain_dir', type=str, default="snapshots/Epoch99.pth")
config = parser.parse_args()
if not os.path.exists(config.snapshots_folder):
os.mkdir(config.snapshots_folder)
train(config) | zero-dce | /zero_dce-1.0.0-py3-none-any.whl/zero_dce/lowlight_train.py | lowlight_train.py |
# Zero Divisor Graph math library
This is a pure Python library for working with zero divisor graphs of commutative semigroups. The primary purpose is to automate the task of checking whether a given graph is a zero divisor graph and, if so, which semigroups realize it.
# Installation
```
pip3 install zero-divisor-graph
```
You can also install from source at https://github.com/Paulcappaert/zero-divisor-graph
# Use
First, start python3 in a terminal window and import the ZeroDivisorGraph object:
```
python3
>>> from zdg.zdg import ZeroDivisorGraph as ZDG
```
You can create a zero divisor graph from its edges as follows. The vertices can be named whatever you want:
```
>>> example1 = ZDG((1, 2), (2, 3))
>>> example2 = ZDG(('a', 'b'), ('b', 'c'))
```
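Since the library's primary purpose is to check whether a given graph is a zero divisor graph, a natural test (a sketch, assuming `semigroups()` returns an empty collection when no realizing semigroup exists; the README does not document the return type) is:
```
>>> if example1.semigroups():
...     print('example1 is a zero divisor graph')
```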
You can print all of the semigroups realizing a zero divisor graph as follows:
```
>>> semigroups = example1.semigroups()
>>> for s in semigroups:
... print(s.caley_table())
```
| zero-divisor-graph | /zero-divisor-graph-1.0.0.tar.gz/zero-divisor-graph-1.0.0/README.md | README.md |
from zdg.counter import ModularCounter
from zdg.groupoid import is_assoc
def _inverse_set(a, b, pm, elements):
'''
returns: set of values z such that az = b is a possible mapping
'''
inv = set()
for z in elements:
if b in pm[frozenset({a, z})]:
inv.add(z)
return inv
def reduce_poss_maps(poss_maps, elements):
    '''
    parameter: dict of possible mappings for a groupoid
    Narrows each set of possible mappings in place, keeping only
    values consistent with associativity; a set may become empty
    if no associative mapping exists.
    '''
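    # Fixed-point style constraint propagation: repeatedly intersect each
    # possible mapping with the values consistent with associativity.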
for _ in range(len(poss_maps)):
for a in elements:
for b in elements:
for c in elements:
ab = poss_maps[frozenset({a, b})]
bc = poss_maps[frozenset({b, c})]
prod1 = set()
for p in ab:
prod1.update(poss_maps[frozenset({p, c})])
prod2 = set()
for p in bc:
prod2.update(poss_maps[frozenset({p, a})])
products = prod1.intersection(prod2)
ab_vals = set()
for p in products:
ab_vals.update(_inverse_set(c, p, poss_maps, elements))
bc_vals = set()
for p in products:
bc_vals.update(_inverse_set(a, p, poss_maps, elements))
poss_maps[frozenset({a, b})].intersection_update(ab_vals)
poss_maps[frozenset({b, c})].intersection_update(bc_vals)
def get_semigroups(poss_maps, groupoid):
'''
parameters:
groupoid with an incomplete mappings
dictionary of elements of the groupoid to sets of possible mappings for those elements
returns: a list of groupoids copied from the passed groupoid and completed with
mappings from the poss_maps parameter
'''
    num_possibilities = 1
mods = []
ordered_mappings = {}
ordered_keys = tuple(poss_maps.keys())
for key in ordered_keys:
ordered_mappings[key] = tuple(poss_maps[key])
n = len(poss_maps[key])
mods.append(n)
        num_possibilities *= n
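    # A mixed-radix counter walks through every combination of the candidate products.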
counter = ModularCounter(mods, ordered_keys)
semigroups = set()
    for i in range(num_possibilities):
for key in ordered_mappings:
c = ordered_mappings[key][counter.get_count(key)]
if len(key) == 2:
a, b = key
groupoid.set(a, b, c)
else:
a, = key
groupoid.set(a, a, c)
if is_assoc(groupoid):
semigroups.add(groupoid.copy())
counter.tick()
return semigroups | zero-divisor-graph | /zero-divisor-graph-1.0.0.tar.gz/zero-divisor-graph-1.0.0/zdg/semigroup.py | semigroup.py |
.. image:: https://img.shields.io/pypi/v/zero-downtime-migrations.svg?style=flat
:alt: PyPI Version
:target: https://pypi.python.org/pypi/zero-downtime-migrations
.. image:: https://img.shields.io/pypi/pyversions/zero-downtime-migrations.svg
:alt: Supported Python versions
:target: https://pypi.python.org/pypi/zero-downtime-migrations
.. image:: https://travis-ci.org/yandex/zero-downtime-migrations.svg?branch=master
:alt: Build Status
:target: https://travis-ci.org/yandex/zero-downtime-migrations
Zero-Downtime-Migrations
========================
Description
-----------
*Zero-Downtime-Migrations (ZDM)* is an application that allows you to avoid long locks (and rewriting the whole table)
while applying Django migrations, using PostgreSQL as the database.
Current possibilities
---------------------
* add field with default value (nullable or not)
* create indexes concurrently (and check the index status after creation, in case the index was created with INVALID status)
* add a unique constraint to an existing field by concurrently creating a unique index and then adding a constraint based on that index
Why use it
----------
We faced a problem: performing some Django migrations (such as adding a column with a default value) locks the table for
reads and writes, so it is impossible for our services to work properly during these periods. This can be acceptable on rather small
tables (fewer than a million rows), but even there it can be painful if the service is under high load.
We also have a lot of tables with more than 50 million rows, and applying migrations to such tables locks them for
more than an hour, which is totally unacceptable. During these time-consuming operations migrations also rather often fail
with various errors (such as TimeoutError), forcing us to start from scratch or run the SQL manually through
psql and then fake the migration.
So in the end we had the idea of writing this package: it prevents long locks on tables and also
provides a more stable migration process which can be resumed if an operation fails for some reason.
Installation
------------
To install :code:`ZDM`, simply run:
.. code:: bash
pip install zero-downtime-migrations
Usage
-----
If you are currently using the default PostgreSQL backend, change it to:
.. code:: python
DATABASES = {
'default': {
'ENGINE': 'zero_downtime_migrations.backend',
...
}
...
}
If you are using your own custom backend you can:
* Set :code:`SchemaEditorClass` if you are currently using the default one:
.. code:: python
from zero_downtime_migrations.backend.schema import DatabaseSchemaEditor
class DatabaseWrapper(BaseWrapper):
SchemaEditorClass = DatabaseSchemaEditor
* Add :code:`ZeroDownTimeMixin` to the base classes of your :code:`DatabaseSchemaEditor` if you are using a custom one:
.. code:: python
from zero_downtime_migrations.backend.schema import ZeroDownTimeMixin
class YourCustomSchemaEditor(ZeroDownTimeMixin, ...):
...
Note about indexes
------------------
The library will always force CONCURRENT index creation and afterwards check the index status - if the index was
created with INVALID status, it will be deleted and an error will be raised.
In this case you should fix the underlying problem if needed and restart the migration.
For example, if creating a unique index failed, you should make sure that there are only unique values
in the column on which the index is being created.
An index usually ends up with invalid status due to a deadlock, so you often just need to restart the migration.
Example
-------
When adding a NOT NULL column with a default, Django performs the following SQL query:
.. code:: sql
ALTER TABLE "test" ADD COLUMN "field" boolean DEFAULT True NOT NULL;
This causes PostgreSQL to rewrite the whole table and then swap it with the existing one (`note from the Django documentation <https://docs.djangoproject.com/en/dev/topics/migrations/#postgresql>`_),
holding an exclusive read/write lock on the table for the whole period.
This package breaks the SQL above into separate commands, not only to prevent rewriting the whole
table but also to add the column with lock times as short as possible.
First we add a nullable column without a default, then add the default to it with a separate command in one transaction:
.. code:: sql
ALTER TABLE "test" ADD COLUMN "field" boolean NULL;
ALTER TABLE "test" ALTER COLUMN "field" SET DEFAULT true;
This adds the default for all new rows in the table, while all existing rows keep a null value in this column for now;
this operation is quick because PostgreSQL doesn't have to fill all existing rows with the default.
Next we count the objects in the table and, if the result is more than zero, calculate the
size of the batches in which we will update the existing rows. After that, while there are still rows with null in this
column, we update them.
We repeat the following statement while its result is more than zero:
.. code:: sql
WITH cte AS (
SELECT <table_pk_column> as pk
FROM "test"
WHERE "field" is null
LIMIT <size_calculated_on_previous_step>
)
UPDATE "test" table_
SET "field" = true
FROM cte
WHERE table_.<table_pk_column> = cte.pk
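Schematically, the client-side batch-update loop looks like this (a simplified Python sketch of the logic above; :code:`update_batch` is an illustrative name, not part of the public API):
.. code:: python

    while True:
        with transaction.atomic():
            # Runs the batched UPDATE above and returns the rowcount.
            updated = update_batch(model, field, batch_size, default)
        if not updated:
            break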
When there are no more rows with null in this column, we can set NOT NULL and drop the default (which is Django's
default behavior):
.. code:: sql
ALTER TABLE "test" ALTER COLUMN "field" SET NOT NULL;
ALTER TABLE "test" ALTER COLUMN "field" DROP DEFAULT;
This completes the add-field process.
It is definitely more time consuming than the basic variant with a single SQL statement, but with this approach
there are no long locks on the table, so the service can work normally during the migration.
Run tests
---------
.. code:: bash
./run_tests.sh
| zero-downtime-migrations | /zero-downtime-migrations-0.11.tar.gz/zero-downtime-migrations-0.11/README.rst | README.rst |
from __future__ import unicode_literals
import re
import sys
import inspect
from distutils.version import StrictVersion
try:
from django.db.backends.postgresql.schema import DatabaseSchemaEditor as BaseEditor
except ImportError:
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor as BaseEditor
import django
from django.db.models.fields import NOT_PROVIDED
from django.db.models.fields.related import RelatedField
from django.db import transaction
from django.db.migrations.questioner import InteractiveMigrationQuestioner
from zero_downtime_migrations.backend.sql_template import (
SQL_ESTIMATE_COUNT_IN_TABLE,
SQL_CHECK_COLUMN_STATUS,
SQL_COUNT_IN_TABLE,
SQL_COUNT_IN_TABLE_WITH_NULL,
SQL_UPDATE_BATCH,
SQL_CREATE_UNIQUE_INDEX,
SQL_ADD_UNIQUE_CONSTRAINT_FROM_INDEX,
SQL_CHECK_INDEX_STATUS,
)
from zero_downtime_migrations.backend.exceptions import InvalidIndexError
DJANGO_VERSION = StrictVersion(django.get_version())
TABLE_SIZE_FOR_MAX_BATCH = 500000
MAX_BATCH_SIZE = 10000
MIN_BATCH_SIZE = 1000
_getargspec = getattr(inspect, 'getfullargspec', getattr(inspect, 'getargspec', None))
class ZeroDownTimeMixin(object):
    RETRY_QUESTION_TEMPLATE = ('It looks like column "{}" in table "{}" already exists with the following '
'parameters: TYPE: "{}", DEFAULT: "{}", NULLABLE: "{}".'
)
RETRY_CHOICES = (
'abort migration',
'drop column and run migration from beginning',
'manually choose action to start from',
'show how many rows still need to be updated',
'mark operation as successful and proceed to next operation',
'drop column and run migration from standard SchemaEditor',
)
ADD_FIELD_WITH_DEFAULT_ACTIONS = [
'add field with default',
'update existing rows',
'set not null for field',
'drop default',
]
def alter_field(self, model, old_field, new_field, strict=False):
        if DJANGO_VERSION >= StrictVersion('2.1'):
from django.db.backends.ddl_references import IndexName
if self._unique_should_be_added(old_field, new_field):
table = model._meta.db_table
index_name = str(IndexName(table, [new_field.column], '_uniq', self._create_index_name))
self.execute(
self._create_index_sql(model, [new_field], name=index_name, sql=SQL_CREATE_UNIQUE_INDEX)
)
self.execute(self._create_unique_constraint_from_index_sql(table, index_name))
self.already_added_unique = True
return super(ZeroDownTimeMixin, self).alter_field(model, old_field, new_field, strict=strict)
def _field_supported(self, field):
supported = True
if isinstance(field, RelatedField):
supported = False
elif field.default is NOT_PROVIDED:
supported = False
            if (DJANGO_VERSION >= StrictVersion('1.10') and
(getattr(field, 'auto_now', False) or
getattr(field, 'auto_now_add', False))
):
self.date_default = True
supported = True
return supported
def add_field(self, model, field):
if not self._field_supported(field=field):
return super(ZeroDownTimeMixin, self).add_field(model, field)
        # Check which actions we should perform - maybe this operation was run
        # before and crashed for some reason
actions = self.get_actions_to_perform(model, field)
if not actions:
return
# Saving initial values
default_effective_value = self.effective_default(field)
nullable = field.null
# Update the values to the required ones
        field.default = None if DJANGO_VERSION < StrictVersion('1.11') else NOT_PROVIDED
if getattr(self, 'date_default', False):
if getattr(field, 'auto_now', False):
field.auto_now = False
if getattr(field, 'auto_now_add', False):
field.auto_now_add = False
if nullable is False:
field.null = True
# For Django < 1.10
atomic = getattr(self, 'atomic_migration', True)
if self.connection.in_atomic_block:
self.atomic.__exit__(None, None, None)
available_args = {
'model': model,
'field': field,
'nullable': nullable,
'default_effective_value': default_effective_value,
}
# Performing needed actions
for action in actions:
action = '_'.join(action.split())
func = getattr(self, action)
func_args = {arg: available_args[arg] for arg in
_getargspec(func).args if arg != 'self'
}
func(**func_args)
# If migrations was atomic=True initially
# entering atomic block again
if atomic:
self.atomic = transaction.atomic(self.connection.alias)
self.atomic.__enter__()
def add_field_with_default(self, model, field, default_effective_value):
"""
Adding field with default in two separate
operations, so we can avoid rewriting the
whole table
"""
with transaction.atomic():
super(ZeroDownTimeMixin, self).add_field(model, field)
self.add_default(model, field, default_effective_value)
def update_existing_rows(self, model, field, default_effective_value):
"""
Updating existing rows in table by (relatively) small batches
to avoid long locks on table
"""
if default_effective_value is None:
return
objects_in_table = self.count_objects_in_table(model=model)
if objects_in_table > 0:
objects_in_batch_count = self.get_objects_in_batch_count(objects_in_table)
while True:
with transaction.atomic():
updated = self.update_batch(model=model, field=field,
objects_in_batch_count=objects_in_batch_count,
value=default_effective_value,
)
                print('Updated {} rows in {}'.format(updated, model._meta.db_table))
if updated is None or updated == 0:
break
def set_not_null_for_field(self, model, field, nullable):
        # If the field was NOT NULL - add
        # this constraint to the table
if nullable is False:
self.set_not_null(model, field)
def get_column_info(self, model, field):
sql = SQL_CHECK_COLUMN_STATUS % {
"table": model._meta.db_table,
"column": field.name,
}
return self.get_query_result(sql)
def get_actions_to_perform(self, model, field):
actions = self.ADD_FIELD_WITH_DEFAULT_ACTIONS
        # Check whether this column already exists -
        # if so, ask the user what to do next
column_info = self.get_column_info(model, field)
if column_info is not None:
existed_nullable, existed_type, existed_default = column_info
questioner = InteractiveMigrationQuestioner()
question = self.RETRY_QUESTION_TEMPLATE.format(
field.name, model._meta.db_table,
existed_type, existed_default,
existed_nullable,
)
result = questioner._choice_input(question, self.RETRY_CHOICES)
if result == 1:
sys.exit(1)
elif result == 2:
self.remove_field(model, field)
elif result == 3:
question = 'Now choose from which action process should continue'
result = questioner._choice_input(question, actions)
actions = actions[result - 1:]
elif result == 4:
question = 'Rows in table where column is null: "{}"'
need_to_update = self.need_to_update(model=model, field=field)
questioner._choice_input(question.format(need_to_update),
('Continue',)
)
return self.get_actions_to_perform(model, field)
elif result == 5:
actions = []
elif result == 6:
self.remove_field(model, field)
super(ZeroDownTimeMixin, self).add_field(model, field)
actions = []
return actions
def get_pk_column_name(self, model):
return model._meta.pk.name
def update_batch(self, model, field, objects_in_batch_count, value):
pk_column_name = self.get_pk_column_name(model)
sql = SQL_UPDATE_BATCH % {
"table": model._meta.db_table,
"column": field.name,
"batch_size": objects_in_batch_count,
"pk_column_name": pk_column_name,
"value": "%s",
}
params = [value]
return self.get_query_result(sql, params, row_count=True)
def get_objects_in_batch_count(self, model_count):
"""
Calculate batch size
:param model_count: int
:return: int
"""
if model_count > TABLE_SIZE_FOR_MAX_BATCH:
value = MAX_BATCH_SIZE
else:
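            # Update roughly 5% of the table per batch, but never fewer than MIN_BATCH_SIZE rows.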
value = int((model_count / 100) * 5)
return max(MIN_BATCH_SIZE, value)
def get_query_result(self, sql, params=(), row_count=False):
"""
        The default Django backend execute function does not
        return any result, so we use this custom one where needed
"""
if self.collect_sql:
# in collect_sql case use django function logic
return self.execute(sql, params)
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
if row_count:
return cursor.rowcount
return cursor.fetchone()
def parse_cursor_result(self, cursor_result, place=0, collect_sql_value=1, ):
result = None
if self.collect_sql:
result = collect_sql_value # For sqlmigrate purpose
elif cursor_result:
result = cursor_result[place]
return result
def execute_table_query(self, sql, model):
sql = sql % {
"table": model._meta.db_table
}
cursor_result = self.get_query_result(sql)
return self.parse_cursor_result(cursor_result=cursor_result)
def count_objects_in_table(self, model):
count = self.execute_table_query(
sql=SQL_ESTIMATE_COUNT_IN_TABLE,
model=model,
)
if count == 0:
            # Check - maybe the statistics are outdated?
            # Because the previous count returned 0 this will be a fast query
count = self.execute_table_query(
sql=SQL_COUNT_IN_TABLE,
model=model,
)
return count
def need_to_update(self, model, field):
sql = SQL_COUNT_IN_TABLE_WITH_NULL % {
"table": model._meta.db_table,
"column": field.name,
}
cursor_result = self.get_query_result(sql)
return self.parse_cursor_result(cursor_result=cursor_result)
def drop_default(self, model, field):
set_default_sql, params = self._alter_column_default_sql_local(field, drop=True)
self.execute_alter_column(model, set_default_sql, params)
def add_default(self, model, field, default_value):
set_default_sql, params = self._alter_column_default_sql_local(field, default_value)
self.execute_alter_column(model, set_default_sql, params)
def set_not_null(self, model, field):
set_not_null_sql = self.generate_set_not_null(field)
self.execute_alter_column(model, set_not_null_sql)
def execute_alter_column(self, model, changes_sql, params=()):
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
def generate_set_not_null(self, field):
new_db_params = field.db_parameters(connection=self.connection)
sql = self.sql_alter_column_not_null
return sql % {
'column': self.quote_name(field.column),
'type': new_db_params['type'],
}
def _alter_column_default_sql_local(self, field, default_value=None, drop=False):
"""
Copy this method from django2.0
https://github.com/django/django/blob/master/django/db/backends/base/schema.py#L787
"""
default = '%s'
params = [default_value]
if drop:
params = []
new_db_params = field.db_parameters(connection=self.connection)
sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default
return (
sql % {
'column': self.quote_name(field.column),
'type': new_db_params['type'],
'default': default,
},
params,
)
def _unique_should_be_added(self, old_field, new_field):
if getattr(self, 'already_added_unique', False):
return False
return super(ZeroDownTimeMixin, self)._unique_should_be_added(old_field, new_field)
def _create_unique_constraint_from_index_sql(self, table, index_name):
return SQL_ADD_UNIQUE_CONSTRAINT_FROM_INDEX % {
"table": table,
"name": index_name,
"index_name": index_name,
}
def _check_index_sql(self, index_name):
return SQL_CHECK_INDEX_STATUS % {
"index_name": index_name,
}
def _check_valid_index(self, sql):
"""
Return index_name if it's invalid
"""
index_match = re.match(r'.* "(?P<index_name>.+)" ON .+', sql)
if index_match:
index_name = index_match.group('index_name')
check_index_sql = self._check_index_sql(index_name)
cursor_result = self.get_query_result(check_index_sql)
if self.parse_cursor_result(cursor_result=cursor_result):
return index_name
def _create_unique_failed(self, exc):
        return (DJANGO_VERSION >= StrictVersion('2.1')
and 'could not create unique index' in repr(exc)
)
def execute(self, sql, params=()):
exit_atomic = False
# Account for non-string statement objects.
sql = str(sql)
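        # PostgreSQL cannot run CREATE/DROP INDEX CONCURRENTLY inside a transaction
        # block, so such statements are executed outside the migration's atomic block.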
if re.search('(CREATE|DROP).+INDEX', sql):
exit_atomic = True
if 'CONCURRENTLY' not in sql:
sql = sql.replace('INDEX', 'INDEX CONCURRENTLY')
atomic = self.connection.in_atomic_block
if exit_atomic and atomic:
self.atomic.__exit__(None, None, None)
try:
super(ZeroDownTimeMixin, self).execute(sql, params)
except django.db.utils.IntegrityError as exc:
            # create unique index should be treated differently
            # because it raises an error instead of exiting quietly
if not self._create_unique_failed(exc):
raise
if exit_atomic and not self.collect_sql:
invalid_index_name = self._check_valid_index(sql)
if invalid_index_name:
                # the index was built but is invalid, so we need to delete it
self.execute(self.sql_delete_index % {'name': invalid_index_name})
raise InvalidIndexError(
'Unsuccessful attempt to create an index, fix data if needed and restart.'
'Sql was: {}'.format(sql)
)
if exit_atomic and atomic:
self.atomic = transaction.atomic(self.connection.alias)
self.atomic.__enter__()
class DatabaseSchemaEditor(ZeroDownTimeMixin, BaseEditor):
pass | zero-downtime-migrations | /zero-downtime-migrations-0.11.tar.gz/zero-downtime-migrations-0.11/zero_downtime_migrations/backend/schema.py | schema.py |
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'tkl@ki*y@8*yma%x67(sa^tj()-yax138#&n^_@3!x0q*fhxj9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'test_app',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'test_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'test_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': '5432',
},
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/' | zero-downtime-migrations | /zero-downtime-migrations-0.11.tar.gz/zero-downtime-migrations-0.11/test_app/settings.py | settings.py |
from dataclasses import dataclass
@dataclass
class KeyCodes:
def __getitem__(self, key):
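        # Allow dict-style lookup by name, e.g. KeyCodes['KEY_A'].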
return getattr(self, key)
MOD_LEFT_CONTROL = 0x01
MOD_LEFT_SHIFT = 0x02
MOD_LEFT_ALT = 0x04
MOD_LEFT_GUI = 0x08
MOD_RIGHT_CONTROL = 0x10
MOD_RIGHT_SHIFT = 0x20
MOD_RIGHT_ALT = 0x40
MOD_RIGHT_GUI = 0x80
KEY_RESERVED = 0x00
KEY_ERROR_ROLLOVER = 0x01
KEY_POST_FAIL = 0x02
KEY_ERROR_UNDEFINED = 0x03
    KEY_A = 0x04 # Keyboard a and A
    KEY_B = 0x05 # Keyboard b and B
    KEY_C = 0x06 # Keyboard c and C
KEY_D = 0x07 # Keyboard d and D
KEY_E = 0x08 # Keyboard e and E
KEY_F = 0x09 # Keyboard f and F
KEY_G = 0x0a # Keyboard g and G
KEY_H = 0x0b # Keyboard h and H
KEY_I = 0x0c # Keyboard i and I
KEY_J = 0x0d # Keyboard j and J
KEY_K = 0x0e # Keyboard k and K
KEY_L = 0x0f # Keyboard l and L
KEY_M = 0x10 # Keyboard m and M
KEY_N = 0x11 # Keyboard n and N
KEY_O = 0x12 # Keyboard o and O
KEY_P = 0x13 # Keyboard p and P
KEY_Q = 0x14 # Keyboard q and Q
KEY_R = 0x15 # Keyboard r and R
KEY_S = 0x16 # Keyboard s and S
KEY_T = 0x17 # Keyboard t and T
KEY_U = 0x18 # Keyboard u and U
KEY_V = 0x19 # Keyboard v and V
KEY_W = 0x1a # Keyboard w and W
KEY_X = 0x1b # Keyboard x and X
KEY_Y = 0x1c # Keyboard y and Y
KEY_Z = 0x1d # Keyboard z and Z
KEY_1 = 0x1e # Keyboard 1 and !
KEY_2 = 0x1f # Keyboard 2 and @
KEY_3 = 0x20 # Keyboard 3 and #
KEY_4 = 0x21 # Keyboard 4 and $
KEY_5 = 0x22 # Keyboard 5 and %
KEY_6 = 0x23 # Keyboard 6 and ^
KEY_7 = 0x24 # Keyboard 7 and &
KEY_8 = 0x25 # Keyboard 8 and *
KEY_9 = 0x26 # Keyboard 9 and (
KEY_0 = 0x27 # Keyboard 0 and )
KEY_ENTER = 0x28 # Keyboard Return (ENTER)
KEY_ESC = 0x29 # Keyboard ESCAPE
KEY_BACKSPACE = 0x2a # Keyboard DELETE (Backspace)
KEY_TAB = 0x2b # Keyboard Tab
KEY_SPACE = 0x2c # Keyboard Spacebar
KEY_MINUS = 0x2d # Keyboard - and _
KEY_EQUAL = 0x2e # Keyboard = and +
KEY_LEFTBRACE = 0x2f # Keyboard [ and {
KEY_RIGHTBRACE = 0x30 # Keyboard ] and }
KEY_BACKSLASH = 0x31 # Keyboard \ and |
KEY_HASHTILDE = 0x32 # Keyboard Non-US # and ~
KEY_SEMICOLON = 0x33 # Keyboard ; and :
KEY_APOSTROPHE = 0x34 # Keyboard ' and "
KEY_GRAVE = 0x35 # Keyboard ` and ~
KEY_COMMA = 0x36 # Keyboard , and <
KEY_DOT = 0x37 # Keyboard . and >
KEY_SLASH = 0x38 # Keyboard / and ?
KEY_CAPSLOCK = 0x39 # Keyboard Caps Lock
KEY_F1 = 0x3a # Keyboard F1
KEY_F2 = 0x3b # Keyboard F2
KEY_F3 = 0x3c # Keyboard F3
KEY_F4 = 0x3d # Keyboard F4
KEY_F5 = 0x3e # Keyboard F5
KEY_F6 = 0x3f # Keyboard F6
KEY_F7 = 0x40 # Keyboard F7
KEY_F8 = 0x41 # Keyboard F8
KEY_F9 = 0x42 # Keyboard F9
KEY_F10 = 0x43 # Keyboard F10
KEY_F11 = 0x44 # Keyboard F11
KEY_F12 = 0x45 # Keyboard F12
KEY_SYSRQ = 0x46 # Keyboard Print Screen
KEY_SCROLLLOCK = 0x47 # Keyboard Scroll Lock
KEY_PAUSE = 0x48 # Keyboard Pause
KEY_INSERT = 0x49 # Keyboard Insert
KEY_HOME = 0x4a # Keyboard Home
KEY_PAGEUP = 0x4b # Keyboard Page Up
KEY_DELETE = 0x4c # Keyboard Delete Forward
KEY_END = 0x4d # Keyboard End
KEY_PAGEDOWN = 0x4e # Keyboard Page Down
KEY_RIGHT = 0x4f # Keyboard Right Arrow
KEY_LEFT = 0x50 # Keyboard Left Arrow
KEY_DOWN = 0x51 # Keyboard Down Arrow
KEY_UP = 0x52 # Keyboard Up Arrow
KEY_NUMLOCK = 0x53 # Keyboard Num Lock and Clear
KEY_KPSLASH = 0x54 # Keypad /
KEY_KPASTERISK = 0x55 # Keypad *
KEY_KPMINUS = 0x56 # Keypad -
KEY_KPPLUS = 0x57 # Keypad +
KEY_KPENTER = 0x58 # Keypad ENTER
KEY_KP1 = 0x59 # Keypad 1 and End
KEY_KP2 = 0x5a # Keypad 2 and Down Arrow
KEY_KP3 = 0x5b # Keypad 3 and PageDn
KEY_KP4 = 0x5c # Keypad 4 and Left Arrow
KEY_KP5 = 0x5d # Keypad 5
KEY_KP6 = 0x5e # Keypad 6 and Right Arrow
KEY_KP7 = 0x5f # Keypad 7 and Home
KEY_KP8 = 0x60 # Keypad 8 and Up Arrow
KEY_KP9 = 0x61 # Keypad 9 and Page Up
KEY_KP0 = 0x62 # Keypad 0 and Insert
KEY_KPDOT = 0x63 # Keypad . and Delete
KEY_102ND = 0x64 # Keyboard Non-US \ and |
KEY_COMPOSE = 0x65 # Keyboard Application
KEY_POWER = 0x66 # Keyboard Power
KEY_KPEQUAL = 0x67 # Keypad =
KEY_F13 = 0x68 # Keyboard F13
KEY_F14 = 0x69 # Keyboard F14
KEY_F15 = 0x6a # Keyboard F15
KEY_F16 = 0x6b # Keyboard F16
KEY_F17 = 0x6c # Keyboard F17
KEY_F18 = 0x6d # Keyboard F18
KEY_F19 = 0x6e # Keyboard F19
KEY_F20 = 0x6f # Keyboard F20
KEY_F21 = 0x70 # Keyboard F21
KEY_F22 = 0x71 # Keyboard F22
KEY_F23 = 0x72 # Keyboard F23
KEY_F24 = 0x73 # Keyboard F24
KEY_OPEN = 0x74 # Keyboard Execute
KEY_HELP = 0x75 # Keyboard Help
KEY_PROPS = 0x76 # Keyboard Menu
KEY_FRONT = 0x77 # Keyboard Select
KEY_STOP = 0x78 # Keyboard Stop
KEY_AGAIN = 0x79 # Keyboard Again
KEY_UNDO = 0x7a # Keyboard Undo
KEY_CUT = 0x7b # Keyboard Cut
KEY_COPY = 0x7c # Keyboard Copy
KEY_PASTE = 0x7d # Keyboard Paste
KEY_FIND = 0x7e # Keyboard Find
KEY_MUTE = 0x7f # Keyboard Mute
KEY_VOLUMEUP = 0x80 # Keyboard Volume Up
KEY_VOLUMEDOWN = 0x81 # Keyboard Volume Down
# = 0x82 Keyboard Locking Caps Lock
# = 0x83 Keyboard Locking Num Lock
# = 0x84 Keyboard Locking Scroll Lock
KEY_KPCOMMA = 0x85 # Keypad Comma
# = 0x86 Keypad Equal Sign
KEY_RO = 0x87 # Keyboard International1
KEY_KATAKANAHIRAGANA = 0x88 # Keyboard International2
KEY_YEN = 0x89 # Keyboard International3
KEY_HENKAN = 0x8a # Keyboard International4
KEY_MUHENKAN = 0x8b # Keyboard International5
KEY_KPJPCOMMA = 0x8c # Keyboard International6
# = 0x8d Keyboard International7
# = 0x8e Keyboard International8
# = 0x8f Keyboard International9
KEY_HANGEUL = 0x90 # Keyboard LANG1
KEY_HANJA = 0x91 # Keyboard LANG2
KEY_KATAKANA = 0x92 # Keyboard LANG3
KEY_HIRAGANA = 0x93 # Keyboard LANG4
KEY_ZENKAKUHANKAKU = 0x94 # Keyboard LANG5
# = 0x95 Keyboard LANG6
# = 0x96 Keyboard LANG7
# = 0x97 Keyboard LANG8
# = 0x98 Keyboard LANG9
# = 0x99 Keyboard Alternate Erase
# = 0x9a Keyboard SysReq/Attention
# = 0x9b Keyboard Cancel
# = 0x9c Keyboard Clear
# = 0x9d Keyboard Prior
# = 0x9e Keyboard Return
# = 0x9f Keyboard Separator
# = 0xa0 Keyboard Out
# = 0xa1 Keyboard Oper
# = 0xa2 Keyboard Clear/Again
# = 0xa3 Keyboard CrSel/Props
# = 0xa4 Keyboard ExSel
# = 0xb0 Keypad 00
# = 0xb1 Keypad 000
# = 0xb2 Thousands Separator
# = 0xb3 Decimal Separator
# = 0xb4 Currency Unit
# = 0xb5 Currency Sub-unit
KEY_KPLEFTPAREN = 0xb6 # Keypad (
KEY_KPRIGHTPAREN = 0xb7 # Keypad )
# = 0xb8 Keypad {
# = 0xb9 Keypad }
# = 0xba Keypad Tab
# = 0xbb Keypad Backspace
# = 0xbc Keypad A
# = 0xbd Keypad B
# = 0xbe Keypad C
# = 0xbf Keypad D
# = 0xc0 Keypad E
# = 0xc1 Keypad F
# = 0xc2 Keypad XOR
# = 0xc3 Keypad ^
# = 0xc4 Keypad %
# = 0xc5 Keypad <
# = 0xc6 Keypad >
# = 0xc7 Keypad &
# = 0xc8 Keypad &&
# = 0xc9 Keypad |
# = 0xca Keypad ||
# = 0xcb Keypad :
# = 0xcc Keypad #
# = 0xcd Keypad Space
# = 0xce Keypad @
# = 0xcf Keypad !
# = 0xd0 Keypad Memory Store
# = 0xd1 Keypad Memory Recall
# = 0xd2 Keypad Memory Clear
# = 0xd3 Keypad Memory Add
# = 0xd4 Keypad Memory Subtract
# = 0xd5 Keypad Memory Multiply
# = 0xd6 Keypad Memory Divide
# = 0xd7 Keypad +/-
# = 0xd8 Keypad Clear
# = 0xd9 Keypad Clear Entry
# = 0xda Keypad Binary
# = 0xdb Keypad Octal
# = 0xdc Keypad Decimal
# = 0xdd Keypad Hexadecimal
KEY_LEFTCTRL = 0xe0 # Keyboard Left Control
KEY_LEFTSHIFT = 0xe1 # Keyboard Left Shift
KEY_LEFTALT = 0xe2 # Keyboard Left Alt
KEY_LEFTMETA = 0xe3 # Keyboard Left GUI
KEY_RIGHTCTRL = 0xe4 # Keyboard Right Control
KEY_RIGHTSHIFT = 0xe5 # Keyboard Right Shift
KEY_RIGHTALT = 0xe6 # Keyboard Right Alt
KEY_RIGHTMETA = 0xe7 # Keyboard Right GUI
KEY_MEDIA_PLAYPAUSE = 0xe8
KEY_MEDIA_STOPCD = 0xe9
KEY_MEDIA_PREVIOUSSONG = 0xea
KEY_MEDIA_NEXTSONG = 0xeb
KEY_MEDIA_EJECTCD = 0xec
KEY_MEDIA_VOLUMEUP = 0xed
KEY_MEDIA_VOLUMEDOWN = 0xee
KEY_MEDIA_MUTE = 0xef
KEY_MEDIA_WWW = 0xf0
KEY_MEDIA_BACK = 0xf1
KEY_MEDIA_FORWARD = 0xf2
KEY_MEDIA_STOP = 0xf3
KEY_MEDIA_FIND = 0xf4
KEY_MEDIA_SCROLLUP = 0xf5
KEY_MEDIA_SCROLLDOWN = 0xf6
KEY_MEDIA_EDIT = 0xf7
KEY_MEDIA_SLEEP = 0xf8
KEY_MEDIA_COFFEE = 0xf9
KEY_MEDIA_REFRESH = 0xfa
KEY_MEDIA_CALC = 0xfb
KeyCodes = KeyCodes() | zero-hid | /zero_hid-0.1.4-py3-none-any.whl/zero_hid/hid/keycodes.py | keycodes.py |
import dataclasses
import logging
import multiprocessing
import typing
logger = logging.getLogger(__name__)
class Error(Exception):
pass
class WriteError(Error):
pass
@dataclasses.dataclass
class ProcessResult:
return_value: typing.Any = None
exception: Exception = None
def was_successful(self) -> bool:
return self.exception is None
class ProcessWithResult(multiprocessing.Process):
"""A multiprocessing.Process object that keeps track of the child process'
result (i.e., the return value and exception raised).
Inspired by:
https://stackoverflow.com/a/33599967/3769045
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Create the Connection objects used for communication between the
# parent and child processes.
self.parent_conn, self.child_conn = multiprocessing.Pipe()
def run(self):
"""Method to be run in sub-process."""
result = ProcessResult()
try:
if self._target:
result.return_value = self._target(*self._args, **self._kwargs)
except Exception as e:
result.exception = e
raise
finally:
self.child_conn.send(result)
def result(self):
"""Get the result from the child process.
Returns:
If the child process has completed, a ProcessResult object.
Otherwise, a None object.
"""
return self.parent_conn.recv() if self.parent_conn.poll() else None
def _write_to_hid_interface_immediately(hid_path, buffer):
try:
with open(hid_path, 'ab+') as hid_handle:
hid_handle.write(bytearray(buffer))
except BlockingIOError:
logger.error(
f'Failed to write to HID interface: {hid_path}. Is USB cable connected and Gadget module installed? check https://git.io/J1T7Q'
)
def write_to_hid_interface(hid_path, buffer):
# Avoid an unnecessary string formatting call in a write that requires low
# latency.
if logger.getEffectiveLevel() == logging.DEBUG:
logger.debug('writing to HID interface %s: %s', hid_path,
' '.join(['0x%02x' % x for x in buffer]))
# Writes can hang, for example, when TinyPilot is attempting to write to the
# mouse interface, but the target system has no GUI. To avoid locking up the
# main server process, perform the HID interface I/O in a separate process.
write_process = ProcessWithResult(
target=_write_to_hid_interface_immediately,
args=(hid_path, buffer),
daemon=True)
write_process.start()
write_process.join(timeout=0.5)
if write_process.is_alive():
write_process.kill()
_wait_for_process_exit(write_process)
result = write_process.result()
# If the result is None, it means the write failed to complete in time.
if result is None or not result.was_successful():
raise WriteError(
f'Failed to write to HID interface: {hid_path}. Is USB cable connected and Gadget module installed? check https://git.io/J1T7Q'
)
def _wait_for_process_exit(target_process):
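    """Wait for the target process to exit, retrying the join briefly in case a just-killed process is slow to terminate."""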
max_attempts = 3
for _ in range(max_attempts):
target_process.join(timeout=0.1) | zero-hid | /zero_hid-0.1.4-py3-none-any.whl/zero_hid/hid/write.py | write.py |
from eliot import ActionType, Field, MessageType
PRIVACYPASS_MESSAGE = Field(
"message",
str,
"The PrivacyPass request-binding data associated with a pass.",
)
INVALID_REASON = Field(
"reason",
str,
"The reason given by the server for rejecting a pass as invalid.",
)
PASS_COUNT = Field(
"count",
int,
"A number of passes.",
)
GET_PASSES = MessageType(
"zkapauthorizer:get-passes",
[PRIVACYPASS_MESSAGE, PASS_COUNT],
"An attempt to spend passes is beginning.",
)
SPENT_PASSES = MessageType(
"zkapauthorizer:spent-passes",
[PASS_COUNT],
"An attempt to spend passes has succeeded.",
)
INVALID_PASSES = MessageType(
"zkapauthorizer:invalid-passes",
[INVALID_REASON, PASS_COUNT],
"An attempt to spend passes has found some to be invalid.",
)
RESET_PASSES = MessageType(
"zkapauthorizer:reset-passes",
[PASS_COUNT],
"Some passes involved in a failed spending attempt have not definitely been spent and are being returned for future use.",
)
SIGNATURE_CHECK_FAILED = MessageType(
"zkapauthorizer:storage-client:signature-check-failed",
[PASS_COUNT],
"Some passes the client tried to use were rejected for having invalid signatures.",
)
CALL_WITH_PASSES = ActionType(
"zkapauthorizer:storage-client:call-with-passes",
[PASS_COUNT],
[],
"A storage operation is being started which may spend some passes.",
)
CURRENT_SIZES = Field(
"current_sizes",
dict,
"A dictionary mapping the numbers of existing shares to their existing sizes.",
)
TW_VECTORS_SUMMARY = Field(
"tw_vectors_summary",
dict,
"A dictionary mapping share numbers from tw_vectors to test and write vector summaries.",
)
NEW_SIZES = Field(
"new_sizes",
dict,
"A dictionary like that of CURRENT_SIZES but for the sizes computed for the shares after applying tw_vectors.",
)
NEW_PASSES = Field(
"new_passes",
int,
"The number of passes computed as being required for the change in size.",
)
MUTABLE_PASSES_REQUIRED = MessageType(
"zkapauthorizer:storage:mutable-passes-required",
[CURRENT_SIZES, TW_VECTORS_SUMMARY, NEW_SIZES, NEW_PASSES],
"Some number of passes has been computed as the cost of updating a mutable.",
) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/eliot.py | eliot.py |
from functools import partial, wraps
from typing import Any, Generator, Optional
import attr
from allmydata.interfaces import IStorageServer
from allmydata.util.eliotutil import log_call_deferred
from attr.validators import provides
from eliot.twisted import inline_callbacks
from twisted.internet.defer import Deferred, returnValue
from twisted.internet.interfaces import IReactorTime
from twisted.python.reflect import namedAny
from zope.interface import implementer
from .eliot import CALL_WITH_PASSES, SIGNATURE_CHECK_FAILED
from .storage_common import (
MorePassesRequired,
add_lease_message,
allocate_buckets_message,
get_required_new_passes_for_mutable_write,
get_write_sharenums,
pass_value_attribute,
required_passes,
slot_testv_and_readv_and_writev_message,
)
Secrets = tuple[bytes, bytes, bytes]
TestWriteVectors = dict[
int,
tuple[
list[
tuple[int, int, bytes],
],
list[
tuple[int, bytes],
],
Optional[int],
],
]
ReadVector = list[tuple[int, int]]
class IncorrectStorageServerReference(Exception):
"""
A Foolscap remote object which should reference a ZKAPAuthorizer storage
server instead references some other kind of object. This makes the
connection, and thus the configured storage server, unusable.
"""
def __init__(self, furl, actual_name, expected_name):
self.furl = furl
self.actual_name = actual_name
self.expected_name = expected_name
def __str__(self):
return "RemoteReference via {} provides {} instead of {}".format(
self.furl,
self.actual_name,
self.expected_name,
)
def invalidate_rejected_passes(passes, more_passes_required):
"""
Return a new ``IPassGroup`` with all rejected passes removed from it.
:param IPassGroup passes: A group of passes, some of which may have been
rejected.
:param MorePassesRequired more_passes_required: An exception possibly
detailing the rejection of some passes from the group.
:return: ``None`` if no passes in the group were rejected and so there is
nothing to replace. Otherwise, a new ``IPassGroup`` created from
``passes`` but with rejected passes replaced with new ones.
"""
num_failed = len(more_passes_required.signature_check_failed)
if num_failed == 0:
# If no signature checks failed then the call just didn't supply
# enough passes. The exception tells us how many passes we should
# spend so we could try again with that number of passes but for
# now we'll just let the exception propagate. The client should
# always figure out the number of passes right on the first try so
# this case is somewhat suspicious. Err on the side of lack of
# service instead of burning extra passes.
#
# We *could* just `raise` here and only be called from an `except`
# suite... but let's not be so vulgar.
return None
SIGNATURE_CHECK_FAILED.log(count=num_failed)
rejected_passes, okay_passes = passes.split(
more_passes_required.signature_check_failed
)
rejected_passes.mark_invalid("signature check failed")
# It would be great to just expand okay_passes right here. However, if
# that fails (eg because we don't have enough tokens remaining) then the
# caller will have a hard time figuring out which okay passes remain that
# it needs to reset. :/ So, instead, pass back the complete okay set. The
# caller can figure out by how much to expand it by considering its size
# and the original number of passes it requested.
return okay_passes
@inline_callbacks
def call_with_passes_with_manual_spend(method, num_passes, get_passes, on_success):
"""
Call a method, passing the requested number of passes as the first
argument, and try again if the call fails with an error related to some of
the passes being rejected.
:param (IPassGroup -> Deferred) method: An operation to call with some passes.
If the returned ``Deferred`` fires with ``MorePassesRequired`` then
the invalid passes will be discarded and replacement passes will be
requested for a new call of ``method``. This will repeat until no
passes remain, the method succeeds, or the methods fails in a
different way.
:param int num_passes: The number of passes to pass to the call.
:param (int -> IPassGroup) get_passes: A function for getting
passes.
:param (object -> IPassGroup -> None) on_success: A function to call when
``method`` succeeds. The first argument is the result of ``method``.
The second argument is the ``IPassGroup`` used with the successful
call. The intended purpose of this hook is to mark as spent passes in
the group which the method has spent. This is useful if the result of
``method`` can be used to determine the operation had a lower cost
than the worst-case expected from its inputs.
Spent passes should be marked as spent. All others should be reset.
:return: A ``Deferred`` that fires with whatever the ``Deferred`` returned
by ``method`` fires with (apart from ``MorePassesRequired`` failures
that trigger a retry).
"""
with CALL_WITH_PASSES(count=num_passes):
pass_group = get_passes(num_passes)
try:
# Try and repeat as necessary.
while True:
try:
result = yield method(pass_group)
except MorePassesRequired as e:
okay_pass_group = invalidate_rejected_passes(
pass_group,
e,
)
if okay_pass_group is None:
raise
else:
# Update the local in case we end up going to the
# except suite below.
pass_group = okay_pass_group
# Add the necessary number of new passes. This might
# fail if we don't have enough tokens.
pass_group = pass_group.expand(
num_passes - len(pass_group.passes)
)
else:
on_success(result, pass_group)
break
except:
# Something went wrong that we can't address with a retry.
pass_group.reset()
raise
# Give the operation's result to the caller.
returnValue(result)
def call_with_passes(method, num_passes, get_passes):
"""
Similar to ``call_with_passes_with_manual_spend`` but automatically spend
all passes associated with a successful call of ``method``.
For parameter documentation, see ``call_with_passes_with_manual_spend``.
"""
return call_with_passes_with_manual_spend(
method,
num_passes,
get_passes,
# Commit the spend of the passes when the operation finally succeeds.
lambda result, pass_group: pass_group.mark_spent(),
)
def with_rref(f):
"""
Decorate a function so that it automatically receives a
``RemoteReference`` as its first argument when called.
The ``RemoteReference`` is retrieved by calling ``_rref`` on the first
argument passed to the function (expected to be ``self``).
"""
@wraps(f)
def g(self, *args, **kwargs):
return f(self, self._rref(), *args, **kwargs)
return g
def _encode_passes(group):
"""
:param IPassGroup group: A group of passes to encode.
:return list[bytes]: The encoded form of the passes in the given group.
"""
return list(t.pass_bytes for t in group.passes)
@implementer(IStorageServer)
@attr.s
class ZKAPAuthorizerStorageClient(object):
"""
An implementation of the client portion of an access-pass-based
authorization scheme on top of the basic Tahoe-LAFS storage protocol.
This ``IStorageServer`` implementation aims to offer the same storage
functionality as Tahoe-LAFS' built-in storage server but with an added
layer of pass-based authorization for some operations. The Python
interface exposed to application code is the same but the network protocol
is augmented with passes which are automatically inserted by this class.
The passes are interpreted by the corresponding server-side implementation
of this scheme.
:ivar _get_rref: A no-argument callable which retrieves the most recently
valid ``RemoteReference`` corresponding to the server-side object for
this scheme.
:ivar (bytes -> int -> IPassGroup) _get_passes: A callable to use to
retrieve passes which can be used to authorize an operation. The
first argument is utf-8 encoded message binding the passes to the
request for which they will be used. The second gives the number of
passes to request.
"""
_expected_remote_interface_name = (
"RIPrivacyPassAuthorizedStorageServer.tahoe.privatestorage.io"
)
_pass_value = pass_value_attribute()
_get_rref = attr.ib()
_get_passes = attr.ib()
_clock = attr.ib(
validator=provides(IReactorTime),
default=attr.Factory(partial(namedAny, "twisted.internet.reactor")),
)
def _rref(self):
rref = self._get_rref()
# rref provides foolscap.ipb.IRemoteReference but in practice it is a
# foolscap.referenceable.RemoteReference instance. The interface
# doesn't give us enough functionality to verify that the reference is
# to the right sort of thing but the concrete type does.
#
# Foolscap development isn't exactly racing along and if we're lucky
# we'll switch to HTTP before too long anyway.
actual_name = rref.tracker.interfaceName
expected_name = self._expected_remote_interface_name
if actual_name != expected_name:
raise IncorrectStorageServerReference(
rref.tracker.getURL(),
actual_name,
expected_name,
)
return rref
@with_rref
def get_version(self, rref):
return rref.callRemote(
"get_version",
)
def _spend_for_allocate_buckets(
self,
allocated_size,
result,
pass_group,
):
"""
Spend some subset of a pass group based on the results of an
*allocate_buckets* call.
        :param int allocated_size: The size of the shares that may have been
allocated.
:param ({int}, {int: IBucketWriter}) result: The result of the remote
*allocate_buckets* call.
:param IPassGroup pass_group: The passes which were used with the
remote call. A prefix of the passes in this group will be spent
based on the buckets which ``result`` indicates were actually
allocated.
"""
alreadygot, bucketwriters = result
# Passes only need to be spent for buckets that are being
# allocated. Someone already paid for any shares the server
# already has.
actual_passes = required_passes(
self._pass_value,
[allocated_size] * len(bucketwriters),
)
to_spend, to_reset = pass_group.split(range(actual_passes))
to_spend.mark_spent()
to_reset.reset()
@with_rref
def allocate_buckets(
self,
rref,
storage_index,
renew_secret,
cancel_secret,
sharenums,
allocated_size,
canary,
):
num_passes = required_passes(
self._pass_value, [allocated_size] * len(sharenums)
)
return call_with_passes_with_manual_spend(
lambda passes: rref.callRemote(
"allocate_buckets",
_encode_passes(passes),
storage_index,
renew_secret,
cancel_secret,
sharenums,
allocated_size,
canary,
),
num_passes,
partial(
self._get_passes,
allocate_buckets_message(storage_index),
),
partial(self._spend_for_allocate_buckets, allocated_size),
)
@with_rref
def get_buckets(
self,
rref,
storage_index,
):
return rref.callRemote(
"get_buckets",
storage_index,
)
@inline_callbacks
@with_rref
def add_lease(
self,
rref,
storage_index,
renew_secret,
cancel_secret,
):
share_sizes = (
yield rref.callRemote(
"share_sizes",
storage_index,
None,
)
).values()
num_passes = required_passes(self._pass_value, share_sizes)
result = yield call_with_passes(
lambda passes: rref.callRemote(
"add_lease",
_encode_passes(passes),
storage_index,
renew_secret,
cancel_secret,
),
num_passes,
partial(self._get_passes, add_lease_message(storage_index)),
)
returnValue(result)
@with_rref
def stat_shares(self, rref, storage_indexes):
return rref.callRemote(
"stat_shares",
storage_indexes,
)
@with_rref
def advise_corrupt_share(
self,
rref,
share_type,
storage_index,
shnum,
reason,
):
return rref.callRemote(
"advise_corrupt_share",
share_type,
storage_index,
shnum,
reason,
)
@log_call_deferred("zkapauthorizer:storage-client:slot_testv_and_readv_and_writev")
@inline_callbacks
@with_rref
def slot_testv_and_readv_and_writev(
self,
rref: Any,
storage_index: bytes,
secrets: Secrets,
tw_vectors: TestWriteVectors,
r_vector: ReadVector,
) -> Generator[Deferred[Any], Any, None]:
# Read operations are free.
num_passes = 0
# Convert tw_vectors from the new internal format to the wire format.
# See https://github.com/tahoe-lafs/tahoe-lafs/pull/1127/files#r716939082
old_tw_vectors = {
sharenum: (
[
(offset, length, b"eq", specimen)
for (offset, length, specimen) in test_vector
],
data_vectors,
new_length,
)
for (
sharenum,
(test_vector, data_vectors, new_length),
) in tw_vectors.items()
}
write_sharenums = get_write_sharenums(old_tw_vectors)
if len(write_sharenums) > 0:
# When performing writes, if we're increasing the storage
# requirement, we need to spend more passes. Unfortunately we
# don't know what the current storage requirements are at this
# layer of the system. It's *likely* that a higher layer does but
# that doesn't help us, even if it were guaranteed. So, instead,
# ask the server. Invoke a ZKAPAuthorizer-supplied remote method
# on the storage server that will give us a really good estimate
# of the current size of all of the specified shares (keys of
# tw_vectors).
[stats] = yield rref.callRemote(
"stat_shares",
[storage_index],
)
# Filter down to only the shares that have an active lease. If
# we're going to write to any other shares we will have to pay to
# renew their leases.
now = self._clock.seconds()
current_sizes = {
sharenum: stat.size
for (sharenum, stat) in stats.items()
if stat.lease_expiration > now
# Also, the size of any share we're not writing to doesn't
# matter.
and sharenum in write_sharenums
}
# Determine the cost of the new storage for the operation.
num_passes = get_required_new_passes_for_mutable_write(
self._pass_value,
current_sizes,
old_tw_vectors,
)
result = yield call_with_passes(
lambda passes: rref.callRemote(
"slot_testv_and_readv_and_writev",
_encode_passes(passes),
storage_index,
secrets,
old_tw_vectors,
r_vector,
),
num_passes,
partial(
self._get_passes,
slot_testv_and_readv_and_writev_message(storage_index),
),
)
returnValue(result)
@with_rref
def slot_readv(
self,
rref,
storage_index,
shares,
r_vector,
):
return rref.callRemote(
"slot_readv",
storage_index,
shares,
r_vector,
) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/_storage_client.py | _storage_client.py |
# https://github.com/twisted/nevow/issues/106 may affect this code but if so
# then the hotfix Tahoe-LAFS applies should deal with it.
#
# We want to avoid depending on the Tahoe-LAFS Python API since it isn't
# public but we do want to make sure that hotfix is applied. This seems like
# an alright compromise.
import allmydata.web.private as awp
import attr
from cryptography.hazmat.primitives.constant_time import bytes_eq
from twisted.cred.checkers import ANONYMOUS
from twisted.cred.credentials import ICredentials
from twisted.cred.error import UnauthorizedLogin
from twisted.cred.portal import IRealm, Portal
from twisted.internet.defer import fail, succeed
from twisted.python.failure import Failure
from twisted.web.guard import HTTPAuthSessionWrapper
from twisted.web.iweb import ICredentialFactory
from twisted.web.resource import IResource
from zope.interface import implementer
del awp
SCHEME = b"tahoe-lafs"
class IToken(ICredentials):
def equals(auth_token):
pass
@implementer(IToken)
@attr.s
class Token(object):
proposed_token = attr.ib(type=bytes)
def equals(self, valid_token):
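        # Compare in constant time to avoid leaking the valid token through timing.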
return bytes_eq(
valid_token,
self.proposed_token,
)
@attr.s
class TokenChecker(object):
get_auth_token = attr.ib()
credentialInterfaces = [IToken]
def requestAvatarId(self, credentials):
required_token = self.get_auth_token()
if credentials.equals(required_token):
return succeed(ANONYMOUS)
return fail(Failure(UnauthorizedLogin()))
@implementer(ICredentialFactory)
@attr.s
class TokenCredentialFactory(object):
scheme = SCHEME
authentication_realm = b"tahoe-lafs"
def getChallenge(self, request):
return {b"realm": self.authentication_realm}
def decode(self, response, request):
return Token(response)
@implementer(IRealm)
@attr.s
class PrivateRealm(object):
_root = attr.ib()
def _logout(self):
pass
def requestAvatar(self, avatarId, mind, *interfaces):
if IResource in interfaces:
return (IResource, self._root, self._logout)
raise NotImplementedError(
"PrivateRealm supports IResource not {}".format(interfaces),
)
def _create_private_tree(get_auth_token, vulnerable):
realm = PrivateRealm(vulnerable)
portal = Portal(realm, [TokenChecker(get_auth_token)])
return HTTPAuthSessionWrapper(portal, [TokenCredentialFactory()])
def create_private_tree(get_auth_token, vulnerable_tree):
"""
Create a new resource tree that only allows requests if they include a
correct `Authorization: tahoe-lafs <api_auth_token>` header (where
`api_auth_token` matches the private configuration value).
:param (IO -> bytes) get_auth_token: Get the valid authorization token.
    :param IResource vulnerable_tree: The resource
    hierarchy which will be protected by the authorization mechanism.
"""
return _create_private_tree(
get_auth_token,
vulnerable_tree,
) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/private.py | private.py |
from __future__ import annotations
from typing import Callable
import attr
from zope.interface import Attribute, Interface, implementer
from .eliot import GET_PASSES, INVALID_PASSES, RESET_PASSES, SPENT_PASSES
from .model import Pass, UnblindedToken
class IPassGroup(Interface):
"""
    A group of passes meant to be spent together.
"""
passes = Attribute(":ivar list[Pass] passes: The passes themselves.")
def split(select_indices):
"""
Create two new ``IPassGroup`` providers. The first contains all passes in
this group at the given indices. The second contains all the others.
:param list[int] select_indices: The indices of the passes to include
in the first resulting group.
:return (IPassGroup, IPassGroup): The two new groups.
"""
def expand(by_amount):
"""
Create a new ``IPassGroup`` provider which contains all of this group's
passes and some more.
:param int by_amount: The number of additional passes the resulting
group should contain.
:return IPassGroup: The new group.
"""
def mark_spent():
"""
The passes have been spent successfully. Ensure none of them appear in
any ``IPassGroup`` provider created in the future.
:return: ``None``
"""
def mark_invalid(reason):
"""
The passes could not be spent. Ensure none of them appear in any
``IPassGroup`` provider created in the future.
:param unicode reason: A short description of the reason the passes
could not be spent.
:return: ``None``
"""
def reset():
"""
        The passes have not been spent. Return them for use in a future
``IPassGroup`` provider.
:return: ``None``
"""
class IPassFactory(Interface):
"""
An object which can create passes.
"""
def get(message, num_passes):
"""
:param unicode message: A request-binding message for the resulting passes.
:param int num_passes: The number of passes to request.
:return IPassGroup: A group of passes bound to the given message and
of the requested size.
"""
def mark_spent(unblinded_tokens: list[UnblindedToken]) -> None:
"""
See ``IPassGroup.mark_spent``
"""
def mark_invalid(reason: str, unblinded_tokens: list[UnblindedToken]) -> None:
"""
See ``IPassGroup.mark_invalid``
"""
def reset(unblinded_tokens: list[UnblindedToken]) -> None:
"""
See ``IPassGroup.reset``
"""
@implementer(IPassGroup)
@attr.s
class PassGroup(object):
"""
Track the state of a group of passes intended as payment for an operation.
:ivar _message: The request binding message for this group of
passes.
:ivar IPassFactory _factory: The factory which created this pass group.
:ivar list[Pass] passes: The passes of which this group consists.
"""
_message: bytes = attr.ib(validator=attr.validators.instance_of(bytes))
_factory: IPassFactory = attr.ib(validator=attr.validators.provides(IPassFactory))
_tokens: list[tuple[UnblindedToken, Pass]] = attr.ib(
validator=attr.validators.instance_of(list)
)
@property
def passes(self) -> list[Pass]:
return list(pass_ for (unblinded_token, pass_) in self._tokens)
@property
def unblinded_tokens(self) -> list[UnblindedToken]:
return list(unblinded_token for (unblinded_token, pass_) in self._tokens)
def split(self, select_indices: list[int]) -> tuple[PassGroup, PassGroup]:
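        # Partition the (unblinded token, pass) pairs by index, preserving order.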
selected = []
unselected = []
for idx, t in enumerate(self._tokens):
if idx in select_indices:
selected.append(t)
else:
unselected.append(t)
return (
attr.evolve(self, tokens=selected),
attr.evolve(self, tokens=unselected),
)
def expand(self, by_amount: int) -> PassGroup:
return attr.evolve(
self,
tokens=self._tokens + self._factory.get(self._message, by_amount)._tokens,
)
def mark_spent(self) -> None:
self._factory.mark_spent(self.unblinded_tokens)
def mark_invalid(self, reason) -> None:
self._factory.mark_invalid(reason, self.unblinded_tokens)
def reset(self) -> None:
self._factory.reset(self.unblinded_tokens)
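# Illustrative sketch (an addition for clarity, not used by the module): the
# partition rule ``PassGroup.split`` applies to its token list, restated over
# plain lists. It assumes nothing beyond the Python standard library.
def _split_by_indices(items: list, select_indices: list) -> tuple:
    selected = [item for idx, item in enumerate(items) if idx in select_indices]
    unselected = [item for idx, item in enumerate(items) if idx not in select_indices]
    return selected, unselected
# For example, _split_by_indices(["a", "b", "c"], [0, 2]) == (["a", "c"], ["b"]).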
@implementer(IPassFactory)
@attr.s
class SpendingController(object):
"""
A ``SpendingController`` gives out ZKAPs and arranges for re-spend
attempts when necessary.
"""
get_unblinded_tokens: Callable[[int], list[UnblindedToken]] = attr.ib()
discard_unblinded_tokens: Callable[[list[UnblindedToken]], None] = attr.ib()
invalidate_unblinded_tokens: Callable[[list[UnblindedToken]], None] = attr.ib()
reset_unblinded_tokens: Callable[[list[UnblindedToken]], None] = attr.ib()
tokens_to_passes: Callable[[bytes, list[UnblindedToken]], list[Pass]] = attr.ib()
@classmethod
def for_store(cls, tokens_to_passes, store):
return cls(
get_unblinded_tokens=store.get_unblinded_tokens,
discard_unblinded_tokens=store.discard_unblinded_tokens,
invalidate_unblinded_tokens=store.invalidate_unblinded_tokens,
reset_unblinded_tokens=store.reset_unblinded_tokens,
tokens_to_passes=tokens_to_passes,
)
def get(self, message, num_passes):
unblinded_tokens = self.get_unblinded_tokens(num_passes)
passes = self.tokens_to_passes(message, unblinded_tokens)
GET_PASSES.log(
message=message.decode("utf-8"),
count=num_passes,
)
return PassGroup(message, self, list(zip(unblinded_tokens, passes)))
def mark_spent(self, unblinded_tokens):
SPENT_PASSES.log(
count=len(unblinded_tokens),
)
self.discard_unblinded_tokens(unblinded_tokens)
def mark_invalid(self, reason, unblinded_tokens):
INVALID_PASSES.log(
reason=reason,
count=len(unblinded_tokens),
)
self.invalidate_unblinded_tokens(reason, unblinded_tokens)
def reset(self, unblinded_tokens):
RESET_PASSES.log(
count=len(unblinded_tokens),
)
self.reset_unblinded_tokens(unblinded_tokens) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/spending.py | spending.py |
import attr
from allmydata.interfaces import Offset, RIStorageServer, StorageIndex
from foolscap.api import Any, Copyable, DictOf, ListOf, RemoteCopy
from foolscap.constraint import ByteStringConstraint
from foolscap.remoteinterface import RemoteInterface, RemoteMethodSchema
@attr.s
class ShareStat(Copyable, RemoteCopy):
"""
Represent some metadata about a share.
:ivar int size: The size, in bytes, of the share.
:ivar int lease_expiration: The POSIX timestamp of the time at which the
lease on this share expires, or None if there is no lease.
"""
typeToCopy = copytype = "ShareStat"
# To be a RemoteCopy it must be possible to instantiate this with no
# arguments. :/ So supply defaults for these attributes.
size = attr.ib(default=0)
lease_expiration = attr.ib(default=0)
# The RemoteCopy interface
def setCopyableState(self, state):
self.__dict__ = state
# The Foolscap convention seems to be to try to constrain inputs to valid
# values. So we'll try to limit the number of passes a client can supply.
# Foolscap may be moving away from this so we may eventually drop this as
# well. Though it may still make sense on a non-Foolscap protocol (eg HTTP)
# which Tahoe-LAFS may eventually support.
#
# If a pass is worth 128 KiB of storage for some amount of time, 2 ** 20
# passes is worth 128 GiB of storage for some amount of time. It is an
# arbitrary upper limit on the size of immutable files but maybe it's large
# enough to not be an issue for a while.
#
# The argument for having a limit here at all is protection against denial of
# service attacks that exhaust server memory by creating unbearably large
# lists.
#
# A limit of 2 ** 20 passes translates to 177 MiB (times some constant factor
# for Foolscap/Python overhead). That should be tolerable.
_MAXIMUM_PASSES_PER_CALL = 2**20
# This is the length of a serialized Ristretto-flavored PrivacyPass pass. The
# pass is a combination of token preimages and unblinded token signatures,
# each base64-encoded.
_PASS_LENGTH = 177
# Take those values and turn them into the appropriate Foolscap constraint
# objects. Foolscap seems to have a convention of representing these as
# CamelCase module-level values so I replicate that here.
_Pass = ByteStringConstraint(maxLength=_PASS_LENGTH, minLength=_PASS_LENGTH)
_PassList = ListOf(_Pass, maxLength=_MAXIMUM_PASSES_PER_CALL)
def add_passes(schema):
"""
Add a ``passes`` parameter to the given method schema.
:param foolscap.remoteinterface.RemoteMethodSchema schema: An existing
method schema to modify.
:return foolscap.remoteinterface.RemoteMethodSchema: A schema like
``schema`` but with one additional required argument.
"""
return add_arguments(schema, [("passes", _PassList)])
def add_arguments(schema, kwargs):
"""
Create a new schema like ``schema`` but with the arguments given by
``kwargs`` prepended to the signature.
:param foolscap.remoteinterface.RemoteMethodSchema schema: The existing
schema.
:param list[(bytes, foolscap.IConstraint)] kwargs: The arguments to
prepend to the signature of ``schema``.
:return foolscap.remoteinterface.RemoteMethodSchema: The new schema
object.
"""
new_kwargs = dict(schema.argConstraints)
new_kwargs.update(kwargs)
modified_schema = RemoteMethodSchema(**new_kwargs)
# Initialized from **new_kwargs, RemoteMethodSchema.argumentNames is in
# some arbitrary, probably-incorrect order. This breaks user code which
# tries to use positional arguments. Put them back in the order they were
# in originally (in the input ``schema``), prepended with the newly added
# arguments.
modified_schema.argumentNames = (
# The new arguments
list(argName for (argName, _) in kwargs)
+
# The original arguments in the original order
schema.argumentNames
)
return modified_schema
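# Hypothetical usage sketch: ``add_passes`` takes an existing remote method
# schema and yields one whose signature is prefixed with a required ``passes``
# argument. The ``add_lease`` schema is real (it is used just below); the
# helper function itself exists only for illustration.
def _example_prepend_passes() -> list:
    schema = add_passes(RIStorageServer["add_lease"])
    # ``passes`` now appears first in the ordered argument names.
    return schema.argumentNames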
class RIPrivacyPassAuthorizedStorageServer(RemoteInterface):
"""
An object which can store and retrieve shares, subject to pass-based
authorization.
This is much the same as ``allmydata.interfaces.RIStorageServer`` but
several of its methods take an additional ``passes`` parameter. Clients
are expected to supply suitable passes and only after the passes have been
validated is service provided.
"""
__remote_name__ = "RIPrivacyPassAuthorizedStorageServer.tahoe.privatestorage.io"
get_version = RIStorageServer["get_version"]
allocate_buckets = add_passes(RIStorageServer["allocate_buckets"])
add_lease = add_passes(RIStorageServer["add_lease"])
get_buckets = RIStorageServer["get_buckets"]
def share_sizes(
storage_index_or_slot=StorageIndex,
# Notionally, ChoiceOf(None, SetOf(int, maxLength=MAX_BUCKETS)).
# However, support for such a construction appears to be
# unimplemented in Foolscap. So, instead...
sharenums=Any(),
):
"""
Get the size of the given shares in the given storage index or slot. If a
share has no stored state, its size is reported as 0.
"""
return DictOf(int, Offset)
def stat_shares(
storage_indexes_or_slots=ListOf(StorageIndex),
):
"""
Get various metadata about shares in the given storage index or slot.
:return [{int: ShareStat}]: A list of share stats. Dictionaries in
the list correspond to the results for each storage index
requested by the ``storage_indexes_or_slots`` argument. Items in
the dictionary give share stats for each share known to this
server to be associated with the corresponding storage index.
Keys are share numbers and values are the stats.
"""
# Any() should be ShareStat but I don't know how to spell that.
return ListOf(DictOf(int, Any()))
slot_readv = RIStorageServer["slot_readv"]
slot_testv_and_readv_and_writev = add_passes(
RIStorageServer["slot_testv_and_readv_and_writev"],
)
advise_corrupt_share = RIStorageServer["advise_corrupt_share"] | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/foolscap.py | foolscap.py |
from collections.abc import Awaitable
from functools import partial
from json import dumps, loads
from typing import Callable, Optional
from allmydata.uri import ReadonlyDirectoryURI, from_string
from attr import Factory, define, field
from autobahn.twisted.resource import WebSocketResource
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from autobahn.websocket.interfaces import IWebSocketClientAgent
from hyperlink import DecodedURL
from twisted.internet.defer import Deferred
from twisted.logger import Logger
from twisted.python.failure import Failure
from twisted.web.http import BAD_REQUEST, CONFLICT, CREATED, INTERNAL_SERVER_ERROR
from twisted.web.iweb import IRequest
from twisted.web.resource import ErrorPage, IResource, NoResource, Resource
from twisted.web.server import NOT_DONE_YET
from zope.interface import Attribute
from . import NAME
from . import __version__ as _zkapauthorizer_version
from ._base64 import urlsafe_b64decode
from ._json import dumps_utf8
from .controller import PaymentController, get_redeemer
from .lease_maintenance import LeaseMaintenanceConfig
from .model import VoucherStore
from .pricecalculator import PriceCalculator
from .private import create_private_tree
from .recover import Downloader, RecoveryStages, RecoveryState, StatefulRecoverer
from .replicate import ReplicationAlreadySetup
from .storage_common import (
get_configured_allowed_public_keys,
get_configured_pass_value,
get_configured_shares_needed,
get_configured_shares_total,
)
from .tahoe import attenuate_writecap
# The number of tokens to submit with a voucher redemption.
NUM_TOKENS = 2**15
class IZKAPRoot(IResource):
"""
The root of the resource tree of this plugin's client web presence.
"""
store = Attribute("The ``VoucherStore`` used by this resource tree.")
controller = Attribute("The ``PaymentController`` used by this resource tree.")
def get_token_count(
plugin_name,
node_config,
):
"""
Retrieve the configured voucher value, in number of tokens, from the given
configuration.
:param str plugin_name: The plugin name to use to choose a
configuration section.
:param _Config node_config: See ``from_configuration``.
"""
section_name = "storageclient.plugins.{}".format(plugin_name)
return int(
node_config.get_config(
section=section_name,
option="default-token-count",
default=NUM_TOKENS,
)
)
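# Minimal sketch with a hypothetical stand-in config object: when the option
# is unset, ``get_token_count`` falls back to ``NUM_TOKENS``.
def _example_token_count() -> int:
    class _StubConfig:
        def get_config(self, section, option, default):
            return default
    return get_token_count(NAME, _StubConfig())  # == NUM_TOKENS == 2 ** 15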
def from_configuration(
node_config,
store,
get_downloader,
setup_replication,
redeemer=None,
clock=None,
):
"""
Instantiate the plugin root resource using data from its configuration
section, **storageclient.plugins.privatestorageio-zkapauthz-v2**, in the
Tahoe-LAFS configuration file. See the configuration documentation for
details of the configuration section.
:param _Config node_config: An object representing the overall node
configuration. The plugin configuration can be extracted from this.
This is also used to read and write files in the private storage area
of the node's persistent state location.
:param VoucherStore store: The store to use.
:param IRedeemer redeemer: The voucher redeemer to use. If ``None`` a
sensible one is constructed.
:param clock: See ``PaymentController._clock``.
:return IZKAPRoot: The root of the resource hierarchy presented by the
client side of the plugin.
"""
if redeemer is None:
redeemer = get_redeemer(
NAME,
node_config,
None,
None,
)
default_token_count = get_token_count(
NAME,
node_config,
)
controller = PaymentController(
store,
redeemer,
default_token_count,
allowed_public_keys=get_configured_allowed_public_keys(node_config),
clock=clock,
)
calculator = PriceCalculator(
get_configured_shares_needed(node_config),
get_configured_shares_total(node_config),
get_configured_pass_value(node_config),
)
calculate_price = _CalculatePrice(
calculator,
LeaseMaintenanceConfig.from_node_config(node_config).get_lease_duration(),
)
root = create_private_tree(
lambda: node_config.get_private_config("api_auth_token").encode("utf-8"),
authorizationless_resource_tree(
store,
controller,
get_downloader,
setup_replication,
calculate_price,
),
)
root.store = store
root.controller = controller
return root
def internal_server_error(err: Failure, logger: Logger, request: IRequest) -> None:
"""
Log a failure and return it as an internal server error for the given
request.
This is suitable for use as a last-resort errback while handling a
request.
"""
logger.failure("replication setup failed", err)
request.setResponseCode(INTERNAL_SERVER_ERROR)
request.write(dumps_utf8({"reason": err.getErrorMessage()}))
request.finish()
@define
class ReplicateResource(Resource):
"""
Integrate the replication configuration implementation with the HTTP
interface.
:ivar _setup: The callable the resource will use to do the actual setup
work.
"""
_setup: Callable[[], Awaitable[str]]
_log = Logger()
def __attrs_post_init__(self):
Resource.__init__(self)
def render_POST(self, request):
d = Deferred.fromCoroutine(self._setup_replication(request))
d.addErrback(internal_server_error, self._log, request)
return NOT_DONE_YET
async def _setup_replication(self, request) -> None:
"""
Call the replication setup function and asynchronously deliver its result
as a response to the given request.
"""
try:
cap_str = await self._setup()
except ReplicationAlreadySetup as e:
status = CONFLICT
cap_str = e.cap_str
else:
status = CREATED
application_json(request)
request.setResponseCode(status)
request.write(dumps_utf8({"recovery-capability": cap_str}))
request.finish()
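# Sketch of wiring ``ReplicateResource`` (the setup coroutine here is
# hypothetical): in real use the callable comes from the replication machinery
# passed to ``authorizationless_resource_tree`` below.
def _example_replicate_resource() -> ReplicateResource:
    async def fake_setup() -> str:
        return "URI:DIR2-RO:example"  # assumed placeholder capability string
    return ReplicateResource(fake_setup)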
class RecoverProtocol(WebSocketServerProtocol):
"""
Speaks the server side of the WebSocket /recover protocol.
A client connects to this to start recovery, sending an opening
message with the required capability.
As recovery is ongoing, the server sends status updates as they
become available.
When the recovery is finished, a final message is sent
(indicating overall success or failure) and the WebSocket is
closed.
"""
_log = Logger()
def onClose(self, wasClean, code, reason) -> None:
"""
WebSocket API: we've lost our connection for some reason
"""
try:
self.factory.clients.remove(self)
except ValueError:
# may not have initiated recovery yet so it might not be
# in the clients list
pass
def onMessage(self, payload, isBinary) -> None:
"""
WebSocket API: a message has been received from the client (the
only thing they can send is a request to initiate recovery).
"""
try:
body = loads(payload)
if set(body.keys()) != {"recovery-capability"}:
raise ValueError("Unknown keys present in request")
recovery_capability = from_string(body["recovery-capability"])
if not isinstance(recovery_capability, ReadonlyDirectoryURI):
raise ValueError("Not a readonly-dircap")
except Exception as e:
self._log.failure("Failed to initiate recovery")
self.sendClose(
code=4000,
reason=f"Failed to parse recovery request: {e}",
)
return
# we have a valid request, tell our factory to start recovery
self.factory.initiate_recovery(recovery_capability, self)
@define
class RecoverFactory(WebSocketServerFactory):
"""
Track state of recovery.
This state lives in the factory because we want at most one active
recovery attempt no matter how many clients there are, and because
something needs to link to other resources that are also constructed
only once.
"""
store: VoucherStore = field()
get_downloader: Callable[[str], Downloader] = field()
recoverer: StatefulRecoverer = field()
recovering_d: Optional[Deferred] = field(default=None)
recovering_cap: Optional[ReadonlyDirectoryURI] = field(default=None)
# manage WebSocket client(s)
clients: list = field(default=Factory(list))
sent_updates: list = field(default=Factory(list))
_log = Logger()
@recoverer.default
def _default_recoverer(self) -> StatefulRecoverer:
return StatefulRecoverer(listeners={self._on_state_change})
def __attrs_post_init__(self) -> None:
self.protocol = RecoverProtocol
WebSocketServerFactory.__init__(self, server="ZKAPAuthorizer")
def _on_state_change(self, state: RecoveryState) -> None:
"""
Whenever the state of recovery changes, update all our clients
"""
update_msg = dumps(state.marshal()).encode("utf8")
self.sent_updates.append(update_msg)
for client in self.clients:
client.sendMessage(update_msg, False)
def initiate_recovery(
self, cap: ReadonlyDirectoryURI, client: WebSocketServerProtocol
) -> None:
"""
A new WebSocket client has asked for recovery.
If there is no recovery, begin one and send updates to this
client.
If a recovery is already started _and_ the capability is the
same, send updates to this client too.
Otherwise, error.
"""
self.clients.append(client)
if self.recovering_d is None:
self.recovering_cap = cap
self.recovering_d = Deferred.fromCoroutine(self._recover(self.store, cap))
def disconnect_clients():
for client in self.clients:
client.sendClose()
def err(f):
self._log.failure("Error during restore", f)
# One likely reason to get here is the ValueError we
# raise about existing local state .. and the
# "recoverer" itself can't really handle this (or
# other) errors happening before it is called.
self.recoverer._set_state(
RecoveryState(
RecoveryStages.import_failed,
f.getErrorMessage(),
)
)
disconnect_clients()
def happy(_):
disconnect_clients()
self.recovering_d.addCallbacks(happy, err)
elif self.recovering_cap != cap:
client.sendClose(
code=4000, reason="Ongoing recovery with different capability"
)
else:
# we got another client, and they sent the same recovery
# capability, so be idempotent by acting the same as if this
# was the first client. That means sending this client all the
# status updates we've sent so far.
for update in self.sent_updates:
client.sendMessage(update)
def buildProtocol(self, addr) -> RecoverProtocol:
"""
IFactory API
"""
protocol = self.protocol()
protocol.factory = self
return protocol
async def _recover(
self,
store: VoucherStore,
cap: ReadonlyDirectoryURI,
) -> None:
"""
:raises: NotEmpty if there is existing local state
"""
# If these things succeed then we will have started recovery and
# generated a response to the request.
downloader = self.get_downloader(cap.to_string().decode("ascii"))
await store.call_if_empty(
partial(self.recoverer.recover, downloader) # cursor added by call_if_empty
)
# let all exceptions (including NotEmpty) out
def authorizationless_resource_tree(
store,
controller,
get_downloader: Callable[[str], Downloader],
setup_replication: Callable[[], Awaitable[str]],
calculate_price,
):
"""
Create the full ZKAPAuthorizer client plugin resource hierarchy with no
authorization applied.
:param VoucherStore store: The store to use.
:param PaymentController controller: The payment controller to use.
:param get_downloader: A callable which accepts a replica identifier and
can download the replica data.
:param IResource calculate_price: The resource for the price calculation endpoint.
:return IResource: The root of the resource hierarchy.
"""
root = Resource()
root.putChild(
b"recover",
WebSocketResource(RecoverFactory(store, get_downloader)),
)
root.putChild(
b"replicate",
ReplicateResource(setup_replication),
)
root.putChild(
b"voucher",
_VoucherCollection(
store,
controller,
),
)
root.putChild(
b"lease-maintenance",
_LeaseMaintenanceResource(
store,
controller,
),
)
root.putChild(
b"version",
_ProjectVersion(),
)
root.putChild(
b"calculate-price",
calculate_price,
)
return root
class _CalculatePrice(Resource):
"""
This resource exposes a storage price calculator.
"""
allowedMethods = [b"POST"]
render_HEAD = render_GET = None
def __init__(self, price_calculator, lease_period):
"""
:param _PriceCalculator price_calculator: The object which can actually
calculate storage prices.
:param lease_period: See ``authorizationless_resource_tree``
"""
self._price_calculator = price_calculator
self._lease_period = lease_period
Resource.__init__(self)
def render_POST(self, request):
"""
Calculate the price in ZKAPs to store or continue storing files specified
sizes.
"""
if wrong_content_type(request, "application/json"):
return NOT_DONE_YET
application_json(request)
payload = request.content.read()
try:
body_object = loads(payload)
except ValueError:
request.setResponseCode(BAD_REQUEST)
return dumps_utf8(
{
"error": "could not parse request body",
}
)
try:
version = body_object["version"]
sizes = body_object["sizes"]
except (TypeError, KeyError):
request.setResponseCode(BAD_REQUEST)
return dumps_utf8(
{
"error": "could not read `version` and `sizes` properties",
}
)
if version != 1:
request.setResponseCode(BAD_REQUEST)
return dumps_utf8(
{
"error": "did not find required version number 1 in request",
}
)
if not isinstance(sizes, list) or not all(
isinstance(size, int) and size >= 0 for size in sizes
):
request.setResponseCode(BAD_REQUEST)
return dumps_utf8(
{
"error": "did not find required positive integer sizes list in request",
}
)
application_json(request)
price = self._price_calculator.calculate(sizes)
return dumps_utf8(
{
"price": price,
"period": self._lease_period,
}
)
def wrong_content_type(request, required_type):
"""
Check the content-type of a request and respond if it is incorrect.
:param request: The request object to check.
:param str required_type: The required content-type (eg
``"application/json"``).
:return bool: ``True`` if the content-type is wrong and an error response
has been generated. ``False`` otherwise.
"""
actual_type = request.requestHeaders.getRawHeaders(
"content-type",
[None],
)[0]
if actual_type != required_type:
request.setResponseCode(BAD_REQUEST)
request.finish()
return True
return False
def application_json(request):
"""
Set the given request's response content-type to ``application/json``.
:param twisted.web.iweb.IRequest request: The request to modify.
"""
request.responseHeaders.setRawHeaders("content-type", ["application/json"])
class _ProjectVersion(Resource):
"""
This resource exposes the version of **ZKAPAuthorizer** itself.
"""
def render_GET(self, request):
application_json(request)
return dumps_utf8(
{
"version": _zkapauthorizer_version,
}
)
class _LeaseMaintenanceResource(Resource):
"""
This class implements inspection of lease maintenance activity. Users
**GET** this resource to learn about lease maintenance spending.
"""
_log = Logger()
def __init__(self, store, controller):
self._store = store
self._controller = controller
Resource.__init__(self)
def render_GET(self, request):
"""
Retrieve the spending information.
"""
application_json(request)
return dumps_utf8(
{
"total": self._store.count_unblinded_tokens(),
"spending": self._lease_maintenance_activity(),
}
)
def _lease_maintenance_activity(self):
activity = self._store.get_latest_lease_maintenance_activity()
if activity is None:
return activity
return {
"when": activity.finished.isoformat(),
"count": activity.passes_required,
}
class _VoucherCollection(Resource):
"""
This class implements redemption of vouchers. Users **PUT** such numbers
to this resource which delegates redemption responsibilities to the
redemption controller. Child resources of this resource can also be
retrieved to monitor the status of previously submitted vouchers.
"""
_log = Logger()
def __init__(self, store, controller):
self._store = store
self._controller = controller
Resource.__init__(self)
def render_PUT(self, request):
"""
Record a voucher and begin attempting to redeem it.
"""
try:
payload = loads(request.content.read())
except Exception:
return bad_request("json request body required").render(request)
if payload.keys() != {"voucher"}:
return bad_request(
"request object must have exactly one key: 'voucher'"
).render(request)
voucher = payload["voucher"]
if not is_syntactic_voucher(voucher):
return bad_request("submitted voucher is syntactically invalid").render(
request
)
self._log.info(
"Accepting a voucher ({voucher}) for redemption.", voucher=voucher
)
self._controller.redeem(voucher.encode("ascii"))
return b""
def render_GET(self, request):
application_json(request)
return dumps_utf8(
{
"vouchers": list(
self._controller.incorporate_transient_state(voucher).marshal()
for voucher in self._store.list()
),
}
)
def getChild(self, segment, request):
voucher = segment.decode("utf-8")
if not is_syntactic_voucher(voucher):
return bad_request()
try:
voucher = self._store.get(voucher.encode("ascii"))
except KeyError:
return NoResource()
return VoucherView(self._controller.incorporate_transient_state(voucher))
def is_syntactic_voucher(voucher):
"""
:param voucher: A candidate object to inspect.
:return bool: ``True`` if and only if ``voucher`` is a text string
containing a syntactically valid voucher. This says **nothing** about
the validity of the represented voucher itself. A ``True`` result
only means the string can be **interpreted** as a voucher.
"""
if not isinstance(voucher, str):
return False
if len(voucher) != 44:
# TODO. 44 is the length of 32 bytes base64 encoded. This model
# information presumably belongs somewhere else.
return False
try:
urlsafe_b64decode(voucher.encode("ascii"))
except Exception:
return False
return True
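# Quick illustration (the sample strings are made up): 44 characters of
# urlsafe base64 pass the syntactic check, anything else fails.
def _example_voucher_syntax() -> tuple:
    good = "A" * 44  # any 44 urlsafe-base64 characters decode cleanly
    bad = "not-a-voucher"
    return is_syntactic_voucher(good), is_syntactic_voucher(bad)  # (True, False)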
class VoucherView(Resource):
"""
This class implements a view for a ``Voucher`` instance.
"""
def __init__(self, voucher):
"""
:param Voucher voucher: The model object for which to provide a
view.
"""
self._voucher = voucher
Resource.__init__(self)
def render_GET(self, request):
application_json(request)
return self._voucher.to_json()
def bad_request(reason="Bad Request"):
"""
:return IResource: A resource which can be rendered to produce a **BAD
REQUEST** response.
"""
return ErrorPage(
BAD_REQUEST,
b"Bad Request",
reason.encode("utf-8"),
)
async def recover(
agent: IWebSocketClientAgent,
api_root: DecodedURL,
auth_token: str,
replica_dircap: str,
) -> list[dict]:
"""
Initiate recovery from a replica.
:return: The status updates received while recovery was progressing.
"""
endpoint_url = api_root.child(
"storage-plugins", "privatestorageio-zkapauthz-v2", "recover"
).to_text()
proto = await agent.open(
endpoint_url,
{"headers": {"Authorization": f"tahoe-lafs {auth_token}"}},
)
updates = []
proto.on("message", lambda msg, is_binary: updates.append(loads(msg)))
await proto.is_open
proto.sendMessage(
dumps({"recovery-capability": attenuate_writecap(replica_dircap)}).encode(
"utf8"
)
)
await proto.is_closed
return updates | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/resource.py | resource.py |
from collections.abc import Awaitable
from functools import wraps
from hashlib import sha256
from json import loads
from tempfile import mkdtemp
from typing import IO, Any, Callable, Iterable, Optional, Union
import treq
from allmydata.uri import from_string as capability_from_string
from allmydata.util.base32 import b2a as b32encode
from attrs import Factory, define, field, frozen
from hyperlink import DecodedURL
from treq.client import HTTPClient
from twisted.internet.error import ConnectionRefusedError
from twisted.python.filepath import FilePath
from twisted.web.client import Agent
from zope.interface import Interface, implementer
from ._types import CapStr
from .config import Config, read_node_url
from .storage_common import (
get_configured_shares_needed,
get_configured_shares_total,
required_passes,
share_size_for_data,
)
# An object which can get a readable byte stream
DataProvider = Callable[[], IO[bytes]]
@frozen
class DirectoryEntry:
"""
An entry in a directory.
:ivar kind: Either ``"filenode"`` or ``"dirnode"``.
:ivar size: The size of the entry's data, in bytes.
"""
kind: str
size: int
@frozen
class ShareEncoding:
"""
:ivar needed: The number of shares required to re-assemble the ciphertext.
:ivar total: The total number of shares the ciphertext has been
encoded into.
"""
needed: int
total: int
def async_retry(matchers: list[Callable[[Exception], bool]]):
"""
Decorate a function with automatic retry behavior for certain cases.
:param matchers: A list of objects with a ``match`` method. If any of
these return ``True`` for an exception raised by the decorated
function then the decorated function will be called again.
"""
def retry_decorator(f) -> Callable:
@wraps(f)
async def decorated(*a, **kw) -> Awaitable:
while True:
try:
result = await f(*a, **kw)
except Exception as e:
if any(match(e) for match in matchers):
continue
raise
else:
return result
return decorated
return retry_decorator
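# Sketch of ``async_retry`` applied to a hypothetical coroutine: the wrapped
# call is repeated as long as any matcher returns True for the raised
# exception.
@async_retry([lambda exc: isinstance(exc, ConnectionRefusedError)])
async def _example_flaky_call() -> str:
    # A real implementation would perform I/O that may raise
    # ConnectionRefusedError; this stand-in simply succeeds.
    return "ok"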
def _not_enough_servers(exc: Exception, /) -> bool:
"""
Match the exception that is raised when the Tahoe-LAFS client node is not
connected to enough servers to satisfy the encoding configuration.
"""
return isinstance(exc, TahoeAPIError) and (
"allmydata.interfaces.NoServersError" in str(exc)
or "allmydata.mutable.common.NotEnoughServersError" in str(exc)
)
def _connection_refused(exc: Exception, /) -> bool:
"""
Match the exception that is raised when the Tahoe-LAFS client node does
not accept the API call connection attempt.
"""
# Note this is the exception from Twisted, not the builtin exception.
return isinstance(exc, ConnectionRefusedError)
def _scrub_cap(cap: str) -> str:
"""
Return a new string that cannot be used to recover the input string but
can usually be distinguished from the scrubbed version of a different
input string.
"""
scrubbed = b32encode(sha256(cap.encode("ascii")).digest())[:6]
return f"URI:SCRUBBED:{scrubbed}"
def _scrub_caps_from_url(url: DecodedURL) -> DecodedURL:
"""
Return a new URL that is like ``url`` but has all capability strings in it
replaced with distinct but unusable substitutes.
"""
# One form is like /uri/<cap>
if (
len(url.path) > 1
and url.path[0] == "uri"
and not url.path[1].startswith("URI:SCRUBBED:")
):
cap = url.path[1]
new = url.replace(path=(url.path[0], _scrub_cap(cap)) + tuple(url.path[2:]))
return new
# That is the only form we use at the moment, in fact.
return url
@define(auto_exc=False)
class TahoeAPIError(Exception):
"""
Some error was reported from a Tahoe-LAFS HTTP API.
:ivar status: The HTTP response status code.
:ivar body: The HTTP response body.
"""
method: str
url: DecodedURL = field(converter=_scrub_caps_from_url)
status: int
body: str
class NotWriteableError(Exception):
"""
An attempt was made to write to something which is not writeable.
"""
class NotADirectoryError(Exception):
"""
An attempt was made to treat a non-directory as a directory.
"""
_common_tahoe_errors = [_not_enough_servers, _connection_refused]
@async_retry(_common_tahoe_errors)
async def upload_bytes(
client: HTTPClient,
get_data_provider: DataProvider,
api_root: DecodedURL,
) -> Awaitable[str]:
"""
Upload the given data and return the resulting capability.
If not enough storage servers are reachable then the upload is
automatically retried.
:param client: An HTTP client to use to make requests to the Tahoe-LAFS
HTTP API to perform the upload.
:param get_data_provider: A callable that returns a BinaryIO ready
to provide the bytes to upload. This isn't a BinaryIO _directly_
because we might re-try the operation, in which case we need a new
stream.
:param api_root: The location of the root of the Tahoe-LAFS HTTP API to
use to perform the upload. This should typically be the ``node.url``
value from a Tahoe-LAFS client node.
:return: If the upload is successful then the capability of the uploaded
data is returned.
:raise: If there is a problem uploading the data -- except for
unavailability of storage servers -- then some exception is raised.
"""
uri = api_root.child("uri")
data = get_data_provider()
resp = await client.put(uri, data)
content = (await treq.content(resp)).decode("utf-8")
if resp.code in (200, 201):
return content
raise TahoeAPIError("put", uri, resp.code, content)
async def download(
client: HTTPClient,
outpath: FilePath,
api_root: DecodedURL,
cap: str,
child_path: Optional[Iterable[str]] = None,
) -> None:
"""
Download the object identified by the given capability to the given path.
:param client: An HTTP client to use to make requests to the Tahoe-LAFS
HTTP API to perform the download.
:param outpath: The path to the regular file to which the downloaded
content will be written. The content will be written to a temporary
file next to this one during download and then moved to this location
at the end.
:param api_root: The location of the root of the Tahoe-LAFS HTTP API to
use to perform the download. This should typically be the ``node.url``
value from a Tahoe-LAFS client node.
:raise: If there is a problem downloading the data then some exception is
raised.
"""
outtemp = outpath.temporarySibling()
uri = api_root.child("uri").child(cap)
if child_path is not None:
for segment in child_path:
uri = uri.child(segment)
resp = await client.get(uri)
if resp.code == 200:
with outtemp.open("w") as f:
await treq.collect(resp, f.write)
outtemp.moveTo(outpath)
else:
content = (await treq.content(resp)).decode("utf-8")
raise TahoeAPIError("get", uri, resp.code, content)
@async_retry(_common_tahoe_errors)
async def list_directory(
client: HTTPClient,
api_root: DecodedURL,
dir_cap: str,
) -> Awaitable[dict[str, dict[str, dict]]]:
"""
Read the direct children of a directory.
"""
if not dir_cap.startswith("URI:DIR2"):
raise ValueError(f"Cannot list a non-directory capability ({dir_cap[:7]})")
uri = api_root.child("uri").child(dir_cap).child("").add("t", "json")
resp = await client.get(uri)
content = (await treq.content(resp)).decode("utf-8")
if resp.code == 200:
kind, details = loads(content)
return details["children"]
raise TahoeAPIError("get", uri, resp.code, content)
@async_retry(_common_tahoe_errors)
async def make_directory(
client: HTTPClient,
api_root: DecodedURL,
) -> Awaitable[str]:
"""
Create a new mutable directory and return the write capability string.
"""
uri = api_root.child("uri").add("t", "mkdir")
resp = await client.post(uri)
content = (await treq.content(resp)).decode("utf-8")
if resp.code == 200:
return content
raise TahoeAPIError("post", uri, resp.code, content)
@async_retry(_common_tahoe_errors)
async def link(
client: HTTPClient,
api_root: DecodedURL,
dir_cap: str,
entry_name: str,
entry_cap: str,
) -> None:
"""
Link an object into a directory.
:param dir_cap: The capability string of the directory in which to create
the link.
:param entry_cap: The capability string of the object to link in to the
directory.
"""
uri = api_root.child("uri").child(dir_cap).child(entry_name).add("t", "uri")
resp = await client.put(uri, data=entry_cap.encode("ascii"))
content = (await treq.content(resp)).decode("utf-8")
if resp.code == 200:
return None
if resp.code == 500 and "allmydata.mutable.common.NotWriteableError" in content:
raise NotWriteableError()
raise TahoeAPIError("put", uri, resp.code, content)
@async_retry(_common_tahoe_errors)
async def unlink(
client: HTTPClient,
api_root: DecodedURL,
dir_cap: str,
entry_name: str,
) -> None:
"""
Unlink an object from a directory.
:param dir_cap: The capability string of the directory from which to
delete the entry.
:param entry_name: The name of the entry to delete.
:raise NotWriteableError: If the given directory capability is a read-only
capability.
:raise NotADirectoryError: If the given capability is not a directory
capability at all.
"""
uri = api_root.child("uri").child(dir_cap).child(entry_name)
resp = await client.delete(uri)
content = (await treq.content(resp)).decode("utf-8")
if resp.code == 200:
return None
if resp.code == 500 and "allmydata.mutable.common.NotWriteableError" in content:
raise NotWriteableError()
elif resp.code == 400 and "Files have no children named" in content:
raise NotADirectoryError()
raise TahoeAPIError("delete", uri, resp.code, content)
@frozen
class TahoeConfig:
"""
An abstract interface to the configuration of a Tahoe-LAFS client node.
:ivar encoding: The node's default erasure encoding parameters.
"""
encoding: ShareEncoding
class ITahoeClient(Interface):
"""
A simple Tahoe-LAFS client interface.
"""
def get_config() -> TahoeConfig:
"""
Get an abstract representation of this client node's configuration.
"""
def get_private_path(name: str) -> FilePath:
"""
Get the path to a file in the client node's private directory.
"""
async def download(
outpath: FilePath, cap: CapStr, child_path: Optional[Iterable[str]]
) -> None:
"""
Download the contents of an object to a given local path.
"""
async def upload(data_provider: DataProvider) -> CapStr:
"""
Upload some data, creating a new object, and return a capability for
it.
:param data_provider: A callable which returns the data to be
uploaded. This may be called more than once in case a retry is
required.
"""
async def make_directory() -> CapStr:
"""
Create a new, empty, mutable directory.
"""
async def link(dir_cap: CapStr, entry_name: str, entry_cap: CapStr) -> None:
"""
Link an object into a directory.
:param dir_cap: The capability of the directory to link into.
:param entry_name: The name of the new link.
:param entry_cap: The capability of the object to link in.
"""
async def unlink(dir_cap: CapStr, entry_name: str) -> None:
"""
Delete an object out of a directory.
:param dir_cap: The capability of the directory to unlink from.
:param entry_name: The name of the entry to remove.
"""
async def list_directory(dir_cap: CapStr) -> dict[str, list[Any]]:
"""
List the entries linked into a directory.
"""
@implementer(ITahoeClient)
@define
class Tahoe(object):
"""
An object with simple bindings to Tahoe-LAFS HTTP APIs for some
operations.
Application code using this API lends itself well to being tested against
the objects returned by ``MemoryGrid.client``.
"""
client: HTTPClient
_node_config: Config
@property
def _api_root(self):
# The reading of node.url is intentionally delayed until it is
# required for the benefit of test code that doesn't ever make any
# requests and also doesn't fully populate the node's filesystem
# state.
return read_node_url(self._node_config)
def get_config(self) -> TahoeConfig:
"""
Create an abstract configuration from this node's concrete configuration.
"""
return TahoeConfig(
ShareEncoding(
get_configured_shares_needed(self._node_config),
get_configured_shares_total(self._node_config),
)
)
def get_private_path(self, name: str) -> FilePath:
"""
Get the path to a file in the node's private directory.
"""
return FilePath(self._node_config.get_private_path(name))
def download(self, outpath, cap, child_path):
return download(self.client, outpath, self._api_root, cap, child_path)
def upload(self, get_data_provider):
return upload_bytes(self.client, get_data_provider, self._api_root)
def make_directory(self):
return make_directory(self.client, self._api_root)
def list_directory(self, dir_cap):
return list_directory(self.client, self._api_root, dir_cap)
def link(self, dir_cap, entry_name, entry_cap):
return link(self.client, self._api_root, dir_cap, entry_name, entry_cap)
def unlink(self, dir_cap, entry_name):
return unlink(self.client, self._api_root, dir_cap, entry_name)
@define
class _Directory:
"""
Represent a Tahoe-LAFS directory object.
:ivar children: A mapping from an entry name to a capability which can be
used to look up the object for that entry.
"""
children: dict[str, CapStr] = field()
@children.default
def _default_children(self):
return {}
@define
class MemoryGrid:
"""
An extremely simplified in-memory model of a Tahoe-LAFS storage grid.
This object allows data to be "uploaded" to it and produces capability
strings which can then be used to "download" the data from it later on.
:ivar _counter: An internal counter used to support the creation of
capability strings.
:ivar _objects: Storage for all data which has been "uploaded", as a
mapping from the capability strings to the values.
"""
_counter: int = 0
_objects: dict[CapStr, Union[bytes, _Directory]] = field(default=Factory(dict))
def client(
self,
basedir: Optional[FilePath] = None,
share_encoding: ShareEncoding = ShareEncoding(3, 10),
) -> ITahoeClient:
"""
Create a ``Tahoe``-alike that is backed by this object instead of by a
real Tahoe-LAFS storage grid.
"""
if basedir is None:
basedir = FilePath(mkdtemp(suffix=".memory-tahoe"))
return _MemoryTahoe(self, basedir, share_encoding)
def upload(self, data: bytes) -> CapStr:
def encode(s: str) -> str:
return b32encode(s.encode("ascii")).decode("ascii")
cap = "URI:CHK:{}:{}:{}:{}:{}".format(
encode("{:016}".format(self._counter)),
encode("{:032}".format(self._counter)),
self._counter,
self._counter,
self._counter,
)
self._objects[cap] = data
self._counter += 1
return cap
def download(self, cap: CapStr) -> Union[bytes, _Directory]:
return self._objects[cap]
def make_directory(self) -> CapStr:
def encode(s: str) -> str:
return b32encode(s.encode("ascii")).decode("ascii")
writekey = encode("{:016x}".format(self._counter))
fingerprint = encode("{:032x}".format(self._counter))
self._counter += 1
cap = f"URI:DIR2:{writekey}:{fingerprint}"
rocap = attenuate_writecap(cap)
self._objects[cap] = self._objects[rocap] = _Directory()
return cap
def link(self, dir_cap: CapStr, entry_name: str, entry_cap: CapStr) -> None:
d = capability_from_string(dir_cap)
if d.is_readonly():
raise NotWriteableError()
dirobj = self._objects[dir_cap]
if isinstance(dirobj, _Directory):
dirobj.children[entry_name] = entry_cap
else:
raise ValueError(
f"Cannot link entry into non-directory capability ({dir_cap[:7]})"
)
def unlink(self, dir_cap: CapStr, entry_name: str) -> None:
d = capability_from_string(dir_cap)
if d.is_readonly():
raise NotWriteableError()
dirobj = self._objects[dir_cap]
if isinstance(dirobj, _Directory):
del dirobj.children[entry_name]
else:
raise NotADirectoryError()
def list_directory(self, dir_cap: CapStr) -> dict[str, list[Any]]:
def kind(entry):
if isinstance(entry, _Directory):
return "dirnode"
return "filenode"
def describe(cap):
obj = self._objects[cap]
if kind(obj) == "dirnode":
return ["dirnode", {"rw_uri": cap}]
return ["filenode", {"size": len(obj), "ro_uri": cap}]
dirobj = self._objects[dir_cap]
if isinstance(dirobj, _Directory):
return {name: describe(entry) for (name, entry) in dirobj.children.items()}
raise ValueError(f"Cannot list a non-directory capability ({dir_cap[:7]})")
_no_children_message = (
"\n<html>\n"
" <head><title>400 - Files have no children named 'somepath'</title></head>\n"
" <body>\n"
" <h1>Files have no children named {path!r}'</h1>\n"
" <p>no details</p>\n"
" </body>\n"
"</html>\n"
)
@implementer(ITahoeClient)
@define
class _MemoryTahoe:
"""
An in-memory implementation of the ``Tahoe`` API.
"""
_grid: MemoryGrid
_nodedir: FilePath
share_encoding: ShareEncoding
def __attrs_post_init__(self):
self._nodedir.child("private").makedirs(ignoreExistingDirectory=True)
def get_config(self) -> TahoeConfig:
"""
Get this node's configuration.
"""
return TahoeConfig(self.share_encoding)
def get_private_path(self, name: str) -> FilePath:
"""
Get the path to a file in a private directory dedicated to this instance
(there is no Tahoe node directory to look in).
"""
return self._nodedir.child("private").child(name)
async def download(self, outpath, cap, child_path):
d = self._grid.download(cap)
if child_path is not None:
for p in child_path:
if cap.startswith("URI:DIR2"):
cap = d.children[p]
d = self._grid.download(cap)
else:
raise TahoeAPIError(
"get", DecodedURL(), 400, _no_children_message.format(path=p)
)
if isinstance(d, bytes):
outpath.setContent(d)
else:
raise ValueError(f"Cannot download non-data capability ({cap[:7]})")
async def upload(self, data_provider: DataProvider):
"""
Send some data to Tahoe-LAFS, returning an immutable capability.
:param data_provider: a function that returns the data to
upload. This may be called more than once in case we need
to re-try the upload, which is also the reason this method
doesn't just take a `bytes` directly
"""
with data_provider() as d:
content = d.read()
return self._grid.upload(content)
async def make_directory(self):
return self._grid.make_directory()
async def link(self, dir_cap, entry_name, entry_cap):
return self._grid.link(dir_cap, entry_name, entry_cap)
async def unlink(self, dir_cap, entry_name):
return self._grid.unlink(dir_cap, entry_name)
async def list_directory(self, dir_cap):
return self._grid.list_directory(dir_cap)
def attenuate_writecap(rw_cap: CapStr) -> CapStr:
"""
Get a read-only capability corresponding to the same data as the given
read-write capability.
"""
return capability_from_string(rw_cap).get_readonly().to_string().decode("ascii")
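# Sketch using the in-memory grid above: a write capability for a new
# directory attenuates to the corresponding read-only capability.
def _example_attenuate() -> str:
    grid = MemoryGrid()
    rw_cap = grid.make_directory()     # "URI:DIR2:..."
    return attenuate_writecap(rw_cap)  # "URI:DIR2-RO:..."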
def get_tahoe_client(reactor, node_config: Config) -> ITahoeClient:
"""
Return a Tahoe-LAFS client appropriate for the given node configuration.
:param reactor: The reactor the client will use for I/O.
:param node_config: The Tahoe-LAFS client node configuration for the
client (giving, for example, the root URI of the node's HTTP API).
"""
agent = Agent(reactor)
http_client = HTTPClient(agent)
return Tahoe(http_client, node_config)
def required_passes_for_data(
bytes_per_pass: int, encoding: ShareEncoding, data_size: int
) -> int:
"""
Calculate the total storage cost (in passes) for all shares of an object
of a certain size under certain encoding parameters and pass value.
"""
return required_passes(
bytes_per_pass,
share_sizes_for_data(encoding, data_size),
)
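# Worked sketch (all numbers are assumed): with 3-of-10 encoding each share
# holds roughly a third of the data, but all ten shares must be paid for, so
# the total cost in passes covers every share, not just the original bytes.
def _example_storage_cost() -> int:
    encoding = ShareEncoding(needed=3, total=10)
    one_mebibyte = 2 ** 20
    return required_passes_for_data(one_mebibyte, encoding, one_mebibyte)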
def share_sizes_for_data(encoding: ShareEncoding, data_size: int) -> list[int]:
"""
Get the sizes of all of the shares for data of the given size encoded
using the given encoding.
"""
return [share_size_for_data(encoding.needed, data_size)] * encoding.total | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/tahoe.py | tahoe.py |
from base64 import b64decode
from datetime import datetime
def returns_aware_datetime_validator(inst, attr, value) -> None:
"""
An attrs validator that verifies the attribute value is a function that
returns a timezone-aware datetime instance for at least one call.
"""
if is_aware_datetime(value()):
return None
# Is it really a TypeError and not a ValueError? It doesn't matter and
# also attrs converts anything we raise into a TypeError.
raise TypeError(
f"{attr.name!r} must return aware datetime instances (returned {value!r})"
)
def is_aware_datetime(value: datetime) -> bool:
"""
:return: ``True`` if and only if the given value is a timezone-aware
datetime instance.
"""
return isinstance(value, datetime) and value.tzinfo is not None
def aware_datetime_validator(inst, attr, value) -> None:
"""
An attrs validator that verifies the attribute value is a timezone-aware
datetime instance.
"""
if is_aware_datetime(value):
return None
raise TypeError(f"{attr.name!r} must be an aware datetime instance (got {value!r})")
def is_base64_encoded(b64decode=b64decode):
"""
Return an attrs validator that verifies that the attributes is a base64
encoded byte string.
"""
def validate_is_base64_encoded(inst, attr, value):
try:
b64decode(value)
except TypeError:
raise TypeError(
"{name!r} must be base64 encoded bytes, (got {value!r})".format(
name=attr.name,
value=value,
),
)
return validate_is_base64_encoded
def has_length(expected):
def validate_has_length(inst, attr, value):
if len(value) != expected:
raise ValueError(
"{name!r} must have length {expected}, instead has length {actual}".format(
name=attr.name,
expected=expected,
actual=len(value),
),
)
return validate_has_length
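# Hypothetical sketch combining these validators in an attrs class; the
# ``attr`` import is local because the module does not otherwise need it.
def _example_validated_class():
    import attr
    @attr.s
    class _Token(object):
        value = attr.ib(validator=[is_base64_encoded(), has_length(44)])
    return _Token("A" * 44)  # a 44-character base64 string passes both checks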
def greater_than(expected):
def validate_relation(inst, attr, value):
if value > expected:
return None
raise ValueError(
"{name!r} must be greater than {expected}, instead it was {actual}".format(
name=attr.name,
expected=expected,
actual=value,
),
)
return validate_relation | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/validators.py | validators.py |
__all__ = [
"REPLICA_RWCAP_BASENAME",
"EmptyConfig",
"empty_config",
"read_duration",
"read_node_url",
]
from datetime import timedelta
from typing import Protocol, TypeVar, Union
from allmydata.node import _Config as Config
from attrs import define
from hyperlink import DecodedURL
from twisted.python.filepath import FilePath
from . import NAME
_T = TypeVar("_T")
# The basename of the replica read-write capability file in the node's private
# directory, if replication is configured.
REPLICA_RWCAP_BASENAME = NAME + ".replica-rwcap"
# The version number in NAME doesn't match the version here because the
# database is persistent state and we need to be sure to load the older
# version even if we signal an API compatibility break by bumping the version
# number elsewhere. Consider this version number part of a different scheme
# where we're versioning our ability to open the database at all. The schema
# inside the database is versioned by yet another mechanism.
CONFIG_DB_NAME = "privatestorageio-zkapauthz-v1.sqlite3"
class TahoeConfig(Protocol):
"""
A representation of the configuration for a Tahoe-LAFS node.
"""
def get_config(
self,
section: str,
option: str,
default: object = object(),
boolean: bool = False,
) -> object:
"""
Read an option from a section of the configuration.
"""
def get_private_path(self, name: str) -> str:
"""
Construct a path beneath the private directory of the node this
configuration belongs to.
:param name: A path relative to the private directory.
"""
@define
class EmptyConfig:
"""
Weakly pretend to be a Tahoe-LAFS configuration object with no
configuration.
"""
_basedir: FilePath = FilePath(".")
def get_config(self, section, option, default=object(), boolean=False):
return default
def get_private_path(self, name):
return self._basedir.child("private").child(name).path
empty_config = EmptyConfig()
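# Sketch using ``empty_config`` with ``read_duration`` (defined below): absent
# options fall back to the caller's default.
def _example_read_duration() -> timedelta:
    return read_duration(empty_config, "lease.crawl-interval", timedelta(days=1))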
def read_node_url(config: Config) -> DecodedURL:
"""
Get the root of the node's HTTP API.
"""
return DecodedURL.from_text(
FilePath(config.get_config_path("node.url"))
.getContent()
.decode("ascii")
.strip()
)
def read_duration(cfg: Config, option: str, default: _T) -> Union[timedelta, _T]:
"""
Read an integer number of seconds from the ZKAPAuthorizer section of a
Tahoe-LAFS config.
:param cfg: The Tahoe-LAFS config object to consult.
:param option: The name of the option to read.
:return: The given ``default`` if the option is missing, otherwise the parsed duration
as a ``timedelta``.
"""
section_name = "storageclient.plugins." + NAME
value_str = cfg.get_config(
section=section_name,
option=option,
default=None,
)
if value_str is None:
return default
return timedelta(seconds=int(value_str)) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/config.py | config.py |
def get_schema_version(cursor):
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS [version] AS SELECT 0 AS [version]
"""
)
cursor.execute(
"""
SELECT [version] FROM [version]
"""
)
[(actual_version,)] = cursor.fetchall()
return actual_version
def get_schema_upgrades(from_version):
"""
Generate unicode strings containing SQL expressions to alter a schema from
``from_version`` to the latest version.
:param int from_version: The version of the schema which may require
upgrade.
"""
while from_version in _UPGRADES:
for upgrade in _UPGRADES[from_version]:
yield upgrade
yield _INCREMENT_VERSION
from_version += 1
def run_schema_upgrades(upgrades, cursor):
"""
Apply the given upgrades using the given cursor.
:param list[unicode] upgrades: The SQL statements to apply for the
upgrade.
:param cursor: A DB-API cursor to use to run the SQL.
"""
for upgrade in upgrades:
cursor.execute(upgrade)
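# Sketch: driving a fresh in-memory SQLite database to the latest schema
# version using the helpers above (``sqlite3`` is from the standard library
# and is imported locally for the example only).
def _example_upgrade_to_latest() -> int:
    import sqlite3
    cursor = sqlite3.connect(":memory:").cursor()
    version = get_schema_version(cursor)  # creates [version], returns 0
    run_schema_upgrades(list(get_schema_upgrades(version)), cursor)
    return get_schema_version(cursor)  # one past the highest key in _UPGRADES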
_INCREMENT_VERSION = """
UPDATE [version]
SET [version] = [version] + 1
"""
# A mapping from old schema versions to lists of unicode strings of SQL to
# execute against that version of the schema to create the successor schema.
_UPGRADES = {
0: [
"""
CREATE TABLE [vouchers] (
[number] text,
[created] text, -- An ISO8601 date+time string.
[state] text DEFAULT "pending", -- pending, double-spend, redeemed
[finished] text DEFAULT NULL, -- ISO8601 date+time string when
-- the current terminal state was entered.
[token-count] num DEFAULT NULL, -- Set in the redeemed state to the number
-- of tokens received on this voucher's
-- redemption.
PRIMARY KEY([number])
)
""",
"""
CREATE TABLE [tokens] (
[text] text, -- The random string that defines the token.
[voucher] text, -- Reference to the voucher these tokens go with.
PRIMARY KEY([text])
FOREIGN KEY([voucher]) REFERENCES [vouchers]([number])
)
""",
"""
CREATE TABLE [unblinded-tokens] (
[token] text, -- The base64 encoded unblinded token.
PRIMARY KEY([token])
)
""",
"""
CREATE TABLE [lease-maintenance-spending] (
[id] integer, -- A unique identifier for a group of activity.
[started] text, -- ISO8601 date+time string when the activity began.
[finished] text, -- ISO8601 date+time string when the activity completed (or null).
-- The number of passes that would be required to renew all
-- shares encountered during this activity. Note that because
-- leases on different shares don't necessarily expire at the
-- same time this is not necessarily the number of passes
-- **actually** used during this activity. Some shares may
-- not have required lease renewal. Also note that while the
-- activity is ongoing this value may change.
[count] integer,
PRIMARY KEY([id])
)
""",
],
1: [
"""
-- Incorrectly track a single public-key for all. Later version of
-- the schema moves this elsewhere.
ALTER TABLE [vouchers] ADD COLUMN [public-key] text
""",
],
2: [
"""
-- Keep track of progress through redemption of each voucher.
ALTER TABLE [vouchers] ADD COLUMN [counter] integer DEFAULT 0
""",
],
3: [
"""
-- Reference to the counter these tokens go with.
ALTER TABLE [tokens] ADD COLUMN [counter] integer NOT NULL DEFAULT 0
""",
"""
-- Record the total number of tokens for which we expect to be able to
-- redeem this voucher. We don't want to allow NULL values here at
-- all because that allows insertion of garbage data going forward.
-- However to add a non-NULL column to a table we have to supply a
-- default value. Since no real vouchers have ever been issued at the
-- time of this upgrade we'll just make up some value. It doesn't
-- particularly matter if it is wrong for some testing voucher someone
-- used.
ALTER TABLE [vouchers] ADD COLUMN [expected-tokens] integer NOT NULL DEFAULT 32768
""",
],
4: [
"""
CREATE TABLE [invalid-unblinded-tokens] (
[token] text, -- The base64 encoded unblinded token.
[reason] text, -- The reason given for it being considered invalid.
PRIMARY KEY([token])
)
""",
],
5: [
"""
-- Create a table where rows represent a single group of unblinded
-- tokens all redeemed together. Some number of these rows represent
-- a complete redemption of a voucher.
CREATE TABLE [redemption-groups] (
-- A unique identifier for this redemption group.
[rowid] INTEGER PRIMARY KEY,
-- The text representation of the voucher this group is associated with.
[voucher] text,
-- A flag indicating whether these tokens can be spent or if
-- they're being held for further inspection.
[spendable] integer,
-- The public key seen when redeeming this group.
[public-key] text
)
""",
"""
-- Create one redemption group for every existing, redeemed voucher.
-- These tokens were probably *not* all redeemed in one group but
-- we've only preserved one public key for them so we can't do much
-- better than this.
INSERT INTO [redemption-groups] ([voucher], [public-key], [spendable])
SELECT DISTINCT([number]), [public-key], 1 FROM [vouchers] WHERE [state] = "redeemed"
""",
"""
-- Give each voucher a count of "sequestered" tokens. Currently,
-- these are unspendable tokens that were issued using a disallowed
-- public key.
ALTER TABLE [vouchers] ADD COLUMN [sequestered-count] integer NOT NULL DEFAULT 0
""",
"""
-- Give each unblinded token a reference to the [redemption-groups]
-- table identifying the group that token arrived with. This lets us
-- act collectively on tokens from these groups and identify tokens
-- which are spendable.
--
-- The default value is provided for rows that
-- existed prior to this upgrade which had no group association. For
-- unblinded tokens to exist at all there must be at least one voucher
-- in the vouchers table. [redemption-groups] will therefore have at
-- least one row added to it (by the statement a few lines above).
-- Note that SQLite3 rowid numbering begins at 1.
ALTER TABLE [unblinded-tokens] ADD COLUMN [redemption-group] integer DEFAULT 1
""",
],
6: [
"""
-- track the "event-stream" which are a series of SQL statements
-- that modify the database -- except statements which modify this table
--
-- Note that 'AUTOINCREMENT' is required to create the
-- 'sqlite_sequence' table, upon which we depend to discover the
-- next sequence number
CREATE TABLE [event-stream] (
-- A sequence number which allows us to identify specific positions in
-- the sequence of modifications which were made to the database.
[sequence-number] INTEGER PRIMARY KEY AUTOINCREMENT,
-- A SQL statement which likely made a change to the database state.
[statement] TEXT,
-- True if this statement was deemed "important" when recorded
[important] BOOL
)
""",
],
7: [
# Original rows inserted into the vouchers table used naive datetime
# values serialized with no timezone information. These values are
# all in the system's localtime (or at least, whatever the local time
# was when they were created - that information is lost, though).
# Convert them to UTC and add a timezone marker for compatibility with
# new code and to avoid further information loss.
#
# We can do this with the builtin SQLite3 datetime function and string
# concatenation. Note in particular:
#
# "utc" assumes that the time value to its left is in the local
# timezone and adjusts that time value to be in UTC.
#
# This conversion will do weird stuff for times arbitrarily far in the
# past or the future because timezones are hard. Since there should
# be no real values to upgrade that are very far in the past or the
# future, we'll just accept that.
#
# https://www.sqlite.org/lang_datefunc.html
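#
# For example, on a host whose localtime was UTC+02:00, a stored value
# of "2021-06-01 14:00:00" becomes "2021-06-01 12:00:00+00:00".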
"""
UPDATE [vouchers]
SET [created] = datetime([created], 'utc') || '+00:00'
""",
"""
UPDATE [lease-maintenance-spending]
SET [started] = datetime([started], 'utc') || '+00:00'
""",
"""
UPDATE [lease-maintenance-spending]
SET [finished] = datetime([finished], 'utc') || '+00:00'
WHERE [finished] IS NOT NULL
""",
],
8: [
# Arguments were originally bound into the statement but this was
# found to be problematic. Now they live in this separate column.
# The default value is the CBOR serialization of an empty sequence.
"""
ALTER TABLE [event-stream] ADD COLUMN [serialized_arguments] TEXT DEFAULT X'80'
""",
],
}
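# A minimal sketch of how a version-to-statements table like the one above
# might be applied. This is illustrative only: it is not ZKAPAuthorizer's
# real upgrade machinery, and the cursor handling and version bookkeeping
# shown here are assumptions.
def _example_apply_upgrades(cursor, upgrades, from_version, to_version):
    # Run the statements for each missing version in order. Executing them
    # all inside one transaction keeps a failed upgrade from leaving the
    # schema half-migrated.
    for version in range(from_version + 1, to_version + 1):
        for statement in upgrades[version]:
            cursor.execute(statement)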
from datetime import timedelta
from errno import ENOENT
from functools import partial
from os import listdir, stat
from os.path import join
from struct import calcsize, unpack
from typing import Any, Optional
import attr
from allmydata.interfaces import TestAndWriteVectorsForShares
from allmydata.storage.common import storage_index_to_dir
from allmydata.storage.immutable import (
BucketWriter,
FoolscapBucketReader,
FoolscapBucketWriter,
ShareFile,
)
from allmydata.storage.lease import LeaseInfo
from allmydata.storage.mutable import MutableShareFile
from allmydata.storage.server import StorageServer
from allmydata.storage.shares import get_share_file
from allmydata.util.base32 import b2a
from attr.validators import instance_of, provides
from attrs import frozen
from challenge_bypass_ristretto import (
PublicKey,
SigningKey,
TokenPreimage,
VerificationSignature,
)
from eliot import log_call, start_action
from foolscap.api import Referenceable
from foolscap.ipb import IRemoteReference
from prometheus_client import CollectorRegistry, Histogram
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IReactorTime
from twisted.python.filepath import FilePath
from twisted.python.reflect import namedAny
from zope.interface import implementer
from .foolscap import RIPrivacyPassAuthorizedStorageServer, ShareStat
from .model import Pass
from .server.spending import ISpender
from .storage_common import (
MorePassesRequired,
add_lease_message,
allocate_buckets_message,
get_required_new_passes_for_mutable_write,
get_write_sharenums,
pass_value_attribute,
required_passes,
slot_testv_and_readv_and_writev_message,
)
# See allmydata/storage/mutable.py
SLOT_HEADER_SIZE = 468
LEASE_TRAILER_SIZE = 4
class NewLengthRejected(Exception):
"""
A non-None value for ``new_length`` was given to
``slot_testv_and_readv_and_writev``.
This is disallowed by ZKAPAuthorizer because of the undesirable
interactions with the current spending protocol and because there are no
known real-world use-cases for this usage.
"""
@frozen
class _ValidationResult(object):
"""
The result of validating a list of passes.
:ivar valid: A list of valid token preimages.
:ivar signature_check_failed: A list of indexes (into the validated list)
of passes which did not have a correct signature.
"""
valid: list[bytes]
signature_check_failed: list[int]
@classmethod
def _is_invalid_pass(cls, message, pass_, signing_key):
"""
Cryptographically check the validity of a single pass.
:param bytes message: The shared message for pass validation.
:param Pass pass_: The pass to validate.
:return bool: ``False`` (valid) if the pass includes a valid
signature, ``True`` (invalid) otherwise.
"""
assert isinstance(message, bytes), "message %r not bytes" % (message,)
assert isinstance(pass_, Pass), "pass %r not a Pass" % (pass_,)
try:
preimage = TokenPreimage.decode_base64(pass_.preimage)
proposed_signature = VerificationSignature.decode_base64(pass_.signature)
unblinded_token = signing_key.rederive_unblinded_token(preimage)
verification_key = unblinded_token.derive_verification_key_sha512()
invalid_pass = verification_key.invalid_sha512(
proposed_signature,
message,
)
return invalid_pass
except Exception:
# It would be nice to log the validation failure here for debugging.
return True
@classmethod
def validate_passes(cls, message, passes, signing_key):
"""
Check all of the given passes for validity.
:param bytes message: The shared message for pass validation.
:param list[bytes] passes: The encoded passes to validate.
:param SigningKey signing_key: The signing key to use to check the passes.
:return: An instance of this class describing the validation result
for all passes given.
"""
valid = []
signature_check_failed = []
for idx, pass_ in enumerate(passes):
pass_ = Pass.from_bytes(pass_)
if cls._is_invalid_pass(message, pass_, signing_key):
signature_check_failed.append(idx)
else:
valid.append(pass_.preimage)
return cls(
valid=valid,
signature_check_failed=signature_check_failed,
)
def raise_for(self, required_pass_count):
"""
:raise MorePassesRequired: Always raised with fields populated from this
instance and the given ``required_pass_count``.
"""
raise MorePassesRequired(
len(self.valid),
required_pass_count,
self.signature_check_failed,
)
class LeaseRenewalRequired(Exception):
"""
Mutable write operations fail with ``LeaseRenewalRequired`` when the slot
which is the target of the write does not have an active lease and no
passes are supplied to create one.
"""
@implementer(
RIPrivacyPassAuthorizedStorageServer # type: ignore # zope.interface.implementer accepts interface, not ...
)
# It would be great to use `frozen=True` (value-based hashing) instead of
# `cmp=False` (identity based hashing) but Referenceable wants to set some
# attributes on self and it's hard to avoid that.
@attr.s(cmp=False)
class ZKAPAuthorizerStorageServer(Referenceable):
"""
A class which wraps an ``RIStorageServer`` to insert pass validity checks
before allowing certain functionality.
"""
# This is the amount of time an added or renewed lease will last. We
# duplicate the value used by the underlying anonymous-access storage
# server which does not expose it via a Python API or allow it to be
# configured or overridden. It would be great if the anonymous-access
# storage server eventually made lease time a parameter so we could just
# control it ourselves.
LEASE_PERIOD = timedelta(days=31)
# A StorageServer instance, but not validated because of the fake used in
# the test suite.
_original = attr.ib()
_pass_value = pass_value_attribute()
_signing_key = attr.ib(validator=instance_of(SigningKey))
_spender = attr.ib(validator=provides(ISpender))
_registry = attr.ib(
default=attr.Factory(CollectorRegistry),
validator=attr.validators.instance_of(CollectorRegistry),
)
_clock = attr.ib(
validator=provides(IReactorTime),
default=attr.Factory(partial(namedAny, "twisted.internet.reactor")),
)
_public_key = attr.ib(init=False)
_metric_spending_successes = attr.ib(init=False)
_bucket_writer_disconnect_markers: dict[
BucketWriter, tuple[IRemoteReference, Any]
] = attr.ib(
init=False,
default=attr.Factory(dict),
)
@_public_key.default
def _get_public_key(self):
# attrs evaluates defaults (whether specified inline or via decorator)
# in the order the attributes were defined in the class definition,
# so that `self._signing_key` will be assigned when this runs.
return PublicKey.from_signing_key(self._signing_key)
def _bucket_writer_closed(self, bw: BucketWriter):
"""
This is registered as a callback with the storage backend and receives
notification when a bucket writer is closed. It removes the
disconnection-based cleanup callback for the given bucket.
"""
# This implementation was originally copied from
# allmydata.storage.server.FoolscapStorageServer. Since we don't use
# Tahoe's Foolscap storage server layer we need to do this bucket
# writer bookkeeping ourselves.
if bw in self._bucket_writer_disconnect_markers:
canary, disconnect_marker = self._bucket_writer_disconnect_markers.pop(bw)
canary.dontNotifyOnDisconnect(disconnect_marker)
def __attrs_post_init__(self):
"""
Finish initialization after attrs does its job. This consists of
registering a cleanup handler with the storage backend.
"""
self._original.register_bucket_writer_close_handler(self._bucket_writer_closed)
def _get_spending_histogram_buckets(self):
"""
Create the upper bounds for the ZKAP spending histogram.
"""
# We want a lot of small buckets to be able to get an idea of how much
# spending is for tiny files where our billing system doesn't work
# extremely well. We also want some large buckets so we have a point
# of comparison - is there a lot more or less spending on big files
# than small files? Prometheus recommends a metric have a maximum
# cardinality below 10
# (<https://prometheus.io/docs/practices/instrumentation/#do-not-overuse-labels>).
# Histograms are implemented with labels so the cardinality is equal
# to the number of buckets. We will push this a little bit so we can
# span a better range. The good news is that this is a static
# cardinality (it does not change based on the data observed) so we
# are not at risk of blowing up the metrics overhead unboundedly. 11
# finite buckets + 1 infinite bucket covers 1 to 1024 ZKAPs (plus
# infinity) and only needs 12 buckets.
return list(2**n for n in range(11)) + [float("inf")]
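# The bounds produced above are [1, 2, 4, 8, 16, 32, 64, 128, 256, 512,
# 1024, inf]: the small powers of two resolve spending on tiny files while
# the large ones give a point of comparison for big files, staying within
# the cardinality guidance described above.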
@_metric_spending_successes.default
def _make_histogram(self):
return Histogram(
"zkapauthorizer_server_spending_successes",
"ZKAP Spending Successes histogram",
registry=self._registry,
buckets=self._get_spending_histogram_buckets(),
)
def _clear_metrics(self):
"""
Forget all recorded metrics.
"""
# There is also a `clear` method but it's for something else. See
# https://github.com/prometheus/client_python/issues/707
self._metric_spending_successes._metric_init()
def remote_get_version(self):
"""
Pass-through without pass check to allow clients to learn about our
version and configuration in case it helps them decide how to behave.
"""
return self._original.get_version()
def remote_allocate_buckets(
self,
passes,
storage_index,
renew_secret,
cancel_secret,
sharenums,
allocated_size,
canary,
):
"""
Pass-through after a pass check to ensure that clients can only allocate
storage for immutable shares if they present valid passes.
"""
validation = _ValidationResult.validate_passes(
allocate_buckets_message(storage_index),
passes,
self._signing_key,
)
# Note: The *allocate_buckets* protocol allows for some shares to
# already exist on the server. When this is the case, the cost of the
# operation is based only on the shares which are really allocated
# here. It's not clear if we can allow the client to supply the
# reduced number of passes in the call but we can be sure to only mark
# as spent enough passes to cover the allocated buckets. The return
# value of the method will tell the client what the true cost was and
# they can update their books in the same way.
#
# "Spending" isn't implemented yet so there is no code here to deal
# with this fact (though the client does do the necessary bookkeeping
# already). See
# https://github.com/PrivateStorageio/ZKAPAuthorizer/issues/41.
#
# Note: The downside of this scheme is that the client has revealed
# some tokens to us. If we act in bad faith we can use this
# information to correlate this operation with a future operation
# where they are re-spent. We don't do this but it would be better if
# we fixed the protocol so it's not even possible. Probably should
# file a ticket for this.
check_pass_quantity_for_write(
self._pass_value,
validation,
sharenums,
allocated_size,
)
alreadygot, bucketwriters = self._original.allocate_buckets(
storage_index,
renew_secret,
cancel_secret,
sharenums,
allocated_size,
renew_leases=False,
)
# We just committed to spending some of the presented passes. If
# `alreadygot` is not empty then we didn't commit to spending *all* of
# them. (Also, we didn't *accept* data for storage yet - but that's a
# defect in the spending protocol and metrics can't fix it so just
# ignore that for now.)
#
# This expression mirrors the expression the client uses to determine
# how many passes were spent when it processes the result we return to
# it.
spent_passes = required_passes(
self._pass_value,
[allocated_size] * len(bucketwriters),
)
self._metric_spending_successes.observe(spent_passes)
# Copy/paste the disconnection handling logic from
# StorageServer.remote_allocate_buckets.
for bw in bucketwriters.values():
disconnect_marker = canary.notifyOnDisconnect(bw.disconnected)
self._bucket_writer_disconnect_markers[bw] = (
canary,
disconnect_marker,
)
self._spender.mark_as_spent(
self._public_key,
validation.valid[:spent_passes],
)
return alreadygot, {
k: FoolscapBucketWriter(bw) for (k, bw) in bucketwriters.items()
}
def remote_get_buckets(self, storage_index):
"""
Pass-through without pass check to let clients read immutable shares as
long as those shares exist.
"""
return {
k: FoolscapBucketReader(bucket)
for (k, bucket) in self._original.get_buckets(storage_index).items()
}
def remote_add_lease(self, passes, storage_index, *a, **kw):
"""
Pass-through after a pass check to ensure clients can only extend the
duration of share storage if they present valid passes.
"""
validation = _ValidationResult.validate_passes(
add_lease_message(storage_index),
passes,
self._signing_key,
)
check_pass_quantity_for_lease(
self._pass_value,
storage_index,
validation,
self._original,
)
result = self._original.add_lease(storage_index, *a, **kw)
self._spender.mark_as_spent(
self._public_key,
validation.valid,
)
self._metric_spending_successes.observe(len(validation.valid))
return result
def remote_advise_corrupt_share(self, *a, **kw):
"""
Pass-through without a pass check to let clients inform us of possible
issues with the system without incurring any cost to themselves.
"""
return self._original.advise_corrupt_share(*a, **kw)
def remote_share_sizes(self, storage_index_or_slot, sharenums):
with start_action(
action_type="zkapauthorizer:storage-server:remote:share-sizes",
storage_index_or_slot=storage_index_or_slot,
):
return dict(
get_share_sizes(self._original, storage_index_or_slot, sharenums)
)
def remote_stat_shares(
self, storage_indexes_or_slots: list[bytes]
) -> list[dict[int, ShareStat]]:
return list(
dict(get_share_stats(self._original, storage_index_or_slot, None))
for storage_index_or_slot in storage_indexes_or_slots
)
def remote_slot_testv_and_readv_and_writev(
self,
passes,
storage_index,
secrets,
tw_vectors,
r_vector,
):
"""
Perform a test-and-set on a number of shares in a given slot.
Optionally, also read some data to be returned before writing any
changes.
If storage-time will be allocated by the operation then validate the
given passes and ensure they are of sufficient quantity to pay for the
allocation.
Specifically, passes are required in the following cases:
* If shares are created then a lease is added to them.
Passes are required for the full size of the share.
* If shares without unexpired leases are written then a lease is added to them.
Passes are required for the full size of the shares after the write.
* If shares with unexpired leases are made larger.
Passes are required for the difference in price between the old and new size.
Note that the lease is *not* renewed in this case (see #254).
"""
with start_action(
action_type="zkapauthorizer:storage-server:remote:slot-testv-and-readv-and-writev",
storage_index=b2a(storage_index),
path=storage_index_to_dir(storage_index),
):
result = self._slot_testv_and_readv_and_writev(
passes,
storage_index,
secrets,
tw_vectors,
r_vector,
)
if isinstance(result, Deferred):
raise TypeError("_slot_testv_and_readv_and_writev returned Deferred")
return result
def _slot_testv_and_readv_and_writev(
self,
passes,
storage_index,
secrets,
tw_vectors,
r_vector,
):
# Get a stable time to use for all lease expiration checks that are
# part of this call.
now = self._clock.seconds()
# We're not exactly sure what to do with mutable container truncations
# and the official client doesn't ever use that feature so just
# disable it by rejecting all attempts here.
for (testv, writev, new_length) in tw_vectors.values():
if new_length is not None:
raise NewLengthRejected(new_length)
# Check passes for cryptographic validity.
validation = _ValidationResult.validate_passes(
slot_testv_and_readv_and_writev_message(storage_index),
passes,
self._signing_key,
)
# Inspect the operation to determine its price based on any
# allocations.
required_new_passes = get_writev_price(
self._original,
self._pass_value,
storage_index,
tw_vectors,
now,
)
# Fail the operation right now if there aren't enough valid passes to
# cover the price.
if required_new_passes > len(validation.valid):
validation.raise_for(required_new_passes)
# Perform the operation.
result = self._original.slot_testv_and_readv_and_writev(
storage_index,
secrets,
tw_vectors,
r_vector,
# Disable all lease renewal logic from the wrapped storage server.
# We'll add or renew leases based on our billing model.
renew_leases=False,
)
# Add the leases that we charged the client for. This includes:
#
# - leases on newly created shares
#
# - leases on existing, modified shares without an unexpired lease
#
# Note it does not include existing shares that grew enough to be more
# expensive. The operation was required to pay the full price
# difference but this only grants storage for the remainder of the
# existing lease period. This results in the client being overcharged
# somewhat.
add_leases_for_writev(self._original, storage_index, secrets, tw_vectors, now)
self._spender.mark_as_spent(
self._public_key,
validation.valid,
)
# The operation has fully succeeded.
self._metric_spending_successes.observe(required_new_passes)
# Propagate the result of the operation.
return result
def remote_slot_readv(self, *a, **kw):
"""
Pass-through without a pass check to let clients read mutable shares as
long as those shares exist.
"""
return self._original.slot_readv(*a, **kw)
def check_pass_quantity(pass_value, validation, share_sizes):
"""
Check that the given number of passes is sufficient to cover leases for
one period for shares of the given sizes.
:param int pass_value: The value of a single pass in bytes × lease periods.
:param _ValidationResult validation: The validating results for a list of passes.
:param list[int] share_sizes: The sizes of the shares for which the lease
is being created.
:raise MorePassesRequired: If the given number of passes is too few for
the given share sizes.
:return: ``None`` if the given number of passes is sufficient.
"""
required_pass_count = required_passes(pass_value, share_sizes)
if len(validation.valid) < required_pass_count:
validation.raise_for(required_pass_count)
def check_pass_quantity_for_lease(
pass_value: int,
storage_index: bytes,
validation: _ValidationResult,
storage_server: StorageServer,
) -> dict[int, int]:
"""
Check that the given number of passes is sufficient to add or renew a
lease for one period for the given storage index.
:param int pass_value: The value of a single pass in bytes × lease periods.
:param _ValidationResult validation: The validating results for a list of passes.
:raise MorePassesRequired: If the given number of passes is too few for
the share sizes at the given storage index.
:return: A mapping from share number to share size on the server if the
number of passes given is sufficient.
"""
allocated_sizes = dict(
get_share_sizes(
storage_server,
storage_index,
list(get_all_share_numbers(storage_server, storage_index)),
),
)
check_pass_quantity(pass_value, validation, allocated_sizes.values())
return allocated_sizes
def check_pass_quantity_for_write(pass_value, validation, sharenums, allocated_size):
"""
Determine if the given number of valid passes is sufficient for an
attempted write.
:param int pass_value: The value of a single pass in bytes × lease periods.
:param _ValidationResult validation: The validating results for a list of passes.
:param set[int] sharenums: The shares being written to.
:param int allocated_size: The size of each share.
:raise MorePassesRequired: If the number of valid passes given is too
small.
:return: ``None`` if the number of valid passes given is sufficient.
"""
check_pass_quantity(pass_value, validation, [allocated_size] * len(sharenums))
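# Worked example (assuming required_passes rounds the total allocated size
# up to a whole number of passes): with a pass_value of 1 MiB and a write
# creating 5 shares of 3 MiB each, 15 MiB of storage-time is being
# allocated, so 15 valid passes are required and 14 would raise
# MorePassesRequired.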
def get_all_share_paths(storage_server, storage_index):
"""
Get the paths of all shares in the given storage index (or slot).
:param allmydata.storage.server.StorageServer storage_server: The storage
server which owns the storage index.
:param bytes storage_index: The storage index (or slot) in which to look
up shares.
:return: A generator of tuples of (int, bytes) giving a share number and
the path to storage for that share number.
"""
bucket = join(storage_server.sharedir, storage_index_to_dir(storage_index))
try:
contents = listdir(bucket)
except OSError as e:
if e.errno == ENOENT:
return
raise
for candidate in contents:
try:
sharenum = int(candidate)
except ValueError:
pass
else:
yield sharenum, join(bucket, candidate)
def get_all_share_numbers(storage_server, storage_index):
"""
Get all share numbers in the given storage index (or slot).
:param allmydata.storage.server.StorageServer storage_server: The storage
server which owns the storage index.
:param bytes storage_index: The storage index (or slot) in which to look
up share numbers.
:return: A generator of int giving share numbers.
"""
for sharenum, sharepath in get_all_share_paths(storage_server, storage_index):
yield sharenum
@log_call(
action_type="zkapauthorizer:storage-server:get-share-sizes",
include_args=["storage_index_or_slot", "sharenums"],
)
def get_share_sizes(storage_server, storage_index_or_slot, sharenums):
"""
Get sizes of the given share numbers for the given storage index *or*
slot.
:see: ``get_share_stats``
:return: A list of tuples of (int, int) where the first element is a share
number and the second element is the data size for that share number.
"""
return list(
(sharenum, stat.size)
for (sharenum, stat) in get_share_stats(
storage_server, storage_index_or_slot, sharenums
)
)
def get_share_stats(storage_server, storage_index_or_slot, sharenums):
"""
Get the stats for the given share numbers for the given storage index *or*
slot.
:param allmydata.storage.server.StorageServer storage_server: The storage
server which owns the storage index.
:param bytes storage_index_or_slot: The storage index (or slot) in which
to look up share numbers.
:param sharenums: A container of share numbers to use to filter the
results. Only information about share numbers in this container is
included in the result. Or, ``None`` to get sizes for all shares
which exist.
:return: A generator of tuples of (int, ShareStat) where the first element
is a share number and the second element gives stats about that share.
"""
stat_func = None
for sharenum, sharepath in get_all_share_paths(
storage_server, storage_index_or_slot
):
if stat_func is None:
stat_func = get_stat(sharepath)
if sharenums is None or sharenum in sharenums:
info = stat_func(storage_server, storage_index_or_slot, sharepath)
yield sharenum, info
def get_storage_index_share_size(sharepath):
"""
Get the size of a share belonging to a storage index (an immutable share).
:param bytes sharepath: The path to the share file.
:return int: The data size of the share in bytes.
"""
# From src/allmydata/storage/immutable.py
#
# The share file has the following layout:
# 0x00: share file version number, four bytes, current version is 2
# 0x04: share data length, four bytes big-endian = A # See Footnote 1 below.
# 0x08: number of leases, four bytes big-endian
# 0x0c: beginning of share data (see immutable.layout.WriteBucketProxy)
# A+0x0c = B: first lease. Lease format is:
# B+0x00: owner number, 4 bytes big-endian, 0 is reserved for no-owner
# B+0x04: renew secret, 32 bytes (SHA256)
# B+0x24: cancel secret, 32 bytes (SHA256)
# B+0x44: expiration time, 4 bytes big-endian seconds-since-epoch
# B+0x48: next lease, or end of record
#
# Footnote 1: as of Tahoe v1.3.0 this field is not used by storage
# servers, but it is still filled in by storage servers in case the
# storage server software gets downgraded from >= Tahoe v1.3.0 to < Tahoe
# v1.3.0, or the share file is moved from one storage server to
# another. The value stored in this field is truncated, so if the actual
# share data length is >= 2**32, then the value stored in this field will
# be the actual share data length modulo 2**32.
share_file_size = stat(sharepath).st_size
header_format = ">LLL"
header_size = calcsize(header_format)
with open(sharepath, "rb") as share_file:
header = share_file.read(calcsize(header_format))
if len(header) != header_size:
raise ValueError(
"Tried to read {} bytes of share file header, got {!r} instead.".format(
calcsize(header_format),
header,
),
)
version, _, number_of_leases = unpack(header_format, header)
if version in (1, 2):
# Version 1 and 2 don't differ in a way that changes the size
# calculation.
return share_file_size - header_size - (number_of_leases * (4 + 32 + 32 + 4))
raise ValueError(
"Cannot interpret version {} share file.".format(version),
)
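# Worked example: a version-2 share file of 1,000,156 bytes with 2 leases
# yields 1,000,156 - 12 (header) - 2 * 72 (per-lease) = 1,000,000 bytes of
# share data.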
def stat_bucket(storage_server, storage_index, sharepath):
"""
Get a ``ShareStat`` for the shares in a bucket.
"""
return ShareStat(
size=get_storage_index_share_size(sharepath),
lease_expiration=get_lease_expiration(sharepath),
)
def stat_slot(storage_server, slot, sharepath):
"""
Get a ``ShareStat`` for the shares in a slot.
"""
return ShareStat(
size=get_slot_share_size(sharepath),
lease_expiration=get_lease_expiration(sharepath),
)
def get_lease_expiration(sharepath: str) -> Optional[int]:
"""
Get the latest lease expiration time for the share at the given path, or
``None`` if there are no leases on it.
:param sharepath: The path to the share file to inspect.
"""
leases = list(
lease.get_expiration_time() for lease in get_share_file(sharepath).get_leases()
)
if leases:
return max(leases)
return None
def get_slot_share_size(sharepath):
"""
Get the size of a share belonging to a slot (a mutable share).
:param bytes sharepath: The path to the share file.
:return int: The data size of the share in bytes.
"""
with open(sharepath, "rb") as share_file:
share_data_length_bytes = share_file.read(92)[-8:]
(share_data_length,) = unpack(">Q", share_data_length_bytes)
return share_data_length
def get_stat(sharepath):
"""
Get a function that can retrieve the metadata from the share at the given
path.
This is necessary to differentiate between buckets and slots.
"""
# Figure out if it is a storage index or a slot.
with open(sharepath, "rb") as share_file:
magic = share_file.read(32)
if len(magic) < 32:
# Tahoe could check for this.
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3853
raise ValueError("Share file has short header")
if ShareFile.is_valid_header(magic):
return stat_bucket
elif MutableShareFile.is_valid_header(magic):
return stat_slot
else:
raise ValueError("Cannot interpret share header {!r}".format(magic))
def add_leases_for_writev(storage_server, storage_index, secrets, tw_vectors, now):
"""
Add a new lease using the given secrets to all shares written by
``tw_vectors``.
"""
for (sharenum, sharepath) in get_all_share_paths(storage_server, storage_index):
testv, datav, new_length = tw_vectors.get(sharenum, (None, b"", None))
if datav or (new_length is not None):
# It has data or a new length - it is a write.
if share_has_active_leases(storage_server, storage_index, sharenum, now):
# It's fine, leave it be.
continue
# Aha. It has no lease that hasn't expired. Give it one.
(write_enabler, renew_secret, cancel_secret) = secrets
share = get_share_file(sharepath)
share.add_or_renew_lease(
storage_server.get_available_space(),
LeaseInfo(
owner_num=1,
renew_secret=renew_secret,
cancel_secret=cancel_secret,
expiration_time=now
+ ZKAPAuthorizerStorageServer.LEASE_PERIOD.total_seconds(),
nodeid=storage_server.my_nodeid,
),
)
def get_share_path(
storage_server: StorageServer, storage_index: bytes, sharenum: int
) -> FilePath:
"""
Get the path to the given storage server's storage for the given share.
"""
return (
FilePath(storage_server.sharedir)
.preauthChild(storage_index_to_dir(storage_index))
.child("{}".format(sharenum))
)
def share_has_active_leases(
storage_server: StorageServer, storage_index: bytes, sharenum: int, now: float
) -> bool:
"""
Determine whether the given share on the given server has an unexpired
lease or not.
:return: ``True`` if it has at least one unexpired lease, ``False``
otherwise.
"""
sharepath = get_share_path(storage_server, storage_index, sharenum)
share = get_share_file(sharepath.path)
return any(lease.get_expiration_time() > now for lease in share.get_leases())
def get_writev_price(
storage_server: StorageServer,
pass_value: int,
storage_index: bytes,
tw_vectors: TestAndWriteVectorsForShares,
now: float,
) -> int:
"""
Determine the price to execute the given test/write vectors.
"""
# Find the current size of shares being written.
current_sizes = dict(
get_share_sizes(
storage_server,
storage_index,
# Here's how we restrict the result to only written shares.
sharenums=get_write_sharenums(tw_vectors),
),
)
# Zero out the size of any share without an unexpired lease. We will
# renew the lease on this share along with the write but the client
# must supply the necessary passes to do so.
current_sizes.update(
{
sharenum: 0
for sharenum in current_sizes
if not share_has_active_leases(
storage_server,
storage_index,
sharenum,
now,
)
}
)
# Compute the number of passes required to execute the given writev
# against these existing shares.
return get_required_new_passes_for_mutable_write(
pass_value,
current_sizes,
tw_vectors,
)
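# Worked pricing examples for get_writev_price (illustrative sizes, with an
# assumed pass_value of 1 MiB):
#
# * Writing a brand new 2 MiB share prices the full 2 MiB: 2 passes.
# * Growing a share with an unexpired lease from 2 MiB to 3 MiB prices only
#   the 1 MiB difference: 1 pass (and the lease is not renewed; see #254).
# * Rewriting a share whose leases have all expired is priced as if the
#   share were brand new, because its current size is zeroed out above.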
from base64 import b64decode, b64encode
from datetime import timedelta
from functools import partial
from hashlib import sha256
from json import loads
from operator import delitem, setitem
import attr
import challenge_bypass_ristretto
from attrs import define
from treq import content
from treq.client import HTTPClient
from twisted.internet.defer import Deferred, fail, inlineCallbacks, returnValue, succeed
from twisted.internet.task import LoopingCall
from twisted.logger import Logger
from twisted.python.reflect import namedAny
from twisted.python.url import URL
from twisted.web.client import Agent
from zope.interface import Interface, implementer
from ._base64 import urlsafe_b64decode
from ._json import dumps_utf8
from ._stack import less_limited_stack
from .model import Error as model_Error
from .model import Pass
from .model import Pending as model_Pending
from .model import RandomToken
from .model import Redeeming as model_Redeeming
from .model import UnblindedToken
from .model import Unpaid as model_Unpaid
from .model import Voucher
RETRY_INTERVAL = timedelta(milliseconds=1000)
# It would be nice to have frozen exception types but Failure.cleanFailure
# interacts poorly with these.
# https://twistedmatrix.com/trac/ticket/9641
# https://twistedmatrix.com/trac/ticket/9771
@define(auto_exc=False)
class UnexpectedResponse(Exception):
"""
The issuer responded in an unexpected and unhandled way.
"""
code: int
body: bytes
class AlreadySpent(Exception):
"""
An attempt was made to redeem a voucher which has already been redeemed.
The redemption cannot succeed and should not be retried automatically.
"""
class Unpaid(Exception):
"""
An attempt was made to redeem a voucher which has not yet been paid for.
The redemption attempt may be automatically retried at some point.
"""
@attr.s(auto_attribs=True)
class UnrecognizedFailureReason(Exception):
"""
An attempt was made to redeem a voucher and the response contained an unknown reason.
The redemption attempt may be automatically retried at some point.
"""
response: dict = attr.ib()
@attr.s
class RedemptionResult(object):
"""
Contain the results of an attempt to redeem a voucher for ZKAP material.
:ivar unblinded_tokens: The tokens which resulted from the redemption.
:ivar public_key: The public key which the server proved was involved in
the redemption process.
"""
unblinded_tokens: list[UnblindedToken] = attr.ib(
validator=attr.validators.instance_of(list),
)
public_key: str = attr.ib(
validator=attr.validators.instance_of(str),
)
class IRedeemer(Interface):
"""
An ``IRedeemer`` can exchange a voucher for one or more passes.
"""
def random_tokens_for_voucher(voucher, counter, count):
"""
Generate a number of random tokens to use in the redemption process for
the given voucher.
:param Voucher voucher: The voucher the tokens will be associated
with.
:param int counter: See ``redeemWithCounter``.
:param int count: The number of random tokens to generate.
:return list[RandomToken]: The generated tokens. Random tokens must
be unique over the lifetime of the Tahoe-LAFS node where this
plugin is being used but the same tokens *may* be generated for
the same voucher. The tokens must be kept secret to preserve the
anonymity property of the system.
"""
def redeemWithCounter(voucher, counter, random_tokens):
"""
Redeem a voucher for unblinded tokens which can be used to construct
passes.
Implementations of this method do not need to be fault tolerant. If a
redemption attempt is interrupted before it completes, it is the
caller's responsibility to call this method again with the same
arguments.
:param Voucher voucher: The voucher to redeem.
:param int counter: The counter to use in this redemption attempt. To
support vouchers which can be redeemed for a larger number of
tokens than is practical to handle at once, one voucher can be
partially redeemed repeatedly until the complete set of tokens has
been received. Each partial redemption must have a distinct
counter value.
:param list[RandomToken] random_tokens: The random tokens to use in
the redemption process.
:return: A ``Deferred`` which fires with a ``RedemptionResult``
instance or which fails with any error to allow a retry to be made
at some future point. It may also fail with an ``AlreadySpent``
error to indicate the redemption server considers the voucher to
have been redeemed already and will not allow it to be redeemed.
"""
def tokens_to_passes(message, unblinded_tokens):
"""
Construct passes from unblinded tokens which are suitable for use with a
given message.
:param bytes message: A valid utf-8-encoded byte sequence which serves
to protect the resulting passes from replay usage. It is
preferable if every use of passes is associated with a unique
message.
:param list[UnblindedToken] unblinded_tokens: Unblinded tokens,
previously returned by a call to this implementation's ``redeem``
method.
:return list[Pass]: Passes constructed from the message and unblinded
tokens. There is one pass in the resulting list for each unblinded
token in ``unblinded_tokens``.
"""
@attr.s
@implementer(IRedeemer)
class IndexedRedeemer(object):
"""
An ``IndexedRedeemer`` delegates redemption to a redeemer chosen to
correspond to the redemption counter given.
"""
_log = Logger()
redeemers = attr.ib()
def tokens_to_passes(self, message, unblinded_tokens):
raise NotImplementedError("IndexedRedeemer cannot create passes")
def random_tokens_for_voucher(self, voucher, counter, count):
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
self._log.info(
"IndexedRedeemer redeeming {voucher}[{counter}] using {delegate}.",
voucher=voucher,
counter=counter,
delegate=self.redeemers[counter],
)
return self.redeemers[counter].redeemWithCounter(
voucher,
counter,
random_tokens,
)
@implementer(IRedeemer)
class NonRedeemer(object):
"""
A ``NonRedeemer`` never tries to redeem vouchers for ZKAPs.
"""
@classmethod
def make(cls, section_name, node_config, announcement, reactor):
return cls()
def random_tokens_for_voucher(self, voucher, counter, count):
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
# Don't try to redeem them.
return Deferred()
def tokens_to_passes(self, message, unblinded_tokens):
raise Exception(
"Cannot be called because no unblinded tokens are ever returned."
)
@implementer(IRedeemer)
@attr.s(frozen=True)
class ErrorRedeemer(object):
"""
An ``ErrorRedeemer`` immediately locally fails voucher redemption with a
configured error.
"""
details = attr.ib(validator=attr.validators.instance_of(str))
@classmethod
def make(cls, section_name, node_config, announcement, reactor):
details = node_config.get_config(
section=section_name,
option="details",
)
return cls(details)
def random_tokens_for_voucher(self, voucher, counter, count):
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
return fail(Exception(self.details))
def tokens_to_passes(self, message, unblinded_tokens):
raise Exception(
"Cannot be called because no unblinded tokens are ever returned."
)
@implementer(IRedeemer)
@attr.s
class DoubleSpendRedeemer(object):
"""
A ``DoubleSpendRedeemer`` pretends to try to redeem vouchers for ZKAPs but
always fails with an error indicating the voucher has already been spent.
"""
@classmethod
def make(cls, section_name, node_config, announcement, reactor):
return cls()
def tokens_to_passes(self, message, unblinded_tokens):
raise NotImplementedError("DoubleSpendRedeemer cannot create passes")
def random_tokens_for_voucher(self, voucher, counter, count):
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
return fail(AlreadySpent(voucher))
@implementer(IRedeemer)
@attr.s
class UnpaidRedeemer(object):
"""
An ``UnpaidRedeemer`` pretends to try to redeem vouchers for ZKAPs but
always fails with an error indicating the voucher has not been paid for.
"""
@classmethod
def make(cls, section_name, node_config, announcement, reactor):
return cls()
def tokens_to_passes(self, message, unblinded_tokens):
raise NotImplementedError("UnpaidRedeemer cannot create passes")
def random_tokens_for_voucher(self, voucher, counter, count):
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
return fail(Unpaid(voucher))
@implementer(IRedeemer)
@attr.s
class RecordingRedeemer(object):
"""
A ``RecordingRedeemer`` delegates redemption logic to another object but
records all redemption attempts.
"""
original = attr.ib()
redemptions = attr.ib(default=attr.Factory(list))
def tokens_to_passes(self, message, unblinded_tokens):
return self.original.tokens_to_passes(message, unblinded_tokens)
def random_tokens_for_voucher(self, voucher, counter, count):
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
self.redemptions.append((voucher, counter, random_tokens))
return self.original.redeemWithCounter(voucher, counter, random_tokens)
def dummy_random_tokens(voucher, counter, count):
v = urlsafe_b64decode(voucher.number)
def dummy_random_token(n):
return RandomToken(
# Padding is 96 (random token length) - 32 (decoded voucher
# length) - 4 (fixed-width counter)
b64encode(
v + "{:0>4}{:0>60}".format(counter, n).encode("ascii"),
),
)
return list(dummy_random_token(n) for n in range(count))
@implementer(IRedeemer)
@attr.s
class DummyRedeemer(object):
"""
A ``DummyRedeemer`` pretends to redeem vouchers for ZKAPs. Instead of
really redeeming them, it makes up some fake ZKAPs and pretends those are
the result.
:ivar str _public_key: The base64-encoded public key to return with
all successful redemption results. As with the tokens returned by
this redeemer, chances are this is not actually a valid public key.
Its corresponding private key certainly has not been used to sign
anything.
"""
_public_key = attr.ib(
validator=attr.validators.instance_of(str),
)
@classmethod
def make(cls, section_name, node_config, announcement, reactor):
return cls(
node_config.get_config(
section=section_name,
option="issuer-public-key",
),
)
def random_tokens_for_voucher(self, voucher, counter, count):
"""
Generate some number of random tokens to submit along with a voucher for
redemption.
"""
return dummy_random_tokens(voucher, counter, count)
def redeemWithCounter(self, voucher, counter, random_tokens):
"""
:return: An already-fired ``Deferred`` that fires with a
``RedemptionResult`` whose ``UnblindedToken`` instances wrap
meaningless values.
"""
if not isinstance(voucher, Voucher):
raise TypeError(
"Got {}, expected instance of Voucher".format(
voucher,
),
)
def dummy_unblinded_token(random_token):
random_value = b64decode(random_token.token_value)
unblinded_value = random_value + b"x" * (96 - len(random_value))
return UnblindedToken(b64encode(unblinded_value))
return succeed(
RedemptionResult(
list(dummy_unblinded_token(token) for token in random_tokens),
self._public_key,
),
)
def tokens_to_passes(self, message, unblinded_tokens):
def token_to_pass(token):
# Generate distinct strings based on the unblinded token which we
# can include in the resulting Pass. This ensures the pass values
# will be unique if and only if the unblinded tokens were unique
# (barring improbable hash collisions).
token_digest = sha256(token.unblinded_token).hexdigest().encode("ascii")
preimage = b"preimage-" + token_digest[len(b"preimage-") :]
signature = b"signature-" + token_digest[len(b"signature-") :]
return Pass(
b64encode(preimage),
b64encode(signature),
)
return list(token_to_pass(token) for token in unblinded_tokens)
class IssuerConfigurationMismatch(Exception):
"""
The Ristretto issuer address in the local client configuration does not
match the Ristretto issuer address received in a storage server
announcement.
If these values do not match then there is no reason to expect that ZKAPs
will be accepted by the storage server because ZKAPs are bound to the
issuer's signing key.
This mismatch must be corrected before the storage server can be used.
Either the storage server needs to be reconfigured to respect the
authority of a different issuer (the same one the client is configured to
use), the client needs to select a different storage server to talk to, or
the client needs to be reconfigured to respect the authority of a
different issuer (the same one the storage server is announcing).
Note that issued ZKAPs cannot be exchanged between issuers except through
some ad hoc, out-of-band means. That is, if the client already has some
ZKAPs and chooses to change its configured issuer address, those existing
ZKAPs will not be usable and new ones must be obtained.
"""
def __str__(self):
return "Announced issuer ({}) disagrees with configured issuer ({}).".format(
*self.args
)
@implementer(IRedeemer)
@attr.s
class RistrettoRedeemer(object):
"""
An ``IRedeemer`` which speaks the Ristretto-flavored PrivacyPass protocol
described at
https://docs.rs/challenge-bypass-ristretto/1.0.0-pre.0/challenge_bypass_ristretto/#cryptographic-protocol
:ivar treq.client.HTTPClient _treq: An HTTP client to use to make calls to
the issuer.
:ivar URL _api_root: The root of the issuer HTTP API.
"""
_log = Logger()
_treq = attr.ib()
_api_root = attr.ib(validator=attr.validators.instance_of(URL))
@classmethod
def make(cls, section_name, node_config, announcement, reactor):
configured_issuer = node_config.get_config(
section=section_name,
option="ristretto-issuer-root-url",
)
if announcement is not None:
# Don't let us talk to a storage server that has a different idea
# about who issues ZKAPs. If we did, they *probably* wouldn't
# accept our ZKAPs since they should have the wrong signature.
#
# We should lift this limitation (that is, we should support as
# many different issuers as the user likes) in the future but
# doing so requires changing how the web interface works and
# possibly also the interface for voucher submission.
#
# If we aren't given an announcement then we're not being used in
# the context of a specific storage server so the check is
# unnecessary and impossible.
announced_issuer = announcement["ristretto-issuer-root-url"]
if announced_issuer != configured_issuer:
raise IssuerConfigurationMismatch(announced_issuer, configured_issuer)
return cls(
HTTPClient(Agent(reactor)),
URL.from_text(configured_issuer),
)
def random_tokens_for_voucher(self, voucher, counter, count):
return list(
RandomToken(
challenge_bypass_ristretto.RandomToken.create().encode_base64(),
)
for n in range(count)
)
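# The redemption below follows the blinded-token exchange: blind the random
# tokens locally, submit the blinded tokens to the issuer, then verify the
# issuer's batch DLEQ proof and unblind the returned signatures to recover
# the unblinded tokens.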
@inlineCallbacks
def redeemWithCounter(self, voucher, counter, encoded_random_tokens):
random_tokens = list(
challenge_bypass_ristretto.RandomToken.decode_base64(token.token_value)
for token in encoded_random_tokens
)
blinded_tokens = list(token.blind() for token in random_tokens)
response = yield self._treq.post(
self._api_root.child("v1", "redeem").to_text(),
dumps_utf8(
{
"redeemVoucher": voucher.number.decode("ascii"),
"redeemCounter": counter,
"redeemTokens": list(
token.encode_base64().decode("ascii")
for token in blinded_tokens
),
}
),
headers={b"content-type": b"application/json"},
)
response_body = yield content(response)
try:
result = loads(response_body)
except ValueError:
raise UnexpectedResponse(response.code, response_body)
success = result.get("success", False)
if not success:
reason = result.get("reason", None)
if reason == "double-spend":
raise AlreadySpent(voucher)
elif reason == "unpaid":
raise Unpaid(voucher)
raise UnrecognizedFailureReason(result)
self._log.info(
"Redeemed: {public_key} {proof} {count}",
public_key=result["public-key"],
proof=result["proof"],
count=len(result["signatures"]),
)
marshaled_signed_tokens = result["signatures"]
marshaled_proof = result["proof"]
marshaled_public_key = result["public-key"]
public_key = challenge_bypass_ristretto.PublicKey.decode_base64(
marshaled_public_key.encode("ascii"),
)
self._log.info("Decoded public key")
clients_signed_tokens = list(
challenge_bypass_ristretto.SignedToken.decode_base64(
marshaled_signed_token.encode("ascii"),
)
for marshaled_signed_token in marshaled_signed_tokens
)
self._log.info("Decoded signed tokens")
clients_proof = challenge_bypass_ristretto.BatchDLEQProof.decode_base64(
marshaled_proof.encode("ascii"),
)
with less_limited_stack():
self._log.info("Decoded batch proof")
clients_unblinded_tokens = clients_proof.invalid_or_unblind(
random_tokens,
blinded_tokens,
clients_signed_tokens,
public_key,
)
self._log.info("Validated proof")
unblinded_tokens = list(
UnblindedToken(token.encode_base64()) for token in clients_unblinded_tokens
)
returnValue(
RedemptionResult(
unblinded_tokens,
marshaled_public_key,
)
)
def tokens_to_passes(self, message, unblinded_tokens):
assert isinstance(message, bytes)
assert isinstance(unblinded_tokens, list)
assert all(isinstance(element, UnblindedToken) for element in unblinded_tokens)
unblinded_tokens = list(
challenge_bypass_ristretto.UnblindedToken.decode_base64(
token.unblinded_token
)
for token in unblinded_tokens
)
clients_verification_keys = list(
token.derive_verification_key_sha512() for token in unblinded_tokens
)
clients_signatures = list(
verification_key.sign_sha512(message)
for verification_key in clients_verification_keys
)
clients_preimages = list(token.preimage() for token in unblinded_tokens)
passes = list(
Pass(
preimage.encode_base64(),
signature.encode_base64(),
)
for (preimage, signature) in zip(clients_preimages, clients_signatures)
)
return passes
def token_count_for_group(num_groups, total_tokens, group_number):
"""
Determine a number of tokens to retrieve for a particular group out of an
overall redemption attempt.
:param int num_groups: The total number of groups the tokens will be
divided into.
:param int total_tokens: The total number of tokens to divide up.
:param int group_number: The particular group for which to determine a
token count.
:return int: A number of tokens to redeem in this group.
"""
if total_tokens < num_groups:
raise ValueError(
"Cannot distribute {} tokens among {} groups coherently.".format(
total_tokens,
num_groups,
),
)
if group_number >= num_groups or group_number < 0:
raise ValueError(
"Group number {} is out of valid range [0..{})".format(
group_number,
num_groups,
),
)
group_size, remainder = divmod(total_tokens, num_groups)
if group_number < remainder:
return group_size + 1
return group_size
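# Worked example: distributing 100 tokens across 16 groups gives
# divmod(100, 16) == (6, 4), so groups 0 through 3 receive 7 tokens and
# groups 4 through 15 receive 6; 4 * 7 + 12 * 6 == 100, and no two groups
# differ in size by more than one token.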
@attr.s
class PaymentController(object):
"""
The ``PaymentController`` coordinates the process of turning a voucher
into a collection of ZKAPs:
1. A voucher to be consumed is handed to the controller. Once a voucher
is handed over to the controller the controller takes all
responsibility for it.
2. The controller tells the data store to remember the voucher. The
data store provides durability for the voucher which represents an
investment (ie, a purchase) on the part of the client.
3. The controller hands the voucher and some random tokens to a redeemer.
In the future, this step will need to be retried in the case of failures.
4. When the voucher has been redeemed for unblinded tokens (inputs to
pass construction), the controller hands them to the data store with
the voucher. The data store marks the voucher as redeemed and stores
the unblinded tokens for use by the storage client.
:ivar int default_token_count: The number of tokens to request when
redeeming a voucher, if no other count is given when the redemption is
started.
:ivar set[str] allowed_public_keys: The base64-encoded public keys for
which to accept tokens.
:ivar dict[str, Redeeming] _active: A mapping from voucher identifiers
which currently have redemption attempts in progress to a
``Redeeming`` state representing the attempt.
:ivar dict[str, datetime] _error: A mapping from voucher identifiers
which have recently failed with an unrecognized, transient error.
:ivar dict[str, datetime] _unpaid: A mapping from voucher identifiers
which have recently failed a redemption attempt due to an unpaid
response from the redemption server to timestamps when the failure was
observed.
:ivar int num_redemption_groups: The number of groups into which to divide
tokens during the redemption process, with each group being redeemed
separately from the rest. This value needs to agree with the value
the PaymentServer is configured with.
TODO: Retrieve this value from the PaymentServer or from the
ZKAPAuthorizer configuration instead of just hard-coding a duplicate
value in this implementation.
:ivar IReactorTime _clock: The reactor to use for scheduling redemption
retries.
"""
_log = Logger()
store = attr.ib()
redeemer = attr.ib()
default_token_count = attr.ib()
allowed_public_keys = attr.ib(validator=attr.validators.instance_of(set))
num_redemption_groups = attr.ib(default=16)
_clock = attr.ib(default=None)
_error = attr.ib(default=attr.Factory(dict))
_unpaid = attr.ib(default=attr.Factory(dict))
_active = attr.ib(default=attr.Factory(dict))
def __attrs_post_init__(self):
"""
Check the voucher store for any vouchers in need of redemption.
This is an initialization-time hook called by attrs.
"""
if self._clock is None:
self._clock = namedAny("twisted.internet.reactor")
self._check_pending_vouchers()
# Also start a time-based polling loop to retry redemption of vouchers
# in retryable error states.
self._schedule_retries()
def _schedule_retries(self):
# TODO: should not eagerly schedule calls. If there are no vouchers
# in an error state we shouldn't wake up at all.
#
# TODO: should schedule retries on a bounded exponential backoff
# instead, perhaps on a per-voucher basis.
self._retry_task = LoopingCall(self._retry_redemption)
self._retry_task.clock = self._clock
self._retry_task.start(
RETRY_INTERVAL.total_seconds(),
now=False,
)
def _retry_redemption(self):
for voucher in list(self._error.keys()) + list(self._unpaid.keys()):
if voucher in self._active:
continue
if self.get_voucher(voucher).state.should_start_redemption():
self.redeem(voucher)
def _check_pending_vouchers(self):
"""
Find vouchers in the voucher store that need to be redeemed and try to
redeem them.
"""
vouchers = self.store.list()
for voucher in vouchers:
if voucher.state.should_start_redemption():
self._log.info(
"Controller found voucher ({voucher}) at startup that needs redemption.",
voucher=voucher.number,
)
self.redeem(voucher.number)
else:
self._log.info(
"Controller found voucher ({voucher}) at startup that does not need redemption.",
voucher=voucher.number,
)
def _get_random_tokens_for_voucher(
self, voucher, counter, num_tokens, total_tokens
):
"""
Generate or load random tokens for a redemption attempt of a voucher.
:param int num_tokens: The number of tokens to get.
:param int total_tokens: The total number of tokens for which this
voucher is expected to be redeemed.
"""
def get_tokens():
self._log.info(
"Generating random tokens for a voucher ({voucher}).",
voucher=voucher,
)
return self.redeemer.random_tokens_for_voucher(
Voucher(
number=voucher,
# Unclear whether this information is useful to redeemers
# but we cannot construct a Voucher without some value
# here.
expected_tokens=total_tokens,
),
counter,
num_tokens,
)
return self.store.add(
voucher,
total_tokens,
counter,
get_tokens,
)
@inlineCallbacks
def redeem(self, voucher, num_tokens=None):
"""
:param bytes voucher: A voucher to redeem.
:param int num_tokens: A number of tokens to redeem.
"""
# Try to get an existing voucher object for the given number.
try:
voucher_obj = self.store.get(voucher)
except KeyError:
# This is our first time dealing with this number.
counter_start = 0
if num_tokens is None:
num_tokens = self.default_token_count
else:
num_tokens = voucher_obj.expected_tokens
# Determine the starting point from the state.
if voucher_obj.state.should_start_redemption():
counter_start = voucher_obj.state.counter
else:
raise ValueError(
"Cannot redeem voucher in state {}.".format(
voucher_obj.state,
),
)
self._log.info(
"Starting redemption of {voucher}[{start}..{end}] for {num_tokens} tokens.",
voucher=voucher,
start=counter_start,
end=self.num_redemption_groups,
num_tokens=num_tokens,
)
for counter in range(counter_start, self.num_redemption_groups):
# Pre-generate the random tokens to use when redeeming the voucher.
# These are persisted with the voucher so the redemption can be made
# idempotent. We don't want to lose the value if we fail after the
# server deems the voucher redeemed but before we persist the result.
# With a stable set of tokens, we can re-submit them and the server
# can re-sign them without fear of issuing excess passes. Whether the
# server signs a given set of random tokens once or many times, the
# number of passes that can be constructed is still only the size of
# the set of random tokens.
token_count = token_count_for_group(
self.num_redemption_groups, num_tokens, counter
)
tokens = self._get_random_tokens_for_voucher(
voucher,
counter,
num_tokens=token_count,
total_tokens=num_tokens,
)
# Reload state before each iteration. We expect it to change each time.
voucher_obj = self.store.get(voucher)
succeeded = yield self._perform_redeem(voucher_obj, counter, tokens)
if not succeeded:
self._log.info(
"Temporarily suspending redemption of {voucher} after non-success result.",
voucher=voucher,
)
break
def _perform_redeem(self, voucher, counter, random_tokens):
"""
Use the redeemer to redeem the given voucher and random tokens.
This will not persist the voucher or random tokens but it will persist
the result.
:return Deferred[bool]: A ``Deferred`` firing with ``True`` if and
only if redemption succeeds.
"""
if not isinstance(voucher.state, model_Pending):
raise ValueError(
"Cannot redeem voucher in state {} instead of Pending.".format(
voucher.state,
),
)
# Ask the redeemer to do the real task of redemption.
self._log.info(
"Redeeming random tokens for a voucher ({voucher}).", voucher=voucher
)
d = bracket(
lambda: setitem(
self._active,
voucher.number,
model_Redeeming(
started=self.store.now(),
counter=voucher.state.counter,
),
),
lambda: delitem(self._active, voucher.number),
lambda: self.redeemer.redeemWithCounter(voucher, counter, random_tokens),
)
d.addCallbacks(
partial(self._redeem_success, voucher.number, counter),
partial(self._redeem_failure, voucher.number),
)
d.addErrback(partial(self._final_redeem_error, voucher.number))
return d
def _redeem_success(self, voucher, counter, result):
"""
Update the database state to reflect that a voucher was redeemed and to
store the resulting unblinded tokens (which can be used to construct
passes later).
"""
self._log.info(
"Inserting redeemed unblinded tokens for a voucher ({voucher}).",
voucher=voucher,
)
self.store.insert_unblinded_tokens_for_voucher(
voucher,
result.public_key,
result.unblinded_tokens,
completed=(counter + 1 == self.num_redemption_groups),
spendable=result.public_key in self.allowed_public_keys,
)
return True
def _redeem_failure(self, voucher, reason):
if reason.check(AlreadySpent):
self._log.error(
"Voucher {voucher} reported as already spent during redemption.",
voucher=voucher,
)
self.store.mark_voucher_double_spent(voucher)
elif reason.check(Unpaid):
self._log.error(
"Voucher {voucher} reported as not paid for during redemption.",
voucher=voucher,
)
self._unpaid[voucher] = self.store.now()
else:
self._log.error(
"Redeeming random tokens for a voucher ({voucher}) failed: {reason!r}",
reason=reason.value,
voucher=voucher,
)
self._error[voucher] = model_Error(
finished=self.store.now(),
details=reason.getErrorMessage(),
)
return False
def _final_redeem_error(self, voucher, reason):
self._log.failure(
"Redeeming random tokens for a voucher ({voucher}) encountered error.",
reason,
voucher=voucher,
)
return False
def get_voucher(self, number):
return self.incorporate_transient_state(
self.store.get(number),
)
def incorporate_transient_state(self, voucher):
"""
Create a new ``Voucher`` which represents the given voucher but which also
incorporates relevant transient state known to the controller. For
example, if a redemption attempt is currently in progress, this is
incorporated.
"""
if isinstance(voucher.state, model_Pending):
if voucher.number in self._active:
return attr.evolve(
voucher,
state=self._active[voucher.number],
)
if voucher.number in self._unpaid:
return attr.evolve(
voucher,
state=model_Unpaid(finished=self._unpaid[voucher.number]),
)
if voucher.number in self._error:
return attr.evolve(
voucher,
state=self._error[voucher.number],
)
return voucher
def get_redeemer(plugin_name, node_config, announcement, reactor):
section_name = "storageclient.plugins.{}".format(plugin_name)
redeemer_kind = node_config.get_config(
section=section_name,
option="redeemer",
default="ristretto",
)
return _REDEEMERS[redeemer_kind](section_name, node_config, announcement, reactor)
_REDEEMERS = {
"non": NonRedeemer.make,
"dummy": DummyRedeemer.make,
"double-spend": DoubleSpendRedeemer.make,
"unpaid": UnpaidRedeemer.make,
"error": ErrorRedeemer.make,
"ristretto": RistrettoRedeemer.make,
}
@inlineCallbacks
def bracket(first, last, between):
"""
Invoke an action between two other actions.
:param first: A no-argument function that may return a Deferred. It is
called first.
:param last: A no-argument function that may return a Deferred. It is
called last.
:param between: A no-argument function that may return a Deferred. It is
called after ``first`` is done and completes before ``last`` is called.
:return Deferred: A ``Deferred`` which fires with the result of
``between``.
"""
yield first()
try:
result = yield between()
except GeneratorExit:
raise
except:
yield last()
raise
else:
yield last()
returnValue(result) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/controller.py | controller.py |
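# Illustrative sketch (not part of the original module): ``bracket``
# generalizes try/finally to actions that may return Deferreds. This mirrors
# how ``_perform_redeem`` marks a voucher as active for the duration of a
# redemption; the ``marker_dict``/``key``/``work`` names are hypothetical.
def _example_bracket_usage(marker_dict, key, work):  # pragma: no cover
    from operator import delitem, setitem

    return bracket(
        lambda: setitem(marker_dict, key, "busy"),  # record that work started
        lambda: delitem(marker_dict, key),  # always clean the marker up again
        work,  # the returned Deferred fires with this action's result
    )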
from __future__ import annotations
from datetime import datetime, timedelta
from errno import ENOENT
from functools import partial
from typing import Any, Callable, Iterable
import attr
from allmydata.interfaces import IDirectoryNode, IFilesystemNode
from allmydata.util.hashutil import (
bucket_cancel_secret_hash,
bucket_renewal_secret_hash,
file_cancel_secret_hash,
file_renewal_secret_hash,
)
from aniso8601 import parse_datetime
from twisted.application.service import Service
from twisted.internet.defer import inlineCallbacks, maybeDeferred
from twisted.python.log import err
from zope.interface import implementer
from .config import Config, read_duration
from .controller import bracket
from .foolscap import ShareStat
from .model import ILeaseMaintenanceObserver
SERVICE_NAME = "lease maintenance service"
@inlineCallbacks
def visit_storage_indexes(root_nodes, visit):
"""
Call a visitor with the storage index of each of ``root_nodes`` and of all
nodes reachable from them.
:param root_nodes: A list of ``IFilesystemNode`` providers from which to start.
:param visit: A one-argument callable. It will be called with the storage
index of all visited nodes.
:return Deferred: A Deferred which fires after all nodes have been
visited.
"""
if not isinstance(root_nodes, list):
raise TypeError(
"root_nodes must be a list, not {!r}".format(
root_nodes,
)
)
for node in root_nodes:
if not IFilesystemNode.providedBy(node):
raise TypeError(
"Root nodes must provide IFilesystemNode, {!r} does not".format(
node,
)
)
stack = root_nodes[:]
while stack:
elem = stack.pop()
visit(elem.get_storage_index())
if IDirectoryNode.providedBy(elem):
children = yield elem.list()
# Produce consistent results by forcing some consistent ordering
# here. This will sort by name.
stable_children = sorted(children.items())
for (name, (child_node, child_metadata)) in stable_children:
stack.append(child_node)
def iter_storage_indexes(visit_assets):
"""
Get an iterator over storage indexes of all nodes visited by
``visit_assets``.
:param visit_assets: A one-argument function which takes a visit function
and calls it with all nodes to visit.
:return Deferred[list[bytes]]: A Deferred that fires with a list of
storage indexes from the visited nodes. The list is in an arbitrary
order and does not include duplicates if any nodes were visited more
than once.
"""
storage_indexes = set()
visit = storage_indexes.add
d = visit_assets(visit)
# Create some order now that we've ensured they're unique.
d.addCallback(lambda ignored: list(storage_indexes))
return d
@inlineCallbacks
def renew_leases(
visit_assets,
storage_broker,
secret_holder,
min_lease_remaining,
get_activity_observer,
now,
):
"""
Check the leases on a group of nodes for those which are expired or close
to expiring and renew such leases.
:param visit_assets: A one-argument callable which takes a visitor
function and calls it with the storage index of every node to check.
:param StorageFarmBroker storage_broker: A storage broker which can supply
the storage servers where the nodes should be checked.
:param SecretHolder secret_holder: The source of the renew secret for any
leases which require renewal.
:param timedelta min_lease_remaining: The minimum amount of time remaining
to allow on a lease without renewing it.
:param get_activity_observer: A no-argument callable which returns an
``ILeaseMaintenanceObserver``.
:param now: A no-argument function returning the current time, as a
datetime instance, for comparison against lease expiration time.
:return Deferred: A Deferred which fires when all visitable nodes have
been checked and any leases renewed which required it.
"""
activity = get_activity_observer()
storage_indexes = yield iter_storage_indexes(visit_assets)
renewal_secret = secret_holder.get_renewal_secret()
cancel_secret = secret_holder.get_cancel_secret()
servers = list(
server.get_storage_server() for server in storage_broker.get_connected_servers()
)
for server in servers:
# Consider parallelizing this.
yield renew_leases_on_server(
min_lease_remaining,
renewal_secret,
cancel_secret,
storage_indexes,
server,
activity,
now(),
)
activity.finish()
@inlineCallbacks
def renew_leases_on_server(
min_lease_remaining,
renewal_secret,
cancel_secret,
storage_indexes,
server,
activity,
now,
):
"""
Check leases on the shares for the given storage indexes on the given
storage server for those which are expired or close to expiring and renew
such leases.
:param timedelta min_lease_remaining: The minimum amount of time remaining
to allow on a lease without renewing it.
:param renewal_secret: See ``renew_lease``.
:param cancel_secret: See ``renew_lease``.
:param list[bytes] storage_indexes: The storage indexes to check.
:param StorageServer server: The storage server on which to check.
:param ILeaseMaintenanceObserver activity: An object which will receive
events allowing it to observe the lease maintenance activity.
:param datetime now: The current time for comparison against the lease
expiration time.
:return Deferred: A Deferred which fires after all storage indexes have
been checked and any leases that need renewal have been renewed.
"""
stats = yield server.stat_shares(storage_indexes)
for storage_index, stat_dict in zip(storage_indexes, stats):
if not stat_dict:
# The server has no shares for this storage index.
continue
# Keep track of what's been seen.
activity.observe([stat.size for stat in stat_dict.values()])
# Each share has its own leases and each lease has its own expiration
# time. For each share the server only returns the lease with the
# expiration time farthest in the future.
#
# There is no API for renewing leases on just *some* shares! It is
# all or nothing. So from the server's response we find the share
# that will have no active lease soonest and make our decision about
# whether to renew leases at this storage index or not based on that.
most_endangered = soonest_expiration(stat_dict.values())
if needs_lease_renew(min_lease_remaining, most_endangered, now):
yield renew_lease(renewal_secret, cancel_secret, storage_index, server)
def soonest_expiration(stats: Iterable[ShareStat]) -> ShareStat:
"""
:return: The share stat from ``stats`` with a lease which expires before
all others.
"""
return min(
stats,
key=lambda stat: stat.lease_expiration,
)
def renew_lease(renewal_secret, cancel_secret, storage_index, server):
"""
Renew the lease on the shares in one storage index on one server.
:param renewal_secret: A seed for the renewal secret hash calculation for
any leases which need to be renewed.
:param cancel_secret: A seed for the cancel secret hash calculation for
any leases which need to be renewed.
:param bytes storage_index: The storage index to operate on.
:param StorageServer server: The storage server to operate on.
:return Deferred: A Deferred that fires when the lease has been renewed.
"""
# See allmydata/immutable/checker.py, _get_renewal_secret
renew_secret = bucket_renewal_secret_hash(
file_renewal_secret_hash(
renewal_secret,
storage_index,
),
server.get_lease_seed(),
)
cancel_secret = bucket_cancel_secret_hash(
file_cancel_secret_hash(
cancel_secret,
storage_index,
),
server.get_lease_seed(),
)
# Use add_lease to add a new lease *or* renew an existing one with a
# matching renew secret.
return server.add_lease(
storage_index,
renew_secret,
cancel_secret,
)
def needs_lease_renew(min_lease_remaining, stat, now):
"""
Determine if a lease needs renewal.
:param timedelta min_lease_remaining: The minimum amount of time remaining
to allow on a lease without renewing it.
:param ShareStat stat: The metadata about a share to consider.
:param datetime now: The current time for comparison against the lease
expiration time.
:return bool: ``True`` if the lease needs to be renewed, ``False``
otherwise.
"""
remaining = datetime.utcfromtimestamp(stat.lease_expiration) - now
return remaining < min_lease_remaining
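# Worked example (illustrative; assumes ``ShareStat`` accepts these fields as
# keyword arguments): a lease expiring 10 days from ``now`` checked against a
# 30 day ``min_lease_remaining`` needs renewal, since 10 days < 30 days.
def _example_needs_lease_renew():  # pragma: no cover
    from calendar import timegm

    now = datetime(2022, 1, 1)
    expiration = timegm((now + timedelta(days=10)).timetuple())
    stat = ShareStat(size=1234, lease_expiration=expiration)
    assert needs_lease_renew(timedelta(days=30), stat, now)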
@attr.s
class _FuzzyTimerService(Service):
"""
A service to periodically, but not *too* periodically, run an operation.
:ivar operation: A no-argument callable to fuzzy-periodically run. It may
return a Deferred in which case the next run will not be scheduled
until the Deferred fires.
:ivar timedelta initial_interval: The amount of time to wait before the first
run of the operation.
:ivar sample_interval_distribution: A no-argument callable which returns a
number of seconds as a float giving the amount of time to wait before
the next run of the operation. It will be called each time the
operation completes.
:ivar IReactorTime reactor: A Twisted reactor to use to schedule runs of
the operation.
:ivar get_config: A function to call to return the service's
configuration. The configuration is represented as a service-specific
object.
"""
name = attr.ib()
operation = attr.ib()
initial_interval = attr.ib()
sample_interval_distribution = attr.ib()
get_config: Callable[[], Any] = attr.ib()
reactor = attr.ib()
def startService(self):
Service.startService(self)
self._call = self.reactor.callLater(
self.initial_interval.total_seconds(),
self._iterate,
)
def stopService(self):
self._call.cancel()
self._call = None
return Service.stopService(self)
def _iterate(self):
"""
Run the operation once and then schedule it to run again.
"""
d = maybeDeferred(self.operation)
d.addErrback(err, "Fuzzy timer service ({})".format(self.name))
d.addCallback(lambda ignored: self._schedule())
def _schedule(self):
"""
Schedule the next run of the operation.
"""
self._call = self.reactor.callLater(
self.sample_interval_distribution().total_seconds(),
self._iterate,
)
def lease_maintenance_service(
maintain_leases,
reactor,
last_run_path,
random,
lease_maint_config,
):
"""
Get an ``IService`` which will periodically run ``maintain_leases`` to
maintain leases on a set of root nodes and any nodes directly or
transitively reachable from them.
:param IReactorClock reactor: A Twisted reactor for scheduling renewal
activity.
:param FilePath last_run_path: A path containing the time (as an ISO8601
datetime string) at which lease maintenance last ran to inform an
adjustment to the first interval before running it again. If no file
exists at the path it is treated as though there has been no previous
run. The path will also be rewritten on each run to update this
value.
:param random: An object like ``random.Random`` which can be used as a
source of scheduling delay.
:param lease_maint_config: Configuration for the tweakable lease
maintenance parameters.
:param maintain_leases: A no-argument callable which performs a round of
lease-maintenance. The resulting service calls this periodically.
"""
interval_mean = lease_maint_config.crawl_interval_mean
interval_range = lease_maint_config.crawl_interval_range
halfrange = interval_range // 2
def sample_interval_distribution():
return timedelta(
seconds=random.uniform(
(interval_mean - halfrange).total_seconds(),
(interval_mean + halfrange).total_seconds(),
),
)
# Rather than an all-or-nothing last-run time we probably eventually want
# to have a more comprehensive record of the state when we were last
# interrupted. This would remove the unfortunate behavior of restarting
# from the beginning if we shut down during a lease scan. Shutting down
# during a lease scan becomes increasingly likely the more shares there
# are to check.
last_run = read_time_from_path(last_run_path)
if last_run is None:
initial_interval = sample_interval_distribution()
else:
initial_interval = calculate_initial_interval(
sample_interval_distribution,
last_run,
datetime.utcfromtimestamp(reactor.seconds()),
)
initial_interval = max(
initial_interval,
timedelta(0),
)
def get_lease_maint_config():
return lease_maint_config
return _FuzzyTimerService(
SERVICE_NAME,
lambda: bracket(
lambda: None,
lambda: write_time_to_path(
last_run_path,
datetime.utcfromtimestamp(reactor.seconds()),
),
maintain_leases,
),
initial_interval,
sample_interval_distribution,
get_lease_maint_config,
reactor,
)
@attr.s(frozen=True)
class LeaseMaintenanceConfig(object):
"""
Represent the configuration for a lease maintenance service.
:ivar crawl_interval_mean: The mean time between lease renewal checks.
:ivar crawl_interval_range: The range of the uniform distribution of lease
renewal checks (centered on ``crawl_interval_mean``).
:ivar min_lease_remaining: The minimum amount of time remaining to allow
on a lease without renewing it.
"""
crawl_interval_mean: timedelta = attr.ib()
crawl_interval_range: timedelta = attr.ib()
min_lease_remaining: timedelta = attr.ib()
@classmethod
def from_node_config(cls, node_config: Config) -> LeaseMaintenanceConfig:
"""
Return a ``LeaseMaintenanceConfig`` representing the values from the given
configuration object.
"""
return cls(
crawl_interval_mean=read_duration(
node_config,
"lease.crawl-interval.mean",
timedelta(days=26),
),
crawl_interval_range=read_duration(
node_config,
"lease.crawl-interval.range",
timedelta(days=4),
),
# The greater the min lease remaining time, the more of each lease
# period is "wasted" by renewing the lease before it has expired.
# The premise of ZKAPAuthorizer's use of leases is that if they
# expire, the storage server is free to reclaim the storage by
# forgetting about the share. However, since we do not know of
# any ZKAPAuthorizer-enabled storage grids which will garbage
# collect shares when leases expire, we have no reason not to use
# a zero duration here - for now.
#
# In the long run, storage servers must run with garbage
# collection enabled. Ideally, before that happens, we will have
# a system that doesn't involve trading of wasted lease time
# against reliability of leases being renewed before the shares
# are garbage collected.
#
# Also, since this is configuration, you can set it to something
# else if you want.
min_lease_remaining=read_duration(
node_config,
"lease.min-time-remaining",
timedelta(days=0),
),
)
def get_lease_duration(self):
"""
Return the minimum amount of time for which a newly granted lease will
ensure data is stored.
The actual lease duration is hard-coded in Tahoe-LAFS in many places.
However, we have local configuration that tells us when to renew a lease.
Since lease renewal discards any remaining time on a current lease and
puts a new lease period in its place, starting from the time of the
operation, the amount of time we effectively get from a lease is based on
Tahoe-LAFS' hard-coded lease duration and our own lease renewal
configuration.
Since this function only promises to return the *minimum* time a client
can expect a lease to last, we respond with a lease time shortened by our
configuration.
An excellent goal to pursue in the future would be to change the lease
renewal behavior in Tahoe-LAFS so that we can control the length of leases
and/or add to an existing lease instead of replacing it. The former
option would let us really configure lease durations. The latter would
let us stop worrying so much about what is lost by renewing a lease before
the last second of its validity period.
:return int: The minimum number of seconds for which a newly acquired
lease will be valid.
"""
# See lots of places in Tahoe-LAFS, eg src/allmydata/storage/server.py
upper_bound = 31 * 24 * 60 * 60
min_time_remaining = self.min_lease_remaining.total_seconds()
return int(upper_bound - min_time_remaining)
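# Worked example: Tahoe-LAFS' hard-coded lease period is 31 days, i.e.
# 31 * 24 * 60 * 60 = 2678400 seconds. With the default
# "lease.min-time-remaining" of 0 days this method returns 2678400; with,
# say, 3 days (259200 seconds) it would return 2678400 - 259200 = 2419200.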
def lease_maintenance_config_to_dict(
lease_maint_config: LeaseMaintenanceConfig,
) -> dict[str, str]:
return {
"lease.crawl-interval.mean": _format_duration(
lease_maint_config.crawl_interval_mean,
),
"lease.crawl-interval.range": _format_duration(
lease_maint_config.crawl_interval_range,
),
"lease.min-time-remaining": _format_duration(
lease_maint_config.min_lease_remaining,
),
}
def _format_duration(td: timedelta) -> str:
return str(int(td.total_seconds()))
def _parse_duration(duration_str: str) -> timedelta:
return timedelta(seconds=int(duration_str))
def lease_maintenance_config_from_dict(d: dict[str, str]) -> LeaseMaintenanceConfig:
return LeaseMaintenanceConfig(
crawl_interval_mean=_parse_duration(d["lease.crawl-interval.mean"]),
crawl_interval_range=_parse_duration(d["lease.crawl-interval.range"]),
min_lease_remaining=_parse_duration(d["lease.min-time-remaining"]),
)
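# Illustrative round-trip (not part of the original module): a
# ``LeaseMaintenanceConfig`` survives serialization through its dict form.
def _example_config_round_trip():  # pragma: no cover
    config = LeaseMaintenanceConfig(
        crawl_interval_mean=timedelta(days=26),  # 2246400 seconds
        crawl_interval_range=timedelta(days=4),  # 345600 seconds
        min_lease_remaining=timedelta(days=0),
    )
    d = lease_maintenance_config_to_dict(config)
    assert d["lease.crawl-interval.mean"] == "2246400"
    assert lease_maintenance_config_from_dict(d) == config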
def write_time_to_path(path, when):
"""
Write an ISO8601 datetime string to a file.
:param FilePath path: The path to a file to which to write the datetime
string.
:param datetime when: The datetime to write.
"""
path.setContent(when.isoformat().encode("utf-8"))
def read_time_from_path(path):
"""
Read an ISO8601 datetime string from a file.
:param FilePath path: The path to a file containing a datetime string.
:return: None if no file exists at the path. Otherwise, a datetime
instance giving the time represented in the file.
"""
try:
when = path.getContent()
except IOError as e:
if ENOENT == e.errno:
return None
raise
else:
return parse_datetime(when.decode("ascii"))
def visit_storage_indexes_from_root(visitor, get_root_nodes):
"""
An operation for ``lease_maintenance_service`` which applies the given
visitor to the nodes returned by ``get_root_nodes`` and all their children.
:param visitor: A one-argument callable which takes the traversal function
and which should call it as desired.
:param get_root_nodes: A no-argument callable which returns a list of
filesystem nodes (``IFilesystemNode``) at which traversal will begin.
:return: A no-argument callable to perform the visits.
"""
return lambda: visitor(
partial(
visit_storage_indexes,
# Make sure we call get_root_nodes each time to give us a chance
# to notice when it changes.
get_root_nodes(),
),
)
@implementer(ILeaseMaintenanceObserver)
class NoopMaintenanceObserver(object):
"""
A lease maintenance observer that does nothing.
"""
def observe(self, sizes):
pass
def finish(self):
pass
@implementer(ILeaseMaintenanceObserver)
@attr.s
class MemoryMaintenanceObserver(object):
"""
A lease maintenance observer that records observations in memory.
"""
observed = attr.ib(default=attr.Factory(list))
finished = attr.ib(default=False)
def observe(self, sizes):
self.observed.append(sizes)
def finish(self):
self.finished = True
def maintain_leases_from_root(
get_root_nodes,
storage_broker,
secret_holder,
min_lease_remaining,
progress,
get_now,
):
"""
An operation for ``lease_maintenance_service`` which visits the nodes
returned by ``get_root_nodes`` and all their children and renews their
leases if they have ``min_lease_remaining`` or less remaining on them.
:param get_root_nodes: A no-argument callable which returns the list of
Tahoe-LAFS filesystem nodes (``IFilesystemNode``) to use as the roots
of the node hierarchies to be maintained.
:param StorageFarmBroker storage_broker: The storage broker which can put
us in touch with storage servers where shares of the nodes to maintain
might be found.
:param SecretHolder secret_holder: The Tahoe-LAFS client node secret
holder which can give us the lease renewal secrets needed to renew
leases.
:param timedelta min_lease_remaining: The minimum amount of time remaining
to allow on a lease without renewing it.
:param progress: A no-argument callable which returns an
``ILeaseMaintenanceObserver`` to which lease maintenance activity will
be reported.
:param get_now: A no-argument callable that returns the current time as a
``datetime`` instance.
:return: A no-argument callable to perform the maintenance.
"""
def visitor(visit_assets):
return renew_leases(
visit_assets,
storage_broker,
secret_holder,
min_lease_remaining,
progress,
get_now,
)
return visit_storage_indexes_from_root(
visitor,
get_root_nodes,
)
def calculate_initial_interval(sample_interval_distribution, last_run, now):
"""
Determine how long to wait before performing an initial (for this process)
scan for aging leases.
:param sample_interval_distribution: See ``_FuzzyTimerService``.
:param datetime last_run: The time of the last scan.
:param datetime now: The current time.
"""
since_last_run = now - last_run
initial_interval = sample_interval_distribution() - since_last_run
return initial_interval | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/lease_maintenance.py | lease_maintenance.py |
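# Worked example: with a sampled interval of 26 days and a last run 20 days
# ago, the initial interval is 6 days. A sample smaller than the elapsed time
# yields a negative interval, which ``lease_maintenance_service`` clamps to
# ``timedelta(0)`` so that the first scan happens immediately.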
from __future__ import annotations
"""
A system for replicating local SQLite3 database state to remote storage.
Theory of Operation
===================
A function to wrap a ``sqlite3.Connection`` in a new type is provided. This
new type provides facilities for accomplishing two goals:
* It (can someday) present an expanded connection interface which includes
the ability to switch the database into "replicated" mode. This is an
application-facing interface meant to be used when the application is ready
to discharge its responsibilities in the replication process.
* It (can someday) expose the usual cursor interface wrapped around the usual
cursor behavior combined with extra logic to record statements which change
the underlying database (DDL and DML statements). This recorded data then
feeds into the above replication process once it is enabled.
An application's responsibilities in the replication process are to arrange
for remote storage of "snapshots" and "event streams". See the
replication/recovery design document for details of these concepts.
Once replication has been enabled, the application (can someday be) informed
whenever the event stream changes (respecting database transactionality) and
data can be shipped to remote storage as desired.
It is essential to good replication performance that once replication is
enabled all database-modifying actions are captured in the event stream. This
is the reason for providing a ``sqlite3.Connection``-like object for use by
application code rather than a separate side-car interface: it minimizes the
opportunities for database changes which are overlooked by this replication
system.
"""
__all__ = [
"ReplicationAlreadySetup",
"fail_setup_replication",
"setup_tahoe_lafs_replication",
"with_replication",
"statements_to_snapshot",
"connection_to_statements",
"snapshot",
]
import os
import re
from enum import Enum
from io import BytesIO
from sqlite3 import Connection as _SQLite3Connection
from sqlite3 import Cursor as _SQLite3Cursor
from typing import (
IO,
Any,
Awaitable,
Callable,
ClassVar,
Generator,
Iterable,
Iterator,
Optional,
Protocol,
Sequence,
)
import cbor2
from attrs import Factory, define, field, frozen
from compose import compose
from eliot import log_call
from twisted.application.service import IService, Service
from twisted.internet.defer import CancelledError, Deferred, DeferredQueue, succeed
from twisted.logger import Logger
from twisted.python.filepath import FilePath
from twisted.python.lockfile import FilesystemLock
from ._types import CapStr
from .config import REPLICA_RWCAP_BASENAME, Config
from .sql import Connection, Cursor, SQLRuntimeType, SQLType, statement_mutates
from .tahoe import (
DataProvider,
DirectoryEntry,
ITahoeClient,
attenuate_writecap,
capability_from_string,
)
# function which can set remote ZKAPAuthorizer state.
Uploader = Callable[[str, DataProvider], Awaitable[None]]
# function which can remove entries from ZKAPAuthorizer state.
Pruner = Callable[[Callable[[str], bool]], Awaitable[None]]
# functions which can list all entries in ZKAPAuthorizer state
Lister = Callable[[], Awaitable[list[str]]]
EntryLister = Callable[[], Awaitable[dict[str, DirectoryEntry]]]
class SnapshotPolicy(Protocol):
"""
Encode policy rules about when to take and upload a new snapshot.
"""
def should_snapshot(self, snapshot_size: int, replica_sizes: list[int]) -> bool:
"""
Given the size of a new snapshot and the sizes of the files making up an
existing replica (snapshot and event streams), is now a good time to
take a new snapshot?
"""
SNAPSHOT_NAME = "snapshot"
@frozen
class Replica:
"""
Manage a specific replica.
"""
upload: Uploader
prune: Pruner
entry_lister: EntryLister
async def list(self) -> list[str]:
return list(await self.entry_lister())
class ReplicationJob(Enum):
"""
The kinds of jobs that the Replication queue knows about
:ivar startup: The job that is run once when the replication service
starts and which is responsible for inspecting local and remote state
to determine if any actions are immediately necessary (even before any
further local changes are made).
:ivar event_stream: The job to upload a new event stream object.
:ivar snapshot: The job to upload a new snapshot object and prune
now-obsolete event stream objects.
:ivar consider_snapshot: The job to inspect replica event stream and
snapshot state and potentially schedule a new snapshot which will
allow pruning of existing event streams.
"""
startup = 1
event_stream = 2
snapshot = 3
consider_snapshot = 4
@frozen
class Change:
"""
Represent an item in a replication event stream
:ivar sequence: The sequence number of this event.
:ivar statement: The SQL statement associated with this event.
:ivar arguments: Any arguments for the SQL statement.
:ivar important: Whether this change was "important" or not.
"""
sequence: int
statement: str
arguments: Sequence[SQLType] = field(converter=tuple)
important: bool
@arguments.validator
def _validate_arguments(self, attribute, value) -> None:
"""
Require that the value has as elements only values are legal SQL values.
:note: attrs validators run after attrs converters.
"""
if all(isinstance(o, SQLRuntimeType) for o in value):
return None
raise ValueError("sequence contains values incompatible with SQL")
@frozen
class EventStream:
"""
A series of database operations represented as `Change` instances.
:ivar version: An identifier for the schema of the serialized form of this
event stream. This will appear inside the serialized form. A change
to the schema will be accompanied with an increment to this value.
"""
changes: Sequence[Change] = field(converter=tuple)
version: ClassVar[int] = 1
def highest_sequence(self) -> Optional[int]:
"""
:returns: the highest sequence number in this EventStream (or
None if there are no events)
"""
if not self.changes:
return None
return max(change.sequence for change in self.changes)
def to_bytes(self) -> IO[bytes]:
"""
:returns: a producer of bytes representing this EventStream.
"""
return BytesIO(
cbor2.dumps(
{
"version": self.version,
"events": tuple(
(
event.sequence,
event.statement,
event.arguments,
event.important,
)
for event in self.changes
),
}
)
)
@classmethod
def from_bytes(cls, stream: IO[bytes]) -> EventStream:
"""
:returns EventStream: an instance of EventStream from the given
bytes (which should have been produced by a prior call to
``to_bytes``)
"""
data = cbor2.load(stream)
serial_version = data.get("version", None)
if serial_version != cls.version:
raise ValueError(
f"Unknown serialized event stream version {serial_version}"
)
return cls(
changes=[
# List comprehension has incompatible type List[Change]; expected List[_T_co]
# https://github.com/python-attrs/attrs/issues/519
Change(*args) # type: ignore
for args in data["events"]
]
)
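# Illustrative round-trip (not part of the original module): serialize an
# event stream and read it back.
def _example_event_stream_round_trip():  # pragma: no cover
    stream = EventStream(
        changes=[
            Change(
                sequence=1,
                statement="INSERT INTO [foo] VALUES (?)",
                arguments=(1,),
                important=False,
            ),
        ]
    )
    assert EventStream.from_bytes(stream.to_bytes()) == stream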
class AlreadySettingUp(Exception):
"""
Another setup attempt is currently in progress.
"""
@define(auto_exc=False)
class ReplicationAlreadySetup(Exception):
"""
An attempt was made to set up replication but it is already set up.
:ivar cap_str: The read-only capability of the replica that was already
set up.
"""
cap_str: str
async def fail_setup_replication():
"""
A replication setup function that always fails.
"""
raise Exception("Test not set up for replication")
async def setup_tahoe_lafs_replication(client: ITahoeClient) -> str:
"""
Configure the ZKAPAuthorizer plugin that lives in the Tahoe-LAFS node with
the given configuration to replicate its state onto Tahoe-LAFS storage
servers using that Tahoe-LAFS node.
"""
# Find the configuration path for this node's replica.
config_path = client.get_private_path(REPLICA_RWCAP_BASENAME)
# Take an advisory lock on the configuration path to avoid concurrency
# shenanigans.
config_lock = FilesystemLock(config_path.asTextMode().path + ".lock")
if not config_lock.lock():
raise AlreadySettingUp()
try:
# Check to see if there is already configuration.
if config_path.exists():
rwcap_obj = capability_from_string(config_path.getContent())
rocap_str = rwcap_obj.get_readonly().to_string().decode("ascii")
raise ReplicationAlreadySetup(rocap_str)
# Create a new mutable directory on the grid to hold the replica.
rw_cap = await client.make_directory()
# Store the resulting write-cap in the node's private directory
config_path.setContent(rw_cap.encode("ascii"))
finally:
# On success and failure, release the lock since we're done with the
# file for now.
config_lock.unlock()
# Attenuate it to a read-cap
rocap = attenuate_writecap(rw_cap)
# Return the read-cap
return rocap
def is_replication_setup(config: Config) -> bool:
"""
:return: ``True`` if and only if replication has previously been setup for
the Tahoe-LAFS node associated with the given configuration.
"""
# Find the configuration path for this node's replica.
return FilePath(config.get_private_path(REPLICA_RWCAP_BASENAME)).exists()
def get_replica_rwcap(config: Config) -> CapStr:
"""
:return: a mutable directory capability for our replica.
:raises: Exception if replication is not setup
"""
rwcap_file = FilePath(config.get_private_path(REPLICA_RWCAP_BASENAME))
return rwcap_file.getContent().decode("ascii")
@define
class _Important:
"""
A context-manager to set and unset the ._important flag on a
_ReplicationCapableCursor
"""
_replication_cursor: _ReplicationCapableCursor
def __enter__(self) -> None:
self._replication_cursor._important = True
def __exit__(self, *args) -> None:
self._replication_cursor._important = False
return None
def with_replication(
connection: _SQLite3Connection, enable_replication: bool
) -> _ReplicationCapableConnection:
"""
Wrap the given connection in a layer which is capable of entering a
"replication mode". In replication mode, the wrapper stores all changes
made through the connection so that they are available to be replicated by
another component. In normal mode, changes are not stored.
:param connection: The SQLite3 connection to wrap.
:param enable_replication: If ``True`` then the wrapper is placed in
"replication mode" initially. Otherwise it is not but it can be
switched into that mode later.
:return: The wrapper object.
"""
return _ReplicationCapableConnection(connection, enable_replication)
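# Illustrative usage (not part of the original module): wrap an in-memory
# database and switch replication on once the application is ready.
def _example_with_replication():  # pragma: no cover
    import sqlite3

    conn = with_replication(sqlite3.connect(":memory:"), False)
    cursor = conn.cursor()
    cursor.execute("CREATE TABLE [foo] (a INTEGER)", ())
    conn.enable_replication()  # mutations are recorded from here on
    cursor.execute("INSERT INTO [foo] VALUES (?)", (1,))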
Mutation = tuple[bool, str, Iterable[tuple[SQLType, ...]]]
MutationObserver = Callable[[_SQLite3Cursor, Iterable[Mutation]], Callable[[], None]]
@define
class _ReplicationCapableConnection:
"""
Wrap a ``sqlite3.Connection`` to provide additional snapshot- and
streaming replication-related features.
All of this type's methods are intended to behave the same way as
``sqlite3.Connection``\ 's methods except they may also add some
additional functionality to support replication.
:ivar _replicating: ``True`` if this connection is currently in
replication mode and is recording all executed DDL and DML statements,
``False`` otherwise.
"""
# the "real" / normal sqlite connection
_conn: _SQLite3Connection
_replicating: bool
_observers: tuple[MutationObserver, ...] = Factory(tuple)
_mutations: list[Mutation] = Factory(list)
def enable_replication(self) -> None:
"""
Turn on replication support.
"""
self._replicating = True
def add_mutation_observer(self, fn: MutationObserver) -> None:
"""
Add another observer of changes made through this connection.
:param fn: An object to call after any transaction with changes is
committed on this connection.
"""
self._observers = self._observers + (fn,)
def iterdump(self) -> Iterator[str]:
"""
:return: SQL statements which can be used to reconstruct the database
state.
"""
return self._conn.iterdump()
def close(self) -> None:
return self._conn.close()
def __enter__(self) -> _ReplicationCapableConnection:
self._conn.__enter__()
return self
def __exit__(
self,
exc_type: Optional[type],
exc_value: Optional[BaseException],
exc_tb: Optional[Any],
) -> bool:
propagate = self._conn.__exit__(exc_type, exc_value, exc_tb)
if exc_type is None:
# There was no exception, signal observers that a change has been
# committed.
post_txn_fns: list[Callable[[], None]] = []
with self._conn:
curse = self._conn.cursor()
curse.execute("BEGIN IMMEDIATE TRANSACTION")
post_txn_fns.extend(self._maybe_signal_observers(curse))
for f in post_txn_fns:
f()
# Respect the underlying propagation decision.
return propagate
def _maybe_signal_observers(
self, cursor
) -> Generator[Callable[[], None], None, None]:
"""
If there are recorded mutations, deliver them to each of the observers and
then forget about them.
:return: A generator of the return values of the observers.
"""
if self._mutations:
to_signal = self._mutations
self._mutations = list()
for ob in self._observers:
yield ob(cursor, to_signal)
def cursor(self, factory: Optional[type] = None) -> _ReplicationCapableCursor:
"""
Get a replication-capable cursor for this connection.
"""
kwargs = {}
if factory is not None:
kwargs["factory"] = factory
cursor = self._conn.cursor(**kwargs)
# this cursor honors the ._replicating flag in this instance
return _ReplicationCapableCursor(cursor, self)
@define
class _ReplicationCapableCursor:
"""
Wrap a ``sqlite3.Cursor`` to provide additional streaming
replication-related features.
All of this type's attributes and methods are intended to behave the same
way as ``sqlite3.Cursor``\ 's methods except they may also add some
additional functionality to support replication.
"""
_cursor: _SQLite3Cursor
_connection: _ReplicationCapableConnection
# true while statements are "important" (which is passed along to
# the observers and interpreted as being "important data that the
# user will be interested in preserving")
_important: bool = field(init=False, default=False)
@property
def lastrowid(self):
return self._cursor.lastrowid
@property
def rowcount(self):
return self._cursor.rowcount
def close(self):
return self._cursor.close()
def execute(self, statement: str, row: Iterable[SQLType] = ()) -> Cursor:
"""
sqlite3's ``Cursor.execute`` API, recording the statement for replication
when it mutates the database and replication is enabled.
:param row: the arguments to bind to the statement
"""
assert isinstance(row, tuple)
self._cursor.execute(statement, row)
if self._connection._replicating and statement_mutates(statement):
# note that this interface is for multiple statements, so
# we turn our single row into a one-tuple
self._connection._mutations.append((self._important, statement, (row,)))
return self
def fetchall(self):
return self._cursor.fetchall()
def fetchmany(self, n):
return self._cursor.fetchmany(n)
def fetchone(self):
return self._cursor.fetchone()
def executemany(self, statement: str, rows: Iterable[Any]) -> Cursor:
self._cursor.executemany(statement, rows)
if self._connection._replicating and statement_mutates(statement):
self._connection._mutations.append((self._important, statement, rows))
return self
def important(self) -> _Important:
"""
Create a new context-manager that -- while active -- sets the
'important' flag to true and resets it afterwards.
"""
return _Important(self)
def statements_to_snapshot(statements: Iterator[str]) -> bytes:
"""
Serialize an iterator of SQL statements into the CBOR-encoded snapshot
format. Use ``connection_to_statements`` to produce a consistent,
self-contained statement stream from a live connection.
"""
return cbor2.dumps({"version": 1, "statements": [x for x in statements]})
def connection_to_statements(connection: Connection) -> Iterator[str]:
"""
Create an iterator of SQL statements as strings representing a consistent,
self-contained snapshot of the database reachable via the given
connection.
"""
return iter(connection.iterdump())
# Convenience API to dump statements and encode them for storage.
snapshot: Callable[[Connection], bytes] = compose(
statements_to_snapshot, connection_to_statements
)
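# Illustrative: ``snapshot(conn)`` is equivalent to
# ``statements_to_snapshot(connection_to_statements(conn))`` and produces the
# CBOR-encoded bytes of a consistent dump of the database.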
async def tahoe_lafs_uploader(
client: ITahoeClient,
recovery_cap: str,
get_snapshot_data: DataProvider,
entry_name: str,
) -> None:
"""
Upload a replica to Tahoe, linking the result into the given recovery
mutable capability under the given ``entry_name``.
"""
snapshot_immutable_cap = await client.upload(get_snapshot_data)
await client.link(recovery_cap, entry_name, snapshot_immutable_cap)
def get_tahoe_lafs_direntry_uploader(
client: ITahoeClient,
directory_mutable_cap: str,
) -> Callable[[str, DataProvider], Awaitable[None]]:
"""
Bind a Tahoe client to a mutable directory in a callable that will
upload some data and link it into the mutable directory under the
given name.
:return: A callable that will upload some data as the latest replica
snapshot. The data isn't given directly; instead a zero-argument
callable supplies it, to facilitate retrying.
"""
async def upload(entry_name: str, get_data_provider: DataProvider) -> None:
await tahoe_lafs_uploader(
client, directory_mutable_cap, get_data_provider, entry_name
)
return upload
def get_tahoe_lafs_direntry_pruner(
client: ITahoeClient,
directory_mutable_cap: str,
) -> Callable[[Callable[[str], bool]], Awaitable[None]]:
"""
Bind a Tahoe client to a mutable directory in a callable that will
unlink some entries. Which entries to unlink are controlled by a predicate.
:return: A callable that will unlink some entries given a
predicate. The predicate is given a filename inside the mutable
directory to consider.
"""
async def maybe_unlink(predicate: Callable[[str], bool]) -> None:
"""
For each child of `directory_mutable_cap` delete it iff the
predicate returns True for that name
"""
entries = await client.list_directory(directory_mutable_cap)
for name in entries.keys():
if predicate(name):
await client.unlink(directory_mutable_cap, name)
return maybe_unlink
def get_tahoe_lafs_direntry_lister(
client: ITahoeClient, directory_mutable_cap: str
) -> EntryLister:
"""
Bind a Tahoe client to a mutable directory in a callable that will list
the entries of that directory.
"""
async def lister() -> dict[str, DirectoryEntry]:
entries = await client.list_directory(directory_mutable_cap)
return {
name: DirectoryEntry(kind, entry.get("size", 0))
for name, (kind, entry) in entries.items()
}
return lister
def get_tahoe_lafs_direntry_replica(
client: ITahoeClient, directory_mutable_cap: str
) -> Replica:
"""
Get an object that can interact with a replica stored in a Tahoe-LAFS
mutable directory.
"""
uploader = get_tahoe_lafs_direntry_uploader(client, directory_mutable_cap)
pruner = get_tahoe_lafs_direntry_pruner(client, directory_mutable_cap)
lister = get_tahoe_lafs_direntry_lister(client, directory_mutable_cap)
return Replica(uploader, pruner, lister)
def add_events(
cursor: _SQLite3Cursor,
events: Iterable[tuple[str, Sequence[SQLType]]],
important: bool,
) -> None:
"""
Add some new changes to the event-log.
"""
sql_args = []
for sql, args in events:
assert all(
isinstance(a, SQLRuntimeType) for a in args
), f"{args} contains non-SQL value"
sql_args.append((sql, cbor2.dumps(args), important))
cursor.executemany(
"""
INSERT INTO [event-stream]([statement], [serialized_arguments], [important])
VALUES (?, ?, ?)
""",
sql_args,
)
def get_events(conn: _SQLite3Connection) -> EventStream:
"""
Return all events currently in our event-log.
"""
with conn:
cursor = conn.cursor()
cursor.execute(
"""
SELECT [sequence-number], [statement], [serialized_arguments], [important]
FROM [event-stream]
"""
)
rows = cursor.fetchall()
return EventStream(
changes=[
# List comprehension has incompatible type List[Change]; expected List[_T_co]
# https://github.com/python-attrs/attrs/issues/519
Change(seq, stmt, cbor2.loads(arguments), important) # type: ignore
for seq, stmt, arguments, important in rows
]
)
def prune_events_to(conn: _SQLite3Connection, sequence_number: int) -> None:
"""
Remove all events <= sequence_number
"""
with conn:
cursor = conn.cursor()
cursor.execute(
"""
DELETE FROM [event-stream]
WHERE [sequence-number] <= (?)
""",
(sequence_number,),
)
cursor.fetchall()
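# Illustrative lifecycle of the local event-log (not part of the original
# module; assumes the ZKAPAuthorizer [event-stream] table, with its
# AUTOINCREMENT [sequence-number] column, already exists in ``conn``):
def _example_event_log_lifecycle(conn):  # pragma: no cover
    with conn:
        add_events(
            conn.cursor(),
            [("INSERT INTO [foo] VALUES (?)", (1,))],
            important=False,
        )
    events = get_events(conn)
    high = events.highest_sequence()
    # ... upload ``events.to_bytes()`` to the replica here ...
    prune_events_to(conn, high)  # forget what has been replicated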
@frozen
class AccumulatedChanges:
"""
A summary of some changes that have been made.
:ivar important: Are any of these "important" changes?
:ivar size: The approximate size in bytes to represent all of the changes.
"""
important: bool
size: int
@classmethod
def no_changes(cls) -> AccumulatedChanges:
"""
Create an ``AccumulatedChanges`` that represents no changes.
"""
return cls(False, 0)
@classmethod
def from_connection(cls, connection: _SQLite3Connection) -> AccumulatedChanges:
"""
Load information about unreplicated changes from the database.
"""
# this size is larger than what we would have computed via
# `from_statements` which only counts the statement-sizes .. but
# maybe fine? (also could fix by just accumulating the
# statement-sizes instead below)
events = get_events(connection)
data = events.to_bytes()
size = data.seek(0, os.SEEK_END)
any_important = any(change.important for change in events.changes)
return cls(any_important, size)
@classmethod
def from_statements(
cls,
important: bool,
statements: Iterable[tuple[str, Sequence[SQLType]]],
) -> AccumulatedChanges:
"""
Load information about unreplicated changes from SQL statements giving
those changes.
"""
# note that we're ignoring a certain amount of size overhead here: the
# _actual_ size will be some CBOR information and the sequence number,
# although the statement text should still dominate.
# XXX Fix the size calculation
return cls(important, sum(len(sql) for (sql, _) in statements))
def __add__(self, other: AccumulatedChanges) -> AccumulatedChanges:
return AccumulatedChanges(
self.important or other.important, self.size + other.size
)
def event_stream_name(high_seq: int) -> str:
"""
Construct the basename of the event stream object containing the given
highest sequence number.
"""
return f"event-stream-{high_seq}"
@define
class _ReplicationService(Service):
"""
Perform all activity related to maintaining a remote replica of the local
ZKAPAuthorizer database.
If this service is running for a database then the database is in
replication mode and changes will be uploaded.
:ivar _connection: A connection to the database being replicated.
:ivar _replicating: The long-running replication operation. This is never
expected to complete but it will be cancelled when the service stops.
"""
name = "replication-service" # type: ignore # Service assigns None, screws up type inference
_logger = Logger()
_connection: _ReplicationCapableConnection = field()
_replica: Replica
_snapshot_policy: SnapshotPolicy
_replicating: Optional[Deferred] = field(init=False, default=None)
_changes: AccumulatedChanges = AccumulatedChanges.no_changes()
_jobs: DeferredQueue = field(factory=DeferredQueue)
@property
def _unreplicated_connection(self):
"""
A normal SQLite3 connection object, changes made via which will not be
replicated.
"""
return self._connection._conn
def startService(self) -> None:
super().startService()
# Register ourselves as a change observer (first! we don't want to
# miss anything) and then put the database into replication mode so
# that there are recorded events for us to work with.
self._connection.add_mutation_observer(self.observed_event)
self._connection.enable_replication()
# Reflect whatever state is left over in the database from previous
# efforts.
self._changes = AccumulatedChanges.from_connection(
self._unreplicated_connection
)
self.queue_job(ReplicationJob.startup)
# Start the actual work of reacting to changes by uploading them (as
# appropriate).
self._replicating = Deferred.fromCoroutine(self._replicate())
async def _replicate(self) -> None:
"""
React to changes by replicating them to remote storage.
"""
try:
await self.wait_for_uploads()
except CancelledError:
# Ignore cancels; this will be the normal way we quit -- see
# stopService.
pass
except Exception:
# If something besides a cancel happens, at least make it visible.
self._logger.failure("unexpected wait_for_uploads error")
return None
def queue_job(self, job: ReplicationJob) -> None:
"""
Queue a job, if it is not already queued, to be executed after any other
queued jobs.
"""
if job not in self._jobs.pending:
self._jobs.put(job)
@log_call(action_type="zkapauthorizer:replicate:queue-event-upload")
def queue_event_upload(self) -> None:
"""
Request an event-stream upload of outstanding events.
"""
self.queue_job(ReplicationJob.event_stream)
@log_call(action_type="zkapauthorizer:replicate:queue-snapshot-upload")
def queue_snapshot_upload(self) -> None:
"""
Request that an upload of a new snapshot occur. Stale
event-streams will also be pruned after the snapshot is
successfully uploaded.
"""
self.queue_job(ReplicationJob.snapshot)
async def wait_for_uploads(self) -> None:
"""
An infinite async loop that processes uploads of event-streams or
snapshots
"""
while True:
job = await self._jobs.get()
if job == ReplicationJob.event_stream:
await self._do_one_event_upload()
elif job == ReplicationJob.snapshot:
await self._do_one_snapshot_upload()
elif job == ReplicationJob.consider_snapshot:
await self._do_consider_snapshot()
elif job == ReplicationJob.startup:
await self._do_startup()
else:
raise Exception("internal error") # pragma: nocover
async def _do_startup(self) -> None:
"""
Check local and remote state to determine if there is any work that should
be done immediately.
Currently, this will upload a snapshot if none exists in the replica,
or upload an event stream if there are events that warrant immediate
upload.
"""
if await self.should_upload_snapshot():
self.queue_snapshot_upload()
elif self.should_upload_eventstream(self._changes):
self.queue_event_upload()
@log_call(action_type="zkapauthorizer:replicate:snapshot-upload")
async def _do_one_snapshot_upload(self) -> None:
"""
Perform a single snapshot upload, including pruning event-streams
from the replica that are no longer relevant.
"""
# extract sequence-number and snapshot data
seqnum = 1
rows = (
self._connection.cursor()
.execute(
"SELECT seq FROM sqlite_sequence WHERE name = 'event-stream'", tuple()
)
.fetchall()
)
if len(rows):
seqnum = int(rows[0][0])
snap = snapshot(self._connection)
# upload snapshot
await self._replica.upload("snapshot", lambda: BytesIO(snap))
# remove local event history (that should now be encapsulated
# by the snapshot we just uploaded)
prune_events_to(self._connection._conn, seqnum)
# if we crash here, there will be extra event-stream objects
# in the replica. This will be fixed correctly upon our next
# snapshot upload. The extra event-stream objects will be
# ignored by the recovery code.
# prune old events from the replica
def is_old_eventstream(fname: str) -> bool:
"""
:returns: True if the `fname` is an event-stream object and the
sequence number is less than or equal to our snapshot's
maximum sequence.
"""
m = re.match("event-stream-([0-9]*)", fname)
if m:
seq = int(m.group(1))
if seq <= seqnum:
return True
return False
await self._replica.prune(is_old_eventstream)
@log_call(action_type="zkapauthorizer:replicate:event-upload")
async def _do_one_event_upload(self) -> None:
"""
Process a single upload of all current events and then delete them
from our database.
"""
events = get_events(self._unreplicated_connection)
high_seq = events.highest_sequence()
# if this is None there are no events at all
if high_seq is None:
return
# otherwise, upload the events we found.
name = event_stream_name(high_seq)
await self._replica.upload(name, events.to_bytes)
# then discard the uploaded events from the local database.
prune_events_to(self._unreplicated_connection, high_seq)
# Arrange to examine replica state soon to determine whether taking a
# new snapshot and pruning existing event streams is useful.
self.queue_job(ReplicationJob.consider_snapshot)
async def _do_consider_snapshot(self) -> None:
"""
Inspect local and remote state to decide if the cost of taking and
uploading a new snapshot is worth the resulting savings in storage.
"""
local_size = await self._new_snapshot_size()
replica_size = await self._replica_size()
if self._snapshot_policy.should_snapshot(local_size, replica_size):
self.queue_snapshot_upload()
async def _new_snapshot_size(self) -> int:
"""
Measure the size of a snapshot of the current database state, in bytes.
"""
return len(snapshot(self._connection))
async def _replica_size(self) -> list[int]:
"""
Retrieve the size of all the files that are part of the current on-grid
replica.
"""
entries = await self._replica.entry_lister()
return [
entry.size
for (name, entry) in entries.items()
if entry.kind == "filenode"
and (name == "snapshot" or name.startswith("event-stream-"))
]
def stopService(self) -> Deferred[None]:
"""
Cancel the replication operation and then wait for it to complete.
"""
super().stopService()
replicating = self._replicating
if replicating is None:
return succeed(None)
self._replicating = None
replicating.cancel()
return replicating
def observed_event(
self,
unobserved_cursor: _SQLite3Cursor,
all_changes: Iterable[Mutation],
) -> Callable[[], None]:
"""
A mutating SQL statement was observed by the cursor. This is like
the executemany interface: there is always a list of argument rows. For
a single statement this is called with len(args) == 1.
:param all_changes: 3-tuples of (important, statement, args)
where important is whether this should trigger an
immediate upload; statement is the SQL statement; and args
are the arguments for the SQL.
"""
# A mutation contains one statement and one or more rows of arguments
# that go with it. We're going to generate an event per
# statement/argument pair - so "unroll" those rows and pair each
# individual argument tuple with its statement.
events = []
any_important = False
for (important, sql, manyargs) in all_changes:
any_important = any_important or important
for args in manyargs:
events.append((sql, args))
add_events(unobserved_cursor, events, any_important)
changes = AccumulatedChanges.from_statements(any_important, events)
self._changes = self._changes + changes
if self.should_upload_eventstream(self._changes):
return self._complete_upload
else:
return lambda: None
def _complete_upload(self) -> None:
"""
This is called after the transaction closes (because we return it
from our observer function). See
_ReplicationCapableConnection.__exit__
"""
self.queue_event_upload()
self._changes = AccumulatedChanges.no_changes()
async def should_upload_snapshot(self) -> bool:
"""
:returns: True if there is no remote snapshot
"""
entries = await self._replica.list()
return SNAPSHOT_NAME not in entries
def should_upload_eventstream(self, changes: AccumulatedChanges) -> bool:
"""
:returns: True if we have accumulated enough statements to upload
an event-stream record.
"""
return changes.important or changes.size >= 570000
def replication_service(
replicated_connection: _ReplicationCapableConnection,
replica: Replica,
snapshot_policy: SnapshotPolicy,
) -> IService:
"""
Return a service which implements the replication process documented in
the ``backup-recovery`` design document.
"""
return _ReplicationService(
connection=replicated_connection,
replica=replica,
snapshot_policy=snapshot_policy,
) | zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/replicate.py | replicate.py |
__all__ = [
"RecoveryStages",
"RecoveryState",
"SetState",
"Downloader",
"StatefulRecoverer",
"make_fail_downloader",
"noop_downloader",
]
from collections.abc import Awaitable
from enum import Enum, auto
from functools import partial
from io import BytesIO
from typing import Callable, Iterable, Iterator, NoReturn, Optional, Sequence
import cbor2
from attrs import define, field
from .replicate import SNAPSHOT_NAME, EventStream, statements_to_snapshot
from .sql import Cursor, escape_identifier
from .tahoe import CapStr, DataProvider, ITahoeClient
class SnapshotMissing(Exception):
"""
No snapshot was found in the replica directory.
"""
class RecoveryStages(Enum):
"""
Constants representing the different stages a recovery process may have
reached.
:ivar inactive: The recovery system has not been activated. No recovery
has yet been attempted.
:ivar succeeded: The recovery system has successfully recovered state from
a replica. Recovery is finished. Since state now exists in the local
database, the recovery system cannot be re-activated.
:ivar download_failed: The recovery system has definitively failed in its
attempt to download a replica. Recovery will progress no further. It
is undefined what state now exists in the local database.
:ivar import_failed: The recovery system has definitively failed in its
attempt to import a downloaded replica. Recovery will progress no
further. It is undefined what state now exists in the local database.
"""
inactive = auto()
started = auto()
inspect_replica = auto()
downloading = auto()
importing = auto()
succeeded = auto()
download_failed = auto()
import_failed = auto()
@define(frozen=True)
class RecoveryState:
"""
Describe the state of an attempt at recovery.
:ivar stage: The recovery process progresses through different stages.
This indicates the point that progress has reached.
:ivar failure_reason: If the recovery failed then a human-meaningful
(maybe) string giving details about why.
"""
stage: RecoveryStages = RecoveryStages.inactive
failure_reason: Optional[str] = None
def marshal(self) -> dict[str, Optional[str]]:
return {"stage": self.stage.name, "failure-reason": self.failure_reason}
# A function for reporting a change in the state of a recovery attempt.
SetState = Callable[[RecoveryState], None]
Replica = tuple[
# The snapshot
DataProvider,
# All of the event streams that could possibly -- but do not necessarily
# -- apply on top of the snapshot. In normal operation it is likely that
# they will all apply on top of it but no metadata has been checked yet to
# verify this.
Sequence[DataProvider],
]
# An object which can retrieve remote ZKAPAuthorizer state.
Downloader = Callable[[SetState], Awaitable[Replica]]
@define
class StatefulRecoverer:
"""
An ``IRecoverer`` that exposes changing state as it progresses through the
recovery process.
"""
_state: RecoveryState = RecoveryState(stage=RecoveryStages.inactive)
_listeners: Iterable[Callable[[RecoveryState], object]] = field(default=())
async def recover(
self,
download: Downloader,
cursor: Cursor,
) -> None:
"""
Begin the recovery process.
:param download: A callable which can be used to retrieve a replica.
:param cursor: A database cursor which can be used to populate the
database with recovered state.
"""
if self._state.stage != RecoveryStages.inactive:
return
self._set_state(RecoveryState(stage=RecoveryStages.started))
try:
(snapshot, event_streams) = await download(self._set_state)
except Exception as e:
self._set_state(
RecoveryState(
stage=RecoveryStages.download_failed, failure_reason=str(e)
)
)
return
try:
recover(snapshot, event_streams, cursor)
except Exception as e:
self._set_state(
RecoveryState(stage=RecoveryStages.import_failed, failure_reason=str(e))
)
return
self._set_state(RecoveryState(stage=RecoveryStages.succeeded))
def _set_state(self, state: RecoveryState) -> None:
"""
Change the recovery state.
"""
self._state = state
for listener in self._listeners:
listener(state)
def state(self) -> RecoveryState:
"""
Get the latest recovery state.
"""
return self._state
def make_fail_downloader(reason: Exception) -> Downloader:
"""
Make a downloader that always fails with the given exception.
"""
async def fail_downloader(set_state: SetState) -> NoReturn:
raise reason
return fail_downloader
def make_canned_downloader(snapshot: bytes, event_streams: list[bytes]) -> Downloader:
"""
Make a downloader that always immediately succeeds with the given values.
"""
assert isinstance(snapshot, bytes)
assert all(isinstance(e, bytes) for e in event_streams)
async def canned_downloader(set_state: SetState) -> Replica:
return (
partial(BytesIO, snapshot),
[partial(BytesIO, e) for e in event_streams],
)
return canned_downloader
# A downloader that does nothing and then succeeds with an empty snapshot.
noop_downloader = make_canned_downloader(
statements_to_snapshot(iter([])),
[],
)
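
# Illustrative sketch (not part of the original module): driving a
# ``StatefulRecoverer`` to completion against a throwaway in-memory SQLite3
# database using the no-op downloader defined above. The recorded states
# begin with ``started`` and end with ``succeeded``.
async def _demo_stateful_recoverer() -> list[RecoveryState]:
    import sqlite3

    seen: list[RecoveryState] = []
    recoverer = StatefulRecoverer(listeners=(seen.append,))
    cursor = sqlite3.connect(":memory:").cursor()
    await recoverer.recover(noop_downloader, cursor)
    return seen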
def statements_from_snapshot(get_snapshot: DataProvider) -> Iterator[str]:
"""
    Read the SQL statements which constitute the replica from the given
    snapshot data provider.
"""
with get_snapshot() as fp:
snapshot = cbor2.load(fp)
version = snapshot.get("version", None)
if version != 1:
raise ValueError(f"Unknown serialized snapshot version {version}")
return snapshot["statements"]
def recover(
snapshot: DataProvider, event_stream_data: Sequence[DataProvider], cursor: Cursor
) -> None:
"""
Synchronously execute statements from a snapshot and any applicable event
streams against the given cursor.
"""
recover_snapshot(statements_from_snapshot(snapshot), cursor)
for event_stream in sorted_event_streams(load_event_streams(event_stream_data)):
recover_event_stream(event_stream, cursor)
def sorted_event_streams(event_streams: Iterable[EventStream]) -> list[EventStream]:
"""
Sort some event streams by order of increasing highest change sequence
number they contain.
"""
streams_with_changes = (e for e in event_streams if len(e.changes) > 0)
def event_stream_key(e: EventStream) -> int:
seq = e.highest_sequence()
assert seq is not None
return seq
return sorted(streams_with_changes, key=event_stream_key)
def load_event_streams(
event_stream_data: Iterable[DataProvider],
) -> Iterable[EventStream]:
"""
Load some number of ``EventStream`` instances from their serialized form.
"""
for event_stream_datum in event_stream_data:
with event_stream_datum() as f:
yield EventStream.from_bytes(f)
def recover_event_stream(event_stream: EventStream, cursor: Cursor) -> None:
"""
Replay the changes in an event stream using the given cursor.
"""
for change in event_stream.changes:
if change.statement not in ("BEGIN TRANSACTION;", "COMMIT;"):
cursor.execute(change.statement, change.arguments)
def recover_snapshot(statements: Iterator[str], cursor: Cursor) -> None:
"""
Replay the changes in a snapshot using the given cursor.
"""
    # There are certain tables that can't be dropped ... however, we
    # should be refusing to run "recover" at all if there's useful
    # information in the database, so these tables should be in the
    # same state as they would be if we'd been able to drop them. This
    # table exists because we use AUTOINCREMENT in the schema.
do_not_drop = ("sqlite_sequence",)
# Discard all existing data in the database.
cursor.execute("SELECT [name] FROM [sqlite_master] WHERE [type] = 'table'", ())
tables = cursor.fetchall()
for (table_name,) in tables:
if table_name in do_not_drop:
continue
cursor.execute(f"DROP TABLE {escape_identifier(table_name)}", ())
# The order of statements does not necessarily guarantee that foreign key
# constraints are satisfied after every statement. Turn off enforcement
    # so we can insert our rows. If foreign keys were valid when the
    # snapshot was created then they'll be valid by the time we finish
# processing all of the statements. With this pragma, SQLite3 will
# enforce them when the current transaction is committed and the effect
# vanishes after the current transaction (whether it commits or rolls
# back).
cursor.execute("PRAGMA defer_foreign_keys = ON", ())
# Load everything back in two passes. The two passes thing sucks.
# However, if a row is inserted into a table and the table has a foreign
# key constraint and the table it references hasn't been created yet,
# SQLite3 raises an OperationalError - despite the defer_foreign_keys
# pragma above.
#
# Probably a right-er solution is to change the snapshotter to emit all of
# the Data Definition Language (DDL) statements first and all of the Data
# Manipulation Language (DML) statements second so that executing the
# statements in the order given is correct.
#
# Possibly it is also true that if we had never turned on the foreign_keys
# pragma in the first place, SQLite3 would allow this to pass. It is too
# late to turn it off here, though, since it cannot be changed inside a
# transaction.
# So, pull the DDL apart from the DML. Do this in one pass because we
# won't be able to iterate statements twice.
dml = []
for sql in statements:
if sql.startswith("CREATE TABLE"):
cursor.execute(sql, ())
elif sql not in ("BEGIN TRANSACTION;", "COMMIT;"):
dml.append(sql)
# Run all the DML
for sql in dml:
cursor.execute(sql, ())
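
# Illustrative sketch (not part of the original module): replaying a tiny
# snapshot into a fresh in-memory SQLite3 database. The statements are
# hypothetical example data, with DML deliberately appearing before the DDL
# it depends on to show why the two-pass loop above is needed.
def _demo_recover_snapshot() -> list[tuple]:
    import sqlite3

    cursor = sqlite3.connect(":memory:").cursor()
    statements = iter(
        [
            "INSERT INTO [example] VALUES (1)",  # DML first ...
            "CREATE TABLE [example] (x INT)",  # ... DDL second
        ]
    )
    recover_snapshot(statements, cursor)
    cursor.execute("SELECT * FROM [example]", ())
    return cursor.fetchall()  # [(1,)]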
async def tahoe_lafs_downloader(
client: ITahoeClient,
recovery_cap: str,
set_state: SetState,
) -> Replica:
"""
Download replica data from the given replica directory capability into the
node's private directory.
"""
set_state(RecoveryState(stage=RecoveryStages.inspect_replica))
entries = await client.list_directory(recovery_cap)
set_state(RecoveryState(stage=RecoveryStages.downloading))
snapshot_path = client.get_private_path(SNAPSHOT_NAME)
await client.download(snapshot_path, recovery_cap, [SNAPSHOT_NAME])
entry_paths = []
for name, (entry_type, entry) in entries.items():
if entry_type == "filenode" and name.startswith("event-stream-"):
entry_path = client.get_private_path(name)
entry_paths.append(entry_path)
await client.download(entry_path, entry["ro_uri"], None)
return (
partial(snapshot_path.open, "rb"),
[partial(stream_path.open, "rb") for stream_path in entry_paths],
)
def get_tahoe_lafs_downloader(client: ITahoeClient) -> Callable[[str], Downloader]:
"""
Bind some parameters to ``tahoe_lafs_downloader`` in a convenient way.
:return: A callable that accepts a Tahoe-LAFS capability string and
returns a downloader for that capability.
"""
def get_downloader(cap_str: CapStr) -> Downloader:
async def downloader(set_state: SetState) -> Replica:
return await tahoe_lafs_downloader(client, cap_str, set_state)
return downloader
    return get_downloader

# ---- end of _zkapauthorizer/recover.py ----
# ---- _zkapauthorizer/_plugin.py ----
import random
from datetime import datetime
from functools import partial
from sqlite3 import connect as _sqlite3_connect
from typing import Any, Callable
from weakref import WeakValueDictionary
from allmydata.client import _Client
from allmydata.interfaces import (
IAnnounceableStorageServer,
IFilesystemNode,
IFoolscapStoragePlugin,
)
from allmydata.node import MissingConfigEntry
from attrs import Factory, define, field, frozen
from autobahn.twisted.resource import WebSocketResource
from challenge_bypass_ristretto import PublicKey, SigningKey
from eliot import start_action
from prometheus_client import CollectorRegistry, write_to_textfile
from twisted.application.service import IService, MultiService
from twisted.internet import task
from twisted.internet.defer import succeed
from twisted.logger import Logger
from twisted.python.filepath import FilePath
from twisted.web.guard import HTTPAuthSessionWrapper
from zope.interface import implementer
from . import NAME
from ._types import CapStr, GetTime
from .api import ZKAPAuthorizerStorageClient, ZKAPAuthorizerStorageServer
from .config import CONFIG_DB_NAME, Config
from .controller import get_redeemer
from .lease_maintenance import SERVICE_NAME as MAINTENANCE_SERVICE_NAME
from .lease_maintenance import (
LeaseMaintenanceConfig,
lease_maintenance_service,
maintain_leases_from_root,
)
from .model import VoucherStore, aware_now
from .model import open_database as _open_database
from .recover import get_tahoe_lafs_downloader
from .replicate import (
_ReplicationCapableConnection,
get_replica_rwcap,
get_tahoe_lafs_direntry_replica,
is_replication_setup,
replication_service,
setup_tahoe_lafs_replication,
with_replication,
)
from .resource import from_configuration as resource_from_configuration
from .server.spending import get_spender
from .spending import SpendingController
from .sql import UnboundConnect
from .storage_common import BYTES_PER_PASS, get_configured_pass_value
from .tahoe import (
ITahoeClient,
ShareEncoding,
attenuate_writecap,
get_tahoe_client,
required_passes_for_data,
)
_log = Logger()
@implementer(IAnnounceableStorageServer)
@define
class AnnounceableStorageServer(object):
announcement = field()
storage_server = field()
def open_store(
now: GetTime, conn: _ReplicationCapableConnection, node_config: Config
) -> VoucherStore:
"""
Open a ``VoucherStore`` for the given configuration.
:param now: A function that can be used to get the current time.
:param conn: The database connection to give to the store.
:param node_config: The Tahoe-LAFS configuration object for the node
for which we want to open a store.
:return: A new ``VoucherStore`` instance.
"""
pass_value = get_configured_pass_value(node_config)
return VoucherStore.from_connection(pass_value, now, conn)
@frozen
class _CostBasedPolicy:
"""
Encode policy rules about when to take and upload a new snapshot.
:ivar bytes_per_pass: The price of on-grid storage.
:ivar encoding: The erasure encoding configuration used for all uploads.
    :ivar factor: A multiplier for how much more expensive it must be to
        maintain the on-grid replica than it would be to maintain a replica
        based on a new snapshot before a new snapshot will be taken.
"""
bytes_per_pass: int
encoding: ShareEncoding
factor: float
def _required_passes(self, size: int) -> int:
"""
Calculate the number of passes required to store an object of the given
size, in bytes.
"""
return required_passes_for_data(self.bytes_per_pass, self.encoding, size)
def should_snapshot(self, snapshot_size: int, replica_sizes: list[int]) -> bool:
"""
Decide to take a new snapshot if the cost to maintain the replica is
greater than the new snapshot's cost by at least a factor of
``self.factor``.
"""
snapshot_cost = self._required_passes(snapshot_size)
replica_cost = sum(map(self._required_passes, replica_sizes))
return snapshot_cost * self.factor < replica_cost
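
# Illustrative sketch (not part of the original module), assuming
# ``ShareEncoding`` is constructed from (needed, total) share counts: a
# replica costing roughly 100 times as much as a fresh snapshot exceeds a
# ``factor`` of 10, so a new snapshot would be taken.
def _demo_cost_policy() -> bool:
    policy = _CostBasedPolicy(
        bytes_per_pass=BYTES_PER_PASS,
        encoding=ShareEncoding(3, 10),  # hypothetical 3-of-10 encoding
        factor=10,
    )
    return policy.should_snapshot(
        snapshot_size=BYTES_PER_PASS,
        replica_sizes=[BYTES_PER_PASS] * 100,
    )  # True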
def get_recovery_websocket_resource(root: HTTPAuthSessionWrapper) -> WebSocketResource:
"""
:returns: the resource that speaks the WebSocket recovery protocol
"""
return root._portal.realm._root.children[b"recover"]
@implementer(IFoolscapStoragePlugin)
@define
class ZKAPAuthorizer(object):
"""
A storage plugin which provides a token-based access control mechanism on
top of the Tahoe-LAFS built-in storage server interface.
:ivar _stores: A mapping from node directories to this plugin's database
connections for those nodes. The existence of any kind of attribute
to reference database connections (not so much the fact that it is a
WeakValueDictionary; if it were just a weakref the same would be true)
probably reflects an error in the interface which forces different
methods to use instance state to share a database connection.
"""
name: str
reactor: Any
_get_tahoe_client: Callable[[Any, Config], ITahoeClient]
# UnboundConnect doesn't actually unify with sqlite3.connect at the
# moment. Can't be bothered to fix it right now.
_connect: UnboundConnect = _sqlite3_connect # type: ignore
_stores: WeakValueDictionary = Factory(WeakValueDictionary)
_service: MultiService = field()
@_service.default
def _service_default(self):
svc = MultiService()
# There doesn't seem to be an API in Twisted to hook a service up to
# the reactor. There are pieces of it but they're spread out and
# mixed with other stuff. So, just do it ourselves. See
# twisted.application.app.startApplication for some of it, if you
# want.
#
# We intentionally don't hook up privilegedStartService because
# there's no expectation of a requirement for privileged operations
# and because we don't expect to ever run with any privileges and
# because we never expect to try to shed any privileges.
self.reactor.callWhenRunning(svc.startService)
self.reactor.addSystemEventTrigger("before", "shutdown", svc.stopService)
return svc
def _get_store(self, node_config: Config) -> VoucherStore:
"""
:return: The ``VoucherStore`` for the given node. At most one connection
is made to the database per ``ZKAPAuthorizer`` instance.
"""
key = node_config.get_config_path()
try:
store = self._stores[key]
except KeyError:
db_path = FilePath(node_config.get_private_path(CONFIG_DB_NAME))
unreplicated_conn = _open_database(partial(self._connect, db_path.path))
replicated_conn = with_replication(
unreplicated_conn, is_replication_setup(node_config)
)
store = open_store(aware_now, replicated_conn, node_config)
if is_replication_setup(node_config):
self._add_replication_service(replicated_conn, node_config)
self._stores[key] = store
return store
def _add_replication_service(
self, replicated_conn: _ReplicationCapableConnection, node_config: Config
) -> CapStr:
"""
Create a replication service for the given database and arrange for it to
start and stop when the reactor starts and stops.
"""
client = self._get_tahoe_client(self.reactor, node_config)
mutable = get_replica_rwcap(node_config)
replica = get_tahoe_lafs_direntry_replica(client, mutable)
cost = _CostBasedPolicy(
get_configured_pass_value(node_config),
client.get_config().encoding,
10,
)
replication_service(replicated_conn, replica, cost).setServiceParent(
self._service
)
return mutable
def _get_redeemer(self, node_config, announcement):
"""
:return IRedeemer: The voucher redeemer indicated by the given
configuration. A new instance is returned on every call because
the redeemer interface is stateless.
"""
return get_redeemer(self.name, node_config, announcement, self.reactor)
def get_storage_server(self, configuration, get_anonymous_storage_server):
registry = CollectorRegistry()
kwargs = configuration.copy()
# If metrics are desired, schedule their writing to disk.
metrics_interval = kwargs.pop("prometheus-metrics-interval", None)
metrics_path = kwargs.pop("prometheus-metrics-path", None)
if metrics_interval is not None and metrics_path is not None:
FilePath(metrics_path).parent().makedirs(ignoreExistingDirectory=True)
t = task.LoopingCall(make_safe_writer(metrics_path, registry))
t.clock = self.reactor
t.start(int(metrics_interval))
root_url = kwargs.pop("ristretto-issuer-root-url")
pass_value = int(kwargs.pop("pass-value", BYTES_PER_PASS))
signing_key = load_signing_key(
FilePath(
kwargs.pop("ristretto-signing-key-path"),
),
)
public_key = PublicKey.from_signing_key(signing_key)
announcement = {
"ristretto-issuer-root-url": root_url,
"ristretto-public-keys": [public_key.encode_base64()],
}
anonymous_storage_server = get_anonymous_storage_server()
spender = get_spender(
config=kwargs,
reactor=self.reactor,
registry=registry,
)
storage_server = ZKAPAuthorizerStorageServer(
anonymous_storage_server,
pass_value=pass_value,
signing_key=signing_key,
spender=spender,
registry=registry,
**kwargs,
)
return succeed(
AnnounceableStorageServer(
announcement,
storage_server,
),
)
def get_storage_client(self, node_config, announcement, get_rref):
"""
Create an ``IStorageClient`` that submits ZKAPs with certain requests in
order to authorize them. The ZKAPs are extracted from the database
managed by this plugin in the node directory that goes along with
``node_config``.
"""
redeemer = self._get_redeemer(node_config, announcement)
store = self._get_store(node_config)
controller = SpendingController.for_store(
tokens_to_passes=redeemer.tokens_to_passes,
store=store,
)
return ZKAPAuthorizerStorageClient(
get_configured_pass_value(node_config),
get_rref,
controller.get,
)
def get_client_resource(self, node_config):
"""
Get an ``IZKAPRoot`` for the given node configuration.
:param allmydata.node._Config node_config: The configuration object
for the relevant node.
"""
store = self._get_store(node_config)
tahoe = self._get_tahoe_client(self.reactor, node_config)
async def setup_replication():
# Setup replication
await setup_tahoe_lafs_replication(tahoe)
# And then turn replication on for the database connection already
# in use.
mutable = self._add_replication_service(store._connection, node_config)
return attenuate_writecap(mutable)
return resource_from_configuration(
node_config,
store=store,
get_downloader=get_tahoe_lafs_downloader(tahoe),
setup_replication=setup_replication,
redeemer=self._get_redeemer(node_config, None),
clock=self.reactor,
)
def make_safe_writer(
metrics_path: str, registry: CollectorRegistry
) -> Callable[[], None]:
"""
Make a no-argument callable that writes metrics from the given registry to
the given path. The callable will log errors writing to the path and not
raise exceptions.
"""
def safe_writer():
try:
with start_action(
action_type="zkapauthorizer:metrics:write-to-textfile",
metrics_path=metrics_path,
):
write_to_textfile(metrics_path, registry)
except Exception:
pass
return safe_writer
_init_storage = _Client.__dict__["init_storage"]
def _attach_zkapauthorizer_services(self, announceable_storage_servers):
"""
A monkey-patched version of ``_Client.init_storage`` which also
initializes ZKAPAuthorizer's services.
"""
from twisted.internet import reactor
# Make sure the original work happens.
result = _init_storage(self, announceable_storage_servers)
# Find the database relevant to this node. The global state, the weakref
# lookup... these things are not great.
store = storage_server_plugin._get_store(self.config)
# Hook up our services.
for name, predicate, create in _SERVICES:
if predicate(self.config):
_maybe_attach_service(
reactor,
self,
store,
name,
create,
)
return result
_Client.init_storage = _attach_zkapauthorizer_services
def _maybe_attach_service(
reactor, client_node, store: VoucherStore, name: str, make_service
) -> None:
"""
Check for an existing service and if one is not found create one and
attach it to the client service.
:param allmydata.client._Client client_node: The client node to check and,
possibly, modify. A lease maintenance service is added to it if and
only if one is not already present.
"""
try:
# If there is already one we don't need another.
client_node.getServiceNamed(name)
except KeyError:
# There isn't one so make it and add it.
_log.info(f"Creating new {name} service")
try:
service = make_service(
reactor,
client_node,
store,
)
except:
_log.failure(f"Attaching {name} service to client node")
else:
service.setServiceParent(client_node)
else:
_log.info(f"Found existing {name} service")
def _create_maintenance_service(reactor, client_node, store: VoucherStore) -> IService:
"""
Create a lease maintenance service to be attached to the given client
node.
:param allmydata.client._Client client_node: The client node the lease
maintenance service will be attached to.
"""
node_config = client_node.config
def get_now():
return datetime.utcfromtimestamp(reactor.seconds())
maint_config = LeaseMaintenanceConfig.from_node_config(node_config)
# Create the operation which performs the lease maintenance job when
# called.
maintain_leases = maintain_leases_from_root(
get_root_nodes=partial(get_root_nodes, client_node, node_config),
storage_broker=client_node.get_storage_broker(),
secret_holder=client_node._secret_holder,
min_lease_remaining=maint_config.min_lease_remaining,
progress=store.start_lease_maintenance,
get_now=get_now,
)
last_run_path = FilePath(node_config.get_private_path("last-lease-maintenance-run"))
# Create the service to periodically run the lease maintenance operation.
return lease_maintenance_service(
maintain_leases,
reactor,
last_run_path,
random,
lease_maint_config=maint_config,
)
def _is_client_plugin_enabled(node_config: Config) -> bool:
"""
:return: ``True`` if and only if the ZKAPAuthorizer storage client plugin
is enabled in the given configuration.
"""
# See allmydata/storage_client.py, StorageClientConfig.from_node_config.
storage_plugins = node_config.get_config("client", "storage.plugins", "")
plugin_names = {name.strip() for name in storage_plugins.split(",")}
return NAME in plugin_names
_SERVICES = [
# Run the lease maintenance service on client nodes.
(MAINTENANCE_SERVICE_NAME, _is_client_plugin_enabled, _create_maintenance_service),
]
def get_root_nodes(client_node, node_config) -> list[IFilesystemNode]:
"""
Get the configured starting points for lease maintenance traversal.
"""
try:
rootcap = node_config.get_private_config("rootcap")
except MissingConfigEntry:
return []
else:
return [client_node.create_node_from_uri(rootcap.encode("utf-8"))]
def load_signing_key(path):
"""
Read a serialized Ristretto signing key from the given path and return it
as a ``challenge_bypass_ristretto.SigningKey``.
Unlike ``challenge_bypass_ristretto.SigningKey.decode_base64`` this
function will clean up any whitespace around the key.
:param FilePath path: The path from which to read the key.
:raise challenge_bypass_ristretto.DecodeException: If
``SigningKey.decode_base64`` raises this exception it will be passed
through.
:return challenge_bypass_ristretto.SigningKey: An object representing the
key read.
"""
return SigningKey.decode_base64(path.getContent().strip())
# Create the global plugin object, re-exported elsewhere so Twisted can
# discover it. We'll also use it here since it carries some state that we
# sometimes need to dig up and can't easily get otherwise.
def _create_plugin():
# Do not leak the global reactor into the module scope!
from twisted.internet import reactor
return ZKAPAuthorizer(
name=NAME,
reactor=reactor,
get_tahoe_client=get_tahoe_client,
)
storage_server_plugin = _create_plugin()

# ---- end of _zkapauthorizer/_plugin.py ----
# ---- _zkapauthorizer/sql.py ----
from __future__ import annotations
from datetime import datetime
from enum import Enum, auto
from sqlite3 import Connection as _SQLite3Connection
from typing import Any, ContextManager, Iterable, Optional, Protocol, Union
from attrs import frozen
from sqlparse import parse
SQLType = Union[int, float, str, bytes, datetime, None]
SQLRuntimeType = (int, float, str, bytes, datetime, type(None))
class AbstractCursor(Protocol):
"""
A SQLite3 database cursor.
"""
@property
def lastrowid(self) -> Optional[int]:
...
@property
def rowcount(self) -> Optional[int]:
...
def execute(self, statement: str, args: Iterable[Any]) -> AbstractCursor:
...
def executemany(self, statement, args: Iterable[Iterable[Any]]) -> AbstractCursor:
...
def close(self) -> None:
...
def fetchall(self) -> list[Any]:
...
def fetchmany(self, n: int) -> list[Any]:
...
def fetchone(self) -> Any:
...
class AbstractConnection(Protocol):
"""
A SQLite3 database connection.
"""
def iterdump(self) -> Iterable[str]:
...
def cursor(self, cursorClass: Optional[type] = None) -> AbstractCursor:
...
def __enter__(self) -> ContextManager:
...
def __exit__(
self,
exc_type: Optional[type],
exc_value: Optional[BaseException],
exc_tb: Optional[Any],
) -> bool:
...
Connection = AbstractConnection
Cursor = AbstractCursor
class UnboundConnect(Protocol):
"""
Connect to a SQLite3 database.
"""
def __call__(
self,
path: str,
        timeout: Optional[int] = None,
        detect_types: Optional[bool] = None,
        isolation_level: Optional[str] = None,
check_same_thread: bool = False,
factory: Any = None,
cached_statements: Any = None,
) -> _SQLite3Connection:
"""
Get a new database connection.
"""
class BoundConnect(Protocol):
"""
Connect to a certain (ie, not parameterized) SQLite3 database.
"""
def __call__(
self,
        timeout: Optional[int] = None,
        detect_types: Optional[bool] = None,
        isolation_level: Optional[str] = None,
check_same_thread: bool = False,
factory: Any = None,
cached_statements: Any = None,
) -> _SQLite3Connection:
"""
Get a new database connection.
"""
class StorageAffinity(Enum):
"""
Represent the different "storage affinities" possible for a SQLite3
column.
"""
INT = auto()
TEXT = auto()
BLOB = auto()
REAL = auto()
NUMERIC = auto()
@frozen
class Column:
"""
Represent a column in a SQLite3 table.
:ivar affinity: The expected type affinity for values in this column. See
https://www.sqlite.org/datatype3.html
"""
affinity: StorageAffinity
@frozen
class Table:
"""
Represent a table in a SQLite3 database.
:ivar columns: The columns that make up this table.
"""
columns: list[tuple[str, Column]]
@frozen
class Insert:
"""
Represent an insertion of one row into a table.
:ivar table_name: The name of the table where the row can be inserted.
:ivar table: A representation of the table itself.
:ivar fields: The values which can be inserted.
"""
table_name: str
table: Table
fields: tuple[SQLType, ...]
def statement(self):
names = ", ".join((escape_identifier(name) for (name, _) in self.table.columns))
placeholders = ", ".join("?" * len(self.table.columns))
return (
f"INSERT INTO {escape_identifier(self.table_name)} "
f"({names}) "
f"VALUES ({placeholders})"
)
def arguments(self) -> tuple[SQLType, ...]:
return self.fields
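
# Illustrative sketch (not part of the original module): the SQL rendered by
# ``Insert`` for a hypothetical two-column table.
def _demo_insert_statement() -> str:
    example = Insert(
        table_name="example",
        table=Table(
            columns=[
                ("a", Column(affinity=StorageAffinity.INT)),
                ("b", Column(affinity=StorageAffinity.TEXT)),
            ]
        ),
        fields=(1, "x"),
    )
    return example.statement()  # "INSERT INTO 'example' ('a', 'b') VALUES (?, ?)"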
def quote_sql_value(cursor: Cursor, value: SQLType) -> str:
"""
Use the SQL `quote()` function to return the quoted version of `value`.
:returns: the quoted value
"""
    if isinstance(value, (int, float)):
        return str(value)
    if value is None:
        return "NULL"
    if isinstance(value, (str, bytes, datetime)):
cursor.execute("SELECT quote(?);", (value,))
result = cursor.fetchall()[0][0]
assert isinstance(result, str)
return result
raise ValueError(f"Do not know how to quote value of type {type(value)}")
@frozen
class Update:
"""
Represent an update to some rows in a table.
Currently this updates all rows.
:ivar table_name: The name of the table to which the update applies.
:ivar table: A representation of the table itself.
:ivar fields: The new values for each column in the table.
"""
table_name: str
table: Table
fields: tuple[SQLType, ...]
def statement(self):
field_names = list(name for (name, _) in self.table.columns)
assignments = ", ".join(
f"{escape_identifier(name)} = ?" for name in field_names
)
return f"UPDATE {escape_identifier(self.table_name)} SET {assignments}"
def arguments(self) -> tuple[SQLType, ...]:
return self.fields
@frozen
class Select:
"""
Represent a query about a certain table
:ivar table_name: valid SQL identifier for a table
"""
table_name: str
def statement(self):
return f"SELECT * FROM {escape_identifier(self.table_name)}"
def arguments(self) -> tuple[()]:
return ()
@frozen
class Delete:
"""
Represent the deletion of some rows from a table.
Currently this deletes all rows.
    :ivar table_name: The name of the table from which rows can be deleted.
"""
table_name: str
def statement(self):
return f"DELETE FROM {escape_identifier(self.table_name)}"
def arguments(self) -> tuple[()]:
return ()
def escape_identifier(string: str) -> str:
"""
Escape an arbitrary string for use as a SQLite3 identifier.
"""
return f"'{string}'"
def column_ddl(name: str, column: Column) -> str:
"""
Get a column DDL fragment for a column of the given name and type.
:return: *bar* in **create table foo ( bar )**
"""
return f"{escape_identifier(name)} {column.affinity.name}"
def create_table(name: str, table: Table) -> str:
"""
Get a table creation DDL statement for a table of the given name and type.
"""
columns = ", ".join(column_ddl(name, column) for (name, column) in table.columns)
return f"CREATE TABLE {escape_identifier(name)} ({columns})"
def statement_mutates(statement: str) -> bool:
"""
    A predicate to decide whether `statement` will change the database.
"""
if statement == "BEGIN IMMEDIATE TRANSACTION":
return False
(parsed,) = parse(statement)
    return parsed.get_type() not in {"SELECT"}

# ---- end of _zkapauthorizer/sql.py ----
# ---- _zkapauthorizer/storage_common.py ----
from base64 import b64encode
from typing import Callable, Union, ValuesView
import attr
from pyutil.mathutil import div_ceil
from . import NAME
from .eliot import MUTABLE_PASSES_REQUIRED
from .validators import greater_than
@attr.s(str=True)
class MorePassesRequired(Exception):
"""
Storage operations fail with ``MorePassesRequired`` when they are not
accompanied by a sufficient number of valid passes.
:ivar valid_count: The number of valid passes presented in the operation.
    :ivar required_count: The number of valid passes which must be presented
        for the operation to be authorized.
:ivar signature_check_failed: Indices into the supplied list of passes
indicating passes which failed the signature check.
"""
valid_count: int = attr.ib(validator=attr.validators.instance_of(int))
required_count: int = attr.ib(validator=attr.validators.instance_of(int))
signature_check_failed: frozenset[int] = attr.ib(converter=frozenset)
def _message_maker(label: str) -> Callable[[bytes], bytes]:
def make_message(storage_index):
return "{label} {storage_index}".format(
label=label,
storage_index=b64encode(storage_index).decode("ascii"),
).encode("ascii")
return make_message
# Functions to construct the PrivacyPass request-binding message for pass
# construction for different Tahoe-LAFS storage operations.
allocate_buckets_message = _message_maker("allocate_buckets")
add_lease_message = _message_maker("add_lease")
slot_testv_and_readv_and_writev_message = _message_maker(
"slot_testv_and_readv_and_writev"
)
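
# Illustrative sketch (not part of the original module): the request-binding
# message produced for a hypothetical all-zero storage index.
def _demo_binding_message() -> bytes:
    return allocate_buckets_message(b"\x00" * 16)
    # b"allocate_buckets AAAAAAAAAAAAAAAAAAAAAA=="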
# The number of bytes we're willing to store for a lease period for each pass
# submitted.
BYTES_PER_PASS = 1024 * 1024
def get_configured_shares_needed(node_config):
"""
    Determine the configuration-specified value of "needed" shares (``k``).
If no value is explicitly configured, the Tahoe-LAFS default (as best as
we know it) is returned.
"""
return int(
node_config.get_config(
section="client",
option="shares.needed",
default=3,
)
)
def get_configured_shares_total(node_config):
"""
    Determine the configuration-specified value of "total" shares (``N``).
If no value is explicitly configured, the Tahoe-LAFS default (as best as
we know it) is returned.
"""
return int(
node_config.get_config(
section="client",
option="shares.total",
default=10,
)
)
def get_configured_pass_value(node_config):
"""
Determine the configuration-specified value of a single ZKAP.
If no value is explicitly configured, a default value is returned. The
value is read from the **pass-value** option of the ZKAPAuthorizer plugin
client section.
"""
section_name = "storageclient.plugins." + NAME
return int(
node_config.get_config(
section=section_name,
option="pass-value",
default=BYTES_PER_PASS,
)
)
def get_configured_allowed_public_keys(node_config):
"""
Read the set of allowed issuer public keys from the given configuration.
"""
section_name = "storageclient.plugins." + NAME
return set(
node_config.get_config(
section=section_name,
option="allowed-public-keys",
)
.strip()
.split(",")
)
_dict_values: type = type(dict().values())
def required_passes(
bytes_per_pass: int, share_sizes: Union[ValuesView[int], list[int]]
) -> int:
"""
Calculate the number of passes that are required to store shares of the
given sizes for one lease period.
:param bytes_per_pass: The number of bytes the storage of which for one
lease period one pass covers.
:param share_sizes: The sizes of the shared which will be stored.
:return: The number of passes required to cover the storage cost.
"""
if not isinstance(share_sizes, (list, _dict_values)):
raise TypeError(
"Share sizes must be a list (or dict_values) of integers, got {!r} instead".format(
share_sizes,
),
)
result, b = divmod(sum(share_sizes, 0), bytes_per_pass)
if b > 0:
result += 1
# print("required_passes({}, {}) == {}".format(bytes_per_pass, share_sizes, result))
return result
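
# Illustrative sketch (not part of the original module): one share of 512
# KiB and one of 768 KiB total 1.25 MiB, which rounds up to two passes at
# the default pass value of 1 MiB.
def _demo_required_passes() -> int:
    return required_passes(BYTES_PER_PASS, [512 * 1024, 768 * 1024])  # 2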
def share_size_for_data(shares_needed, datasize):
"""
Calculate the size of a single erasure encoding share for data of the
given size and with the given level of redundancy.
:param int shares_needed: The number of shares (``k``) from the erasure
encoding process which are required to reconstruct original data of
the indicated size.
:param int datasize: The size of the data to consider, in bytes.
:return int: The size of a single erasure encoding share for the given
inputs.
"""
return div_ceil(datasize, shares_needed)
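
# Illustrative sketch (not part of the original module): with k = 3, 1000
# bytes of data yields shares of ceil(1000 / 3) == 334 bytes each.
def _demo_share_size_for_data() -> int:
    return share_size_for_data(3, 1000)  # 334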
def has_writes(tw_vectors):
"""
:param tw_vectors: See
``allmydata.interfaces.TestAndWriteVectorsForShares``.
    :return bool: ``True`` if and only if there are writes in ``tw_vectors``.
"""
return any(
data or (new_length is not None)
for (test, data, new_length) in tw_vectors.values()
)
def get_write_sharenums(tw_vectors):
"""
:param tw_vectors: See
``allmydata.interfaces.TestAndWriteVectorsForShares``.
:return set[int]: The share numbers which the given test/write vectors would write to.
"""
return set(
# This misses cases where `data` is empty but `new_length` is
# non-None, non-0.
#
# Related to #222.
sharenum
for (sharenum, (test, data, new_length)) in tw_vectors.items()
if data
)
def get_allocated_size(tw_vectors):
"""
:param tw_vectors: See
``allmydata.interfaces.TestAndWriteVectorsForShares``.
:return int: The largest position ``tw_vectors`` writes in any share.
"""
return max(
list(
max(offset + len(s) for (offset, s) in data)
for (sharenum, (test, data, new_length)) in tw_vectors.items()
if data
),
)
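
# Illustrative sketch (not part of the original module): hypothetical
# test-and-write vectors writing four bytes at offset 0 of share 0 and
# nothing to share 1.
def _demo_tw_vector_helpers() -> tuple:
    tw_vectors = {
        0: ([], [(0, b"data")], None),
        1: ([], [], None),
    }
    return (
        has_writes(tw_vectors),  # True
        get_write_sharenums(tw_vectors),  # {0}
        get_allocated_size(tw_vectors),  # 4
    )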
def get_implied_data_length(data_vector, new_length):
"""
:param data_vector: See ``allmydata.interfaces.DataVector``.
:param new_length: See
``allmydata.interfaces.RIStorageServer.slot_testv_and_readv_and_writev``.
:return int: The amount of data, in bytes, implied by a data vector and a
size.
"""
data_based_size = (
max(offset + len(data) for (offset, data) in data_vector) if data_vector else 0
)
if new_length is None:
return data_based_size
# new_length is only allowed to truncate, not expand.
return min(new_length, data_based_size)
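
# Illustrative sketch (not part of the original module): a four byte write
# at offset 0 combined with a new_length of 2 implies a data length of 2,
# since new_length may only truncate.
def _demo_implied_data_length() -> int:
    return get_implied_data_length([(0, b"data")], 2)  # 2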
def get_required_new_passes_for_mutable_write(pass_value, current_sizes, tw_vectors):
"""
:param int pass_value: The value of a single pass in byte-months.
:param current_sizes:
"""
current_passes = required_passes(
pass_value,
current_sizes.values(),
)
new_sizes = current_sizes.copy()
size_updates = {
sharenum: get_implied_data_length(data_vector, new_length)
for (sharenum, (_, data_vector, new_length)) in tw_vectors.items()
}
for sharenum, size in size_updates.items():
if size > new_sizes.get(sharenum, 0):
new_sizes[sharenum] = size
new_passes = required_passes(
pass_value,
new_sizes.values(),
)
required_new_passes = new_passes - current_passes
MUTABLE_PASSES_REQUIRED.log(
current_sizes=current_sizes,
tw_vectors_summary=summarize(tw_vectors),
current_passes=current_passes,
new_sizes=new_sizes,
new_passes=new_passes,
)
return required_new_passes
def summarize(tw_vectors):
    """
    Summarize test-and-write vectors for logging by replacing specimen and
    data byte strings with their lengths.
    """
return {
sharenum: {
"testv": list(
(offset, length, operator, len(specimen))
for (offset, length, operator, specimen) in test_vector
),
"datav": list((offset, len(data)) for (offset, data) in data_vectors),
"new_length": new_length,
}
for (sharenum, (test_vector, data_vectors, new_length)) in tw_vectors.items()
}
def pass_value_attribute():
"""
Define an attribute for an attrs-based object which can hold a pass value.
"""
return attr.ib(
validator=attr.validators.and_(
attr.validators.instance_of(int),
greater_than(0),
),
    )

# ---- end of _zkapauthorizer/storage_common.py ----
# ---- _zkapauthorizer/model.py ----
from __future__ import annotations
import os
from datetime import datetime, timezone
from functools import wraps
from json import loads
from sqlite3 import Connection as _SQLite3Connection
from sqlite3 import OperationalError
from sqlite3 import connect as _connect
from typing import Awaitable, Callable, List, Optional, TypeVar
import attr
from aniso8601 import parse_datetime
from attrs import define, field, frozen
from hyperlink import DecodedURL
from twisted.logger import Logger
from twisted.python.filepath import FilePath
from zope.interface import Interface, implementer
from ._base64 import urlsafe_b64decode
from ._json import dumps_utf8
from ._types import GetTime
from .replicate import (
_ReplicationCapableConnection,
_ReplicationCapableCursor,
snapshot,
)
from .schema import get_schema_upgrades, get_schema_version, run_schema_upgrades
from .sql import BoundConnect, Cursor
from .storage_common import required_passes
from .validators import (
aware_datetime_validator,
greater_than,
has_length,
is_aware_datetime,
is_base64_encoded,
returns_aware_datetime_validator,
)
_T = TypeVar("_T")
def aware_now() -> datetime:
"""
Get the current time as a timezone-aware UTC datetime.
"""
return datetime.now(timezone.utc)
class NotEmpty(Exception):
"""
The precondition that there be no non-trivial state in the database was
not met.
"""
class ILeaseMaintenanceObserver(Interface):
"""
An object which is interested in receiving events related to the progress
of lease maintenance activity.
"""
def observe(sizes):
"""
Observe some shares encountered during lease maintenance.
:param list[int] sizes: The sizes of the shares encountered.
"""
def finish():
"""
Observe that a run of lease maintenance has completed.
"""
class StoreOpenError(Exception):
"""
There was a problem opening the underlying data store.
"""
def __init__(self, reason):
self.reason = reason
class NotEnoughTokens(Exception):
"""
An attempt to extract tokens failed because the store does not contain as
many tokens as were requested.
"""
def open_database(connect: BoundConnect) -> _SQLite3Connection:
"""
Create and return a database connection using the required connect
parameters.
"""
try:
return connect(isolation_level="IMMEDIATE")
except OperationalError as e:
raise StoreOpenError(e)
def initialize_database(conn: _ReplicationCapableConnection) -> None:
"""
Make any persistent and temporary schema changes required to make the
given database compatible with this version of the software.
If the database has an older schema version, it will be upgraded.
Temporary tables required by application code will also be created.
"""
cursor = conn.cursor()
with conn:
# Enforcement of foreign key constraints is off by default. It must
# be enabled on a per-connection basis. This is a helpful feature to
# ensure consistency so we want it enforced and we use it in our
# schema.
cursor.execute("PRAGMA foreign_keys = ON", ())
# Upgrade the database to the most recent version of the schema. That
# is the only schema the Python code will actually work against.
actual_version = get_schema_version(cursor)
schema_upgrades = list(get_schema_upgrades(actual_version))
run_schema_upgrades(schema_upgrades, cursor)
# Create some tables that only exist (along with their contents) for
# this connection. These are outside of the schema because they are not
# persistent. We can change them any time we like without worrying about
# upgrade logic because we re-create them on every connection.
cursor.execute(
"""
-- Track tokens in use by the process holding this connection.
CREATE TEMPORARY TABLE [in-use] (
[unblinded-token] text, -- The base64 encoded unblinded token.
PRIMARY KEY([unblinded-token])
-- A foreign key on unblinded-token to [unblinded-tokens]([token])
-- would be alright - however SQLite3 foreign key constraints
-- can't cross databases (and temporary tables are considered to
-- be in a different database than normal tables).
)
""",
(),
)
cursor.execute(
"""
-- Track tokens that we want to remove from the database. Mainly just
-- works around the awkward DB-API interface for dealing with deleting
-- many rows.
CREATE TEMPORARY TABLE [to-discard] (
[unblinded-token] text
)
""",
(),
)
cursor.execute(
"""
-- Track tokens that we want to remove from the [in-use] set. Similar
-- to [to-discard].
CREATE TEMPORARY TABLE [to-reset] (
[unblinded-token] text
)
""",
(),
)
cursor.close()
def with_cursor_async(f: Callable[..., Awaitable[_T]]) -> Callable[..., Awaitable[_T]]:
"""
Like ``with_cursor`` but support decorating async functions instead.
The transaction will be kept open until the async function completes.
"""
@wraps(f)
async def with_cursor_async(self, *a, **kw) -> _T:
with self._connection:
cursor = self._connection.cursor()
try:
cursor.execute("BEGIN IMMEDIATE TRANSACTION")
return await f(self, cursor, *a, **kw)
finally:
cursor.close()
return with_cursor_async
def with_cursor(f: Callable[..., _T]) -> Callable[..., _T]:
"""
Decorate a function so it is automatically passed a cursor with an active
transaction as the first positional argument. If the function returns
normally then the transaction will be committed. Otherwise, the
transaction will be rolled back.
"""
@wraps(f)
def with_cursor(self, *a, **kw):
with self._connection:
cursor = self._connection.cursor()
try:
cursor.execute("BEGIN IMMEDIATE TRANSACTION")
return f(self, cursor, *a, **kw)
finally:
cursor.close()
# Callable has no attribute wrapped ... yea ... true.
with_cursor.wrapped = f # type: ignore
return with_cursor
def path_to_memory_uri(path: FilePath) -> str:
"""
Construct a SQLite3 database URI for an in-memory connection to a database
identified by the given path.
Since in-memory databases do not exist on disk the path does not actually
specify where on the filesystem the database exists. Instead, it serves
as a key so that the same in-memory database can be opened multiple times
by supplying the same path (and similarly, different paths will result in
connections to different in-memory databases).
:return: A string suitable to be passed as the first argument to
``sqlite3.connect`` along with the `uri=True` keyword argument.
"""
# See https://www.sqlite.org/uri.html for docs on URI-style database
# paths.
return (
DecodedURL()
.replace(
scheme="file",
# segmentsFrom(FilePath("/")) is tempting but on Windows "/" is
# not necessarily the root for every path.
path=path.asTextMode().path.split(os.sep),
)
.add("mode", "memory")
# The shared cache mode is required for two connections to the same
# memory-mode database.
# https://www.sqlite.org/sharedcache.html#shared_cache_and_in_memory_databases
.add("cache", "shared")
.to_text()
)
def memory_connect(path: str, *a, uri=None, **kw) -> _SQLite3Connection:
"""
Always connect to an in-memory SQLite3 database.
"""
kw["uri"] = True
conn = _connect(path_to_memory_uri(FilePath(path)), *a, **kw)
return conn
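
# Illustrative sketch (not part of the original module): two connections
# opened via ``memory_connect`` with the same (hypothetical) path share one
# in-memory database, unlike two plain ":memory:" connections.
def _demo_memory_connect() -> list:
    a = memory_connect("/hypothetical/node/private/db.sqlite")
    a.execute("CREATE TABLE [example] (x INT)")
    a.execute("INSERT INTO [example] VALUES (1)")
    a.commit()
    b = memory_connect("/hypothetical/node/private/db.sqlite")
    return b.execute("SELECT * FROM [example]").fetchall()  # [(1,)]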
# The largest integer SQLite3 can represent in an integer column. Larger than
# this and the representation loses precision, becoming a floating point.
_SQLITE3_INTEGER_MAX = 2**63 - 1
def _require_aware_time(now: GetTime) -> Callable[[], datetime]:
"""
    Get a timezone-aware datetime for the current time or raise ``TypeError``
    if the wrapped function returns naive datetimes.
"""
@wraps(now)
def f() -> datetime:
result = now()
if not is_aware_datetime(result):
raise TypeError(
"{} returned {}, expected aware datetime".format(now, result)
)
return result
return f
@frozen
class VoucherStore(object):
"""
This class implements persistence for vouchers.
:ivar now: A no-argument callable that returns the time of the call as a
``datetime`` instance.
"""
pass_value: int
now: GetTime = field(
validator=returns_aware_datetime_validator, converter=_require_aware_time
)
_connection: _ReplicationCapableConnection
_log = Logger()
@classmethod
def from_connection(
cls,
pass_value: int,
now: GetTime,
replicating_conn: _ReplicationCapableConnection,
) -> VoucherStore:
initialize_database(replicating_conn)
return cls(pass_value=pass_value, now=now, connection=replicating_conn)
def snapshot(self) -> bytes:
"""
Create and return a consistent, self-contained snapshot of the underlying
database state.
"""
return snapshot(self._connection)
@with_cursor_async
async def call_if_empty(self, cursor, f: Callable[[Cursor], Awaitable[_T]]) -> _T:
"""
Transactionally determine that the database is empty and call the given
function if it is or raise ``NotEmpty`` if it is not.
The function may return an ``Awaitable``. If it does the transaction
opened for it will be kept open until the ``Awaitable`` completes.
"""
# After redeemed-voucher garbage collection is implemented, this won't
# be enough of a check. We should check the unblinded-tokens table
# (or call `count_unblinded_tokens`) and the
# `invalid-unblinded-tokens` table and maybe also look at lease
# maintenance spending.
wrapped = self.list.wrapped # type: ignore
if wrapped(self, cursor) == []:
return await f(cursor)
else:
raise NotEmpty("there is existing local state")
@with_cursor
def get(self, cursor: Cursor, voucher: bytes) -> Voucher:
"""
:param voucher: The text value of a voucher to retrieve.
:return: The voucher object that matches the given value.
"""
cursor.execute(
"""
SELECT
[number], [created], [expected-tokens], [state], [finished], [token-count], [public-key], [counter]
FROM
[vouchers]
WHERE
[number] = ?
""",
(voucher.decode("ascii"),),
)
refs = cursor.fetchall()
if len(refs) == 0:
raise KeyError(voucher)
return Voucher.from_row(refs[0])
@with_cursor
def add(
self,
cursor: _ReplicationCapableCursor,
voucher: bytes,
expected_tokens: int,
counter: int,
get_tokens: Callable[[], list[RandomToken]],
) -> list[RandomToken]:
"""
        Add random tokens associated with a voucher (possibly new, possibly
        existing) to the database. If tokens for the given (voucher,
        counter) pair are already present, return those instead of
        persisting anything new.
:param bytes voucher: The text value of a voucher with which to
associate the tokens.
:param int expected_tokens: The total number of tokens for which this
voucher is expected to be redeemed. This is only respected the
first time a voucher is added. Subsequent calls with the same
voucher but a different count ignore the value because it is
already known (and the database knows better than the caller what
it should be).
This probably means ``add`` is a broken interface for doing these
two things. Maybe it should be fixed someday.
:param int counter: The redemption counter for the given voucher with
which to associate the tokens.
        :param get_tokens: A no-argument callable which returns the random
            tokens to associate with the voucher if none are persisted yet.
"""
voucher_text = voucher.decode("ascii")
cursor.execute(
"""
SELECT [text]
FROM [tokens]
WHERE [voucher] = ? AND [counter] = ?
""",
(voucher_text, counter),
)
rows = cursor.fetchall()
if len(rows) > 0:
self._log.info(
"Loaded {count} random tokens for a voucher ({voucher}[{counter}]).",
count=len(rows),
voucher=voucher_text,
counter=counter,
)
tokens = list(
RandomToken(token_value.encode("ascii")) for (token_value,) in rows
)
else:
tokens = get_tokens()
self._log.info(
"Persisting {count} random tokens for a voucher ({voucher}[{counter}]).",
count=len(tokens),
voucher=voucher_text,
counter=counter,
)
with cursor.important():
cursor.execute(
"""
INSERT OR IGNORE INTO [vouchers] ([number], [expected-tokens], [created]) VALUES (?, ?, ?)
""",
(voucher_text, expected_tokens, self.now()),
)
cursor.executemany(
"""
INSERT INTO [tokens] ([voucher], [counter], [text]) VALUES (?, ?, ?)
""",
list(
(
voucher_text,
counter,
token.token_value.decode("ascii"),
)
for token in tokens
),
)
return tokens
@with_cursor
def list(self, cursor: Cursor) -> list[Voucher]:
"""
Get all known vouchers.
:return list[Voucher]: All vouchers known to the store.
"""
cursor.execute(
"""
SELECT
[number], [created], [expected-tokens], [state], [finished], [token-count], [public-key], [counter]
FROM
[vouchers]
""",
(),
)
refs = cursor.fetchall()
return list(Voucher.from_row(row) for row in refs)
@with_cursor
def insert_unblinded_tokens_for_voucher(
self,
cursor: Cursor,
voucher: bytes,
public_key: str,
unblinded_tokens: List[UnblindedToken],
completed: bool,
spendable: bool,
) -> None:
"""
Store some unblinded tokens received from redemption of a voucher.
:param bytes voucher: The voucher associated with the unblinded
tokens. This voucher will be marked as redeemed to indicate it
has fulfilled its purpose and has no further use for us.
:param str public_key: The encoded public key for the private key
which was used to sign these tokens.
:param list[UnblindedToken] unblinded_tokens: The unblinded tokens to
store.
:param bool completed: ``True`` if redemption of this voucher is now
complete, ``False`` otherwise.
:param bool spendable: ``True`` if it should be possible to spend the
inserted tokens, ``False`` otherwise.
"""
if completed:
voucher_state = "redeemed"
else:
voucher_state = "pending"
if spendable:
token_count_increase = len(unblinded_tokens)
sequestered_count_increase = 0
else:
token_count_increase = 0
sequestered_count_increase = len(unblinded_tokens)
voucher_text = voucher.decode("ascii")
cursor.execute(
"""
INSERT INTO [redemption-groups] ([voucher], [public-key], [spendable]) VALUES (?, ?, ?)
""",
(voucher_text, public_key, spendable),
)
group_id = cursor.lastrowid
self._log.info(
"Recording {count} {unspendable}spendable unblinded tokens from public key {public_key}.",
count=len(unblinded_tokens),
unspendable="" if spendable else "un",
public_key=public_key,
)
cursor.execute(
"""
UPDATE [vouchers]
SET [state] = ?
, [token-count] = COALESCE([token-count], 0) + ?
, [sequestered-count] = COALESCE([sequestered-count], 0) + ?
, [finished] = ?
, [counter] = [counter] + 1
WHERE [number] = ?
""",
(
voucher_state,
token_count_increase,
sequestered_count_increase,
self.now(),
voucher_text,
),
)
if cursor.rowcount == 0:
raise ValueError(
"Cannot insert tokens for unknown voucher; add voucher first"
)
cursor.execute(
"""
SELECT [counter] FROM [vouchers] WHERE [number] = ?
""",
(voucher_text,),
)
(new_counter,) = cursor.fetchone()
cursor.executemany(
"""
INSERT INTO [unblinded-tokens] ([token], [redemption-group]) VALUES (?, ?)
""",
list(
(token.unblinded_token.decode("ascii"), group_id)
for token in unblinded_tokens
),
)
self._delete_corresponding_tokens(cursor, voucher_text, new_counter - 1)
def _delete_corresponding_tokens(self, cursor, voucher: str, counter: int) -> None:
"""
Delete rows from the [tokens] table corresponding to the given redemption
group.
"""
cursor.execute(
"""
DELETE FROM [tokens] WHERE [voucher] = ? AND [counter] = ?
""",
(voucher, counter),
)
@with_cursor
def mark_voucher_double_spent(self, cursor: Cursor, voucher: bytes) -> None:
"""
Mark a voucher as having failed redemption because it has already been
spent.
"""
cursor.execute(
"""
UPDATE [vouchers]
SET [state] = "double-spend"
, [finished] = ?
WHERE [number] = ?
AND [state] = "pending"
""",
(self.now(), voucher.decode("ascii")),
)
if cursor.rowcount == 0:
# Was there no matching voucher or was it in the wrong state?
cursor.execute(
"""
SELECT [state]
FROM [vouchers]
WHERE [number] = ?
""",
(voucher.decode("ascii"),),
)
rows = cursor.fetchall()
if len(rows) == 0:
raise ValueError(f"Voucher {voucher!r} not found")
else:
raise ValueError(
f"Voucher {voucher!r} in state {rows[0][0]} cannot transition to double-spend",
)
@with_cursor
def get_unblinded_tokens(self, cursor: Cursor, count: int) -> List[UnblindedToken]:
"""
Get some unblinded tokens.
These tokens are not removed from the store but they will not be
returned from a future call to ``get_unblinded_tokens`` *on this
``VoucherStore`` instance* unless ``reset_unblinded_tokens`` is used
to reset their state.
        If the underlying storage is accessed via another ``VoucherStore``
instance then the behavior of this method will be as if all tokens
which have not had their state changed to invalid or spent have been
reset.
:raise NotEnoughTokens: If there are fewer than the requested number
of tokens available to be spent. In this case, all tokens remain
available to future calls and do not need to be reset.
        :return: The retrieved unblinded tokens.
"""
if count > _SQLITE3_INTEGER_MAX:
# An unreasonable number of tokens and also large enough to
# provoke undesirable behavior from the database.
raise NotEnoughTokens()
cursor.execute(
"""
SELECT T.[token]
FROM [unblinded-tokens] AS T, [redemption-groups] AS G
WHERE T.[redemption-group] = G.[rowid]
AND G.[spendable] = 1
AND T.[token] NOT IN [in-use]
LIMIT ?
""",
(count,),
)
texts = cursor.fetchall()
if len(texts) < count:
raise NotEnoughTokens()
cursor.executemany(
"""
INSERT INTO [in-use] VALUES (?)
""",
texts,
)
return list(UnblindedToken(t.encode("ascii")) for (t,) in texts)
@with_cursor
def count_random_tokens(self, cursor) -> int:
"""
:return: The number of random tokens present in the database. This is
usually not interesting but it is exposed so the test suite can check
invariants related to it.
"""
cursor.execute("SELECT count(1) FROM [tokens]")
(count,) = cursor.fetchone()
return count
@with_cursor
def count_unblinded_tokens(self, cursor: Cursor) -> int:
"""
Return the largest number of unblinded tokens that can be requested from
``get_unblinded_tokens`` without causing it to raise
``NotEnoughTokens``.
"""
cursor.execute(
"""
SELECT count(1)
FROM [unblinded-tokens] AS T, [redemption-groups] AS G
WHERE T.[redemption-group] = G.[rowid]
AND G.[spendable] = 1
AND T.[token] NOT IN [in-use]
""",
(),
)
(count,) = cursor.fetchone()
return count
@with_cursor
def discard_unblinded_tokens(
self, cursor: Cursor, unblinded_tokens: List[UnblindedToken]
) -> None:
"""
Get rid of some unblinded tokens. The tokens will be completely removed
from the system. This is useful when the tokens have been
successfully spent.
:param list[UnblindedToken] unblinded_tokens: The tokens to discard.
:return: ``None``
"""
cursor.executemany(
"""
INSERT INTO [to-discard] VALUES (?)
""",
list(
(token.unblinded_token.decode("ascii"),) for token in unblinded_tokens
),
)
cursor.execute(
"""
DELETE FROM [in-use]
WHERE [unblinded-token] IN [to-discard]
""",
(),
)
cursor.execute(
"""
DELETE FROM [unblinded-tokens]
WHERE [token] IN [to-discard]
""",
(),
)
cursor.execute(
"""
DELETE FROM [to-discard]
""",
(),
)
@with_cursor
def invalidate_unblinded_tokens(
self, cursor: Cursor, reason: str, unblinded_tokens: List[UnblindedToken]
) -> None:
"""
Mark some unblinded tokens as invalid and unusable. Some record of the
tokens may be retained for future inspection. These tokens will not
be returned by any future ``get_unblinded_tokens`` call. This is
useful when an attempt to spend a token has met with rejection by the
validator.
:param unblinded_tokens: The tokens to mark.
:return: ``None``
"""
cursor.executemany(
"""
INSERT INTO [invalid-unblinded-tokens] VALUES (?, ?)
""",
list(
(token.unblinded_token.decode("ascii"), reason)
for token in unblinded_tokens
),
)
cursor.execute(
"""
DELETE FROM [in-use]
WHERE [unblinded-token] IN (SELECT [token] FROM [invalid-unblinded-tokens])
""",
(),
)
cursor.execute(
"""
DELETE FROM [unblinded-tokens]
WHERE [token] IN (SELECT [token] FROM [invalid-unblinded-tokens])
""",
(),
)
@with_cursor
def reset_unblinded_tokens(
self, cursor: Cursor, unblinded_tokens: List[UnblindedToken]
) -> None:
"""
Make some unblinded tokens available to be retrieved from the store again.
This is useful if a spending operation has failed with a transient
error.
"""
cursor.executemany(
"""
INSERT INTO [to-reset] VALUES (?)
""",
list(
(token.unblinded_token.decode("ascii"),) for token in unblinded_tokens
),
)
cursor.execute(
"""
DELETE FROM [in-use]
WHERE [unblinded-token] IN [to-reset]
""",
(),
)
cursor.execute(
"""
DELETE FROM [to-reset]
""",
(),
)
def start_lease_maintenance(self) -> LeaseMaintenance:
"""
Get an object which can track a newly started round of lease maintenance
activity.
:return LeaseMaintenance: A new, started lease maintenance object.
"""
m = LeaseMaintenance(self.pass_value, self.now, self._connection)
m.start()
return m
@with_cursor
def get_latest_lease_maintenance_activity(
self, cursor: Cursor
) -> Optional[LeaseMaintenanceActivity]:
"""
Get a description of the most recently completed lease maintenance
activity.
:return LeaseMaintenanceActivity|None: If any lease maintenance has
completed, an object describing its results. Otherwise, None.
"""
cursor.execute(
"""
SELECT [started], [count], [finished]
FROM [lease-maintenance-spending]
WHERE [finished] IS NOT NULL
ORDER BY [finished] DESC
LIMIT 1
""",
(),
)
activity = cursor.fetchall()
if len(activity) == 0:
return None
[(started, count, finished)] = activity
return LeaseMaintenanceActivity(
parse_datetime(started, delimiter=" "),
count,
parse_datetime(finished, delimiter=" "),
)
@implementer(ILeaseMaintenanceObserver)
@define
class LeaseMaintenance(object):
"""
A state-updating helper for recording pass usage during a lease
maintenance run.
Get one of these from ``VoucherStore.start_lease_maintenance``. Then use
the ``observe`` and ``finish`` methods to persist state about a lease
maintenance run.
:ivar int _pass_value: The value of a single ZKAP in byte-months.
:ivar _now: A no-argument callable which returns a datetime giving a time
to use as current.
:ivar _connection: A SQLite3 connection object to use to persist observed
information.
:ivar _rowid: None for unstarted lease maintenance objects. For started
objects, the database row id that corresponds to the started run.
This is used to make sure future updates go to the right row.
"""
_pass_value: int
_now: GetTime
_connection: _ReplicationCapableConnection
_rowid: Optional[int] = None
@with_cursor
def start(self, cursor: Cursor) -> None:
"""
Record the start of a lease maintenance run.
"""
if self._rowid is not None:
            raise Exception("Cannot re-start a particular LeaseMaintenance.")
cursor.execute(
"""
INSERT INTO [lease-maintenance-spending] ([started], [finished], [count])
VALUES (?, ?, ?)
""",
(self._now(), None, 0),
)
self._rowid = cursor.lastrowid
@with_cursor
def observe(self, cursor: Cursor, sizes: list[int]) -> None:
"""
        Record storage shares of the given sizes.
"""
count = required_passes(self._pass_value, sizes)
cursor.execute(
"""
UPDATE [lease-maintenance-spending]
SET [count] = [count] + ?
WHERE [id] = ?
""",
(count, self._rowid),
)
@with_cursor
def finish(self, cursor):
"""
Record the completion of this lease maintenance run.
"""
cursor.execute(
"""
UPDATE [lease-maintenance-spending]
SET [finished] = ?
WHERE [id] = ?
""",
(self._now(), self._rowid),
)
self._rowid = None
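# A minimal usage sketch (``store`` is a VoucherStore; the cursor arguments
# above are injected by the ``with_cursor`` decorator):
#     maintenance = store.start_lease_maintenance()
#     maintenance.observe([2 ** 20])
#     maintenance.finish()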
@frozen
class LeaseMaintenanceActivity(object):
started: datetime = field(validator=aware_datetime_validator)
passes_required: int
finished: datetime = field(validator=aware_datetime_validator)
@frozen(order=True)
class UnblindedToken(object):
"""
An ``UnblindedToken`` instance represents cryptographic proof of a voucher
redemption. It is an intermediate artifact in the PrivacyPass protocol
and can be used to construct a privacy-preserving pass which can be
exchanged for service.
:ivar bytes unblinded_token: The base64 encoded serialized form of the
unblinded token. This can be used to reconstruct a
``challenge_bypass_ristretto.UnblindedToken`` using that class's
``decode_base64`` method.
"""
unblinded_token = field(
validator=attr.validators.and_(
attr.validators.instance_of(bytes),
is_base64_encoded(),
has_length(128),
),
)
@frozen
class Pass(object):
"""
A ``Pass`` instance completely represents a single Zero-Knowledge Access Pass.
"""
preimage = field(
validator=attr.validators.and_(
attr.validators.instance_of(bytes),
is_base64_encoded(),
has_length(88),
),
)
signature = field(
validator=attr.validators.and_(
attr.validators.instance_of(bytes),
is_base64_encoded(),
has_length(88),
),
)
@property
def pass_bytes(self):
"""
        The byte string representation of the pass.
        This can be sent to a service provider one time to anonymously prove
        a prior voucher redemption. If it is sent more than once, the service
        provider may reject it and the anonymity property is compromised.
        Keep this value secret: divulging it to third parties may also
        compromise anonymity.
"""
return b" ".join((self.preimage, self.signature))
@classmethod
def from_bytes(cls, pass_):
return cls(*pass_.split(b" "))
@frozen
class RandomToken(object):
"""
:ivar bytes token_value: The base64-encoded representation of the random
token.
"""
token_value = field(
validator=attr.validators.and_(
attr.validators.instance_of(bytes),
is_base64_encoded(),
has_length(128),
),
)
def _counter_attribute():
return field(
validator=attr.validators.and_(
attr.validators.instance_of(int),
greater_than(-1),
),
)
@frozen
class Pending(object):
"""
The voucher has not yet been completely redeemed for ZKAPs.
:ivar int counter: The number of partial redemptions which have been
successfully performed for the voucher.
"""
counter: int = _counter_attribute()
def should_start_redemption(self):
return True
def to_json_v1(self):
return {
"name": "pending",
"counter": self.counter,
}
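# A minimal sketch of the duck-typed state interface: every voucher state
# class here offers ``should_start_redemption`` and ``to_json_v1``, e.g.:
#     state = Pending(counter=0)
#     assert state.should_start_redemption()
#     assert state.to_json_v1() == {"name": "pending", "counter": 0}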
@frozen
class Redeeming(object):
"""
This is a non-persistent state in which a voucher exists when the database
state is **pending** but for which there is a redemption operation in
progress.
"""
started: datetime = field(validator=aware_datetime_validator)
counter: int = _counter_attribute()
def should_start_redemption(self):
return False
def to_json_v1(self):
return {
"name": "redeeming",
"started": self.started.isoformat(),
"counter": self.counter,
}
@frozen
class Redeemed(object):
"""
The voucher was successfully redeemed. Associated tokens were retrieved
and stored locally.
:ivar finished: The time when the redemption finished.
:ivar token_count: The number of tokens the voucher was redeemed for.
"""
finished: datetime = field(validator=aware_datetime_validator)
token_count: int
def should_start_redemption(self):
return False
def to_json_v1(self):
return {
"name": "redeemed",
"finished": self.finished.isoformat(),
"token-count": self.token_count,
}
@frozen
class DoubleSpend(object):
finished: datetime = field(validator=aware_datetime_validator)
def should_start_redemption(self):
return False
def to_json_v1(self):
return {
"name": "double-spend",
"finished": self.finished.isoformat(),
}
@frozen
class Unpaid(object):
"""
This is a non-persistent state in which a voucher exists when the database
state is **pending** but the most recent redemption attempt has failed due
to lack of payment.
"""
finished: datetime = field(validator=aware_datetime_validator)
def should_start_redemption(self):
return True
def to_json_v1(self):
return {
"name": "unpaid",
"finished": self.finished.isoformat(),
}
@frozen
class Error(object):
"""
This is a non-persistent state in which a voucher exists when the database
state is **pending** but the most recent redemption attempt has failed due
to an error that is not handled by any other part of the system.
"""
finished: datetime = field(validator=aware_datetime_validator)
details: str
def should_start_redemption(self):
return True
def to_json_v1(self):
return {
"name": "error",
"finished": self.finished.isoformat(),
"details": self.details,
}
@frozen
class Voucher(object):
"""
:ivar number: The byte string which gives this voucher its identity.
:ivar expected_tokens: The total number of tokens for which we expect to
be able to redeem this voucher. Tokens are redeemed in smaller
groups, progress of which is tracked in ``state``. This only gives
the total we expect to reach at completion.
:ivar created: The time at which this voucher was added to this node.
:ivar state: An indication of the current state of this voucher. This is
an instance of ``Pending``, ``Redeeming``, ``Redeemed``,
``DoubleSpend``, ``Unpaid``, or ``Error``.
"""
number: bytes = field(
validator=attr.validators.and_(
attr.validators.instance_of(bytes),
is_base64_encoded(urlsafe_b64decode),
has_length(44),
),
)
expected_tokens: Optional[int] = field(
validator=attr.validators.optional(
attr.validators.and_(
attr.validators.instance_of(int),
greater_than(0),
),
),
)
created: Optional[datetime] = field(
default=None,
validator=attr.validators.optional(aware_datetime_validator),
)
state = field(
default=Pending(counter=0),
validator=attr.validators.instance_of(
(
Pending,
Redeeming,
Redeemed,
DoubleSpend,
Unpaid,
Error,
)
),
)
@classmethod
def from_row(cls, row):
def state_from_row(state, row):
if state == "pending":
return Pending(counter=row[3])
if state == "double-spend":
return DoubleSpend(
parse_datetime(row[0], delimiter=" "),
)
if state == "redeemed":
return Redeemed(
parse_datetime(row[0], delimiter=" "),
row[1],
)
raise ValueError("Unknown voucher state {}".format(state))
number, created, expected_tokens, state = row[:4]
return cls(
number=number.encode("ascii"),
expected_tokens=expected_tokens,
# All Python datetime-based date/time libraries fail to handle
            # leap seconds.  This parse call might raise an exception if the
# value represents a leap second. However, since we also use
# Python to generate the data in the first place, it should never
# represent a leap second... I hope.
created=parse_datetime(created, delimiter=" "),
state=state_from_row(state, row[4:]),
)
@classmethod
def from_json(cls, json):
values = loads(json)
version = values.pop("version")
return getattr(cls, "from_json_v{}".format(version))(values)
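    # ``from_json`` dispatches on the embedded version number, so a document
    # like ``{"version": 1, ...}`` is routed to ``from_json_v1`` below;
    # supporting a hypothetical v2 format would only require adding a
    # ``from_json_v2`` classmethod.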
@classmethod
def from_json_v1(cls, values):
state_json = values["state"]
state_name = state_json["name"]
if state_name == "pending":
state = Pending(counter=state_json["counter"])
elif state_name == "redeeming":
state = Redeeming(
started=parse_datetime(state_json["started"]),
counter=state_json["counter"],
)
elif state_name == "double-spend":
state = DoubleSpend(
finished=parse_datetime(state_json["finished"]),
)
elif state_name == "redeemed":
state = Redeemed(
finished=parse_datetime(state_json["finished"]),
token_count=state_json["token-count"],
)
elif state_name == "unpaid":
state = Unpaid(
finished=parse_datetime(state_json["finished"]),
)
elif state_name == "error":
state = Error(
finished=parse_datetime(state_json["finished"]),
details=state_json["details"],
)
else:
raise ValueError("Unrecognized state {!r}".format(state_json))
return cls(
number=values["number"].encode("ascii"),
expected_tokens=values["expected-tokens"],
created=None
if values["created"] is None
else parse_datetime(values["created"]),
state=state,
)
def to_json(self):
return dumps_utf8(self.marshal())
def marshal(self):
return self.to_json_v1()
def to_json_v1(self):
state = self.state.to_json_v1()
return {
"number": self.number.decode("ascii"),
"expected-tokens": self.expected_tokens,
"created": None if self.created is None else self.created.isoformat(),
"state": state,
"version": 1,
        }
| zero-knowledge-access-pass-authorizer | /zero_knowledge_access_pass_authorizer-2022.8.21-py3-none-any.whl/_zkapauthorizer/model.py | model.py |
# Zero Motorcycles
[](https://pypi.org/project/zero-motorcycles)
[](https://pypi.org/project/zero-motorcycles)
-----
**Table of Contents**
- [Installation](#installation)
- [Usage](#usage)
- [License](#license)
## Installation
```console
pip install zero-motorcycles
```
## Usage
```python
from zero_motorcycles import Zero
zero_client = Zero(username="email", password="password")
# Get units
zero_client.get_units()
# Get last transmit data for a specific unit
zero_client.get_last_transmit(123456)
# Get subscription expiration for a specific unit
zero_client.get_expiration_date(123456)
```
## License
`zero-motorcycles` is distributed under the terms of the [BSD 3-Clause](https://spdx.org/licenses/BSD-3-Clause.html) license.
| zero-motorcycles | /zero_motorcycles-1.0.1.tar.gz/zero_motorcycles-1.0.1/README.md | README.md |
import base64
from hashlib import md5
import json
import requests.auth
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad
from Crypto.Random import get_random_bytes
from requests.exceptions import HTTPError
class Zero:
API_URL = 'https://api-us-cypherstore-prod.zeromotorcycles.com/starcom/v1'
def __init__(self, username, password):
self.auth = ZeroAuth(username, password)
    def get_units(self):
data = {
"commandname": "get_units"
}
return self._make_request(data)
    def get_last_transmit(self, unit):
data = {
"unitnumber": unit,
"commandname": "get_last_transmit"
}
return self._make_request(data)
    def get_expiration_date(self, unit):
data = {
"unitnumber": unit,
"unittype": 5,
"commandname": "get_expiration_date"
}
return self._make_request(data)
def _make_request(self, data):
response = requests.post(self.API_URL, json=data, auth=self.auth, headers={"User-Agent": "ZeroMoto/1.0"})
# Check for the usual errors
response.raise_for_status()
json_data = response.json()
        # raise_for_status() only covers 4xx/5xx; handle the API's custom
        # 6xx error codes here.
        if response.status_code >= 600:
raise HTTPError(json_data['error'], response=response)
return json_data
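def _decrypt_payload(encrypted: str, passphrase: bytes) -> bytes:
    """ A minimal sketch of the matching decryption, for illustration only.
    This is a hypothetical helper, not part of the published API. It assumes
    the OpenSSL-style framing produced by ``ZeroAuth._encrypt`` below:
    base64 of b"Salted__" + 8-byte salt + AES-CBC ciphertext, with the key
    and IV derived by ``ZeroAuth._bytes_to_key``.
    """
    from Crypto.Util.Padding import unpad
    raw = base64.b64decode(encrypted)
    assert raw[:8] == b"Salted__", "unexpected payload framing"
    salt = raw[8:16]
    key_iv = ZeroAuth._bytes_to_key(passphrase, salt)
    aes = AES.new(key_iv[:32], AES.MODE_CBC, key_iv[32:])
    return unpad(aes.decrypt(raw[16:]), AES.block_size)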
class ZeroAuth(requests.auth.AuthBase):
ENCRYPTION_KEY = "8FA043AADEC92367108D0E25D2C6064F"
SOURCE = "zero"
FORMAT = "json"
def __init__(self, username, password):
self.username = username
self.password = password
def __eq__(self, other):
return all(
[
self.username == getattr(other, "username", None),
self.password == getattr(other, "password", None),
]
)
def __ne__(self, other):
return not self == other
def __call__(self, r):
# Decode and turn the body back into JSON so we can edit it
data = json.loads(r.body.decode())
# Add some additional keys to the JSON body
data["format"] = self.FORMAT
data["source"] = self.SOURCE
data["user"] = self.username
data["pass"] = self.password
# Encrypt the payload
encrypted = self._encrypt(json.dumps(data).encode())
# Set the request body to our newly encrypted value
r.body = json.dumps({"data": encrypted}).encode()
return r
def _encrypt(self, message):
# from https://stackoverflow.com/a/36780727
salt = get_random_bytes(8)
key_iv = self._bytes_to_key(self.ENCRYPTION_KEY.encode(), salt)
key = key_iv[:32]
iv = key_iv[32:]
aes = AES.new(key, AES.MODE_CBC, iv)
return base64.b64encode(b"Salted__" + salt + aes.encrypt(pad(message, AES.block_size))).decode()
@staticmethod
def _bytes_to_key(data, salt, output=48):
# from https://stackoverflow.com/a/36780727
# extended from https://gist.github.com/gsakkis/4546068
assert len(salt) == 8, len(salt)
data += salt
key = md5(data).digest()
final_key = key
while len(final_key) < output:
key = md5(key + data).digest()
final_key += key
        return final_key[:output]
| zero-motorcycles | /zero_motorcycles-1.0.1.tar.gz/zero_motorcycles-1.0.1/zero_motorcycles/Zero.py | Zero.py |
from PySide6.QtCore import QMargins
from PySide6.QtGui import QResizeEvent, QFontMetrics, Qt
from PySide6.QtWidgets import QLabel
class ScaledLabel(QLabel):
def resizeEvent(self, event: QResizeEvent):
# This flag is used for pixmaps, but I thought it might be useful to
# be able to disable font scaling.
if not self.hasScaledContents():
return
self.update_margins()
target_rect = self.contentsRect()
text = self.text()
# Use binary search to efficiently find the biggest font that will fit.
max_size = self.height()
min_size = 1
font = self.font()
while min_size < max_size:
new_size = (min_size + max_size) // 2
font.setPointSize(new_size)
metrics = QFontMetrics(font)
# Be careful which overload of boundingRect() you call.
rect = metrics.boundingRect(target_rect,
Qt.AlignmentFlag.AlignLeft,
text,
tabstops=0,
tabarray=None) # type: ignore
if (target_rect.width() < rect.width() or
target_rect.height() < rect.height()):
max_size = new_size - 1
elif (rect.width() == target_rect.width() or
rect.height() == target_rect.height()):
min_size = max_size = new_size
else:
min_size = new_size + 1
font.setPointSize(min_size)
self.setFont(font)
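    # Usage sketch (illustrative): the scaling above only runs when scaled
    # contents are enabled, e.g.
    #     label = ScaledLabel('Score')
    #     label.setScaledContents(True)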
def update_margins(self):
pixmap = self.pixmap()
if pixmap is None:
return
pixmap_width = pixmap.width()
pixmap_height = pixmap.height()
target_rect = self.contentsRect()
margins = self.contentsMargins()
target_width = target_rect.width() + margins.left() + margins.right()
target_height = target_rect.height() + margins.top() + margins.bottom()
if pixmap_width == 0 or pixmap_height == 0:
new_margins = QMargins()
elif target_width * pixmap_height < target_height * pixmap_width:
m = target_height - pixmap_height * target_width // pixmap_width
# noinspection PyUnresolvedReferences
vertical_alignment = self.alignment() & Qt.AlignVertical_Mask
if vertical_alignment == Qt.AlignTop:
new_margins = QMargins(0, 0, 0, m)
elif vertical_alignment == Qt.AlignBottom:
new_margins = QMargins(0, m, 0, 0)
else:
assert vertical_alignment == Qt.AlignVCenter, vertical_alignment
new_margins = QMargins(0, m//2, 0, m//2)
else:
m = target_width - pixmap_width * target_height // pixmap_height
# noinspection PyUnresolvedReferences
horizontal_alignment = self.alignment() & Qt.AlignHorizontal_Mask
if horizontal_alignment == Qt.AlignLeft:
new_margins = QMargins(0, 0, m, 0)
elif horizontal_alignment == Qt.AlignRight:
new_margins = QMargins(m, 0, 0, 0)
else:
assert horizontal_alignment == Qt.AlignHCenter, horizontal_alignment
new_margins = QMargins(m//2, 0, m//2, 0)
if new_margins != margins:
            self.setContentsMargins(new_margins)
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/scaled_label.py | scaled_label.py |
from PySide6.QtCore import QSize
from PySide6.QtGui import QResizeEvent, QFontMetrics, Qt
from PySide6.QtWidgets import QRadioButton
class ScaledRadioButton(QRadioButton):
def resizeEvent(self, event: QResizeEvent):
target_rect = self.contentsRect()
text = self.text()
icon = self.icon()
icon_spacing = 4 # Constant?
# Use binary search to efficiently find the biggest font that will fit.
min_fail = self.height() # Smallest known to fail.
max_size = min_fail # Largest left in search space.
min_size = 1 # Smallest left in search space.
max_pass = min_size # Largest known to pass.
font = self.font()
while max_pass+1 < min_fail:
new_size = (min_size + max_size) // 2
indicator_width = new_size
indicator_spacing = new_size // 2
font.setPointSize(new_size)
metrics = QFontMetrics(font)
# Be careful which overload of boundingRect() you call.
rect = metrics.boundingRect(target_rect,
Qt.AlignmentFlag.AlignLeft,
text,
tabstops=0,
tabarray=None) # type: ignore
full_width = indicator_width + indicator_spacing + rect.width()
height = rect.height()
if icon:
icon_size = new_size * 3 // 2
full_width += icon_size
if text:
full_width += icon_spacing
height = max(height, icon_size)
if (target_rect.width() < full_width or
target_rect.height() < height):
min_fail = new_size
max_size = new_size - 1
elif (full_width == target_rect.width() or
height == target_rect.height()):
max_pass = min_size = max_size = new_size
min_fail = new_size+1
else:
max_pass = new_size
min_size = new_size+1
indicator_width = max_pass
indicator_spacing = max_pass // 2
if icon:
icon_size = max_pass*3//2
self.setIconSize(QSize(icon_size, icon_size))
font.setPointSize(max_pass)
self.setFont(font)
self.setStyleSheet(f'QRadioButton::indicator {{width: {indicator_width}}} '
                           f'QRadioButton {{spacing: {indicator_spacing}}}')
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/scaled_radio_button.py | scaled_radio_button.py |
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from importlib import import_module
from zero_play.game_state import GameState
from zero_play.mcts_player import MctsPlayer
from zero_play.play_controller import PlayController
def parse_args():
# noinspection PyTypeChecker
parser = ArgumentParser(description='Run a game scenario with Zero Play, '
'and report the slowest code.',
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('game',
default='zero_play.tictactoe.state.TicTacToeState',
nargs='?',
help='Game state class to test with.')
parser.add_argument('game_count',
type=int,
default=1,
nargs='?',
help='Limit the number of games to run.')
parser.add_argument('--flip',
action='store_true',
help='Flip first player after every game.')
parser.add_argument('--iter1',
type=int,
default=100,
help='Number of search iterations for player 1.')
parser.add_argument('--iter2',
type=int,
default=100,
help='Number of search iterations for player 2.')
parser.add_argument('--processes1',
type=int,
default=1,
help='Number of parallel search processes for player 1.')
parser.add_argument('--processes2',
type=int,
default=1,
help='Number of parallel search processes for player 2.')
parser.add_argument('--display',
action='store_true',
help='Display moves in the games.')
return parser.parse_args()
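# Example invocation (assumes the package is importable; game path and
# counts are illustrative):
#     python -m zero_play.zero_perf zero_play.tictactoe.state.TicTacToeState \
#         10 --iter1 200 --iter2 50 --display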
def main() -> None:
args = parse_args()
class_path = args.game
class_parts = class_path.split('.')
class_name = class_parts.pop()
module_name = '.'.join(class_parts)
module = import_module(module_name)
game_state_class = getattr(module, class_name)
start_state: GameState = game_state_class()
player1 = MctsPlayer(start_state,
iteration_count=args.iter1,
process_count=args.processes1)
player2 = MctsPlayer(start_state,
iteration_count=args.iter2,
process_count=args.processes2)
controller = PlayController(start_state, [player1, player2])
controller.play(args.game_count, args.flip, args.display)
if __name__ == '__main__':
    main()
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/zero_perf.py | zero_perf.py |
import typing
from datetime import datetime
from zero_play.game_state import GameState
from zero_play.player import Player
class PlayerResults:
def __init__(self, player: Player):
self.player = player
self.total_time = 0.0
self.move_count = self.win_count = 0
self.summary = player.get_summary()
def get_summary(self):
if self.move_count:
move_time = self.total_time / self.move_count
else:
move_time = 0.0
return f'{self.summary} - {self.win_count} wins, {move_time:0.3}s/move'
class PlayController:
def __init__(self, start_state: GameState, players: typing.Sequence[Player]):
self.board = self.start_state = start_state
x_player: Player
o_player: Player
x_player, o_player = players
x_player.player_number = start_state.players[0]
o_player.player_number = start_state.players[1]
self.players = {x_player.player_number: x_player,
o_player.player_number: o_player}
self.results = [PlayerResults(x_player), PlayerResults(o_player)]
self.start_game()
def start_game(self):
self.board = self.start_state
def take_turn(self) -> bool:
""" Take one turn in the game, and return True if the game is over. """
player_number = self.board.get_active_player()
player = self.players[player_number]
start_time = datetime.now()
move = player.choose_move(self.board)
move_duration = datetime.now() - start_time
player_results = self.get_player_results(player)
player_results.total_time += move_duration.total_seconds()
player_results.move_count += 1
self.board = self.board.make_move(move)
if not self.board.is_ended():
return False
other_player = None
for other_player in self.players.values():
if other_player is not player:
break
player.end_game(self.board, other_player)
other_player.end_game(self.board, player)
return True
def get_player_results(self, player: Player) -> PlayerResults:
for player_results in self.results:
if player_results.player is player:
return player_results
raise ValueError('Player not found.')
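    # A minimal usage sketch (players as in ``zero_perf.main``):
    #     controller = PlayController(start_state, [player1, player2])
    #     x_wins, ties, o_wins = controller.play(games=10, flip=True)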
def play(self, games: int = 1, flip: bool = False, display: bool = False):
x_number = self.start_state.players[0]
o_number = self.start_state.players[1]
current_x = original_x = self.players[x_number]
current_o = original_o = self.players[o_number]
self.results = [PlayerResults(current_x), PlayerResults(current_o)]
ties = 0
for i in range(games):
if i and flip:
current_x = self.players[o_number]
current_o = self.players[x_number]
current_x.player_number = x_number
current_o.player_number = o_number
self.players[x_number] = current_x
self.players[o_number] = current_o
while True:
if display:
print(self.board.display(show_coordinates=True))
if self.take_turn():
break
if display:
print(self.board.display(show_coordinates=True))
if self.board.is_win(x_number):
self.get_player_results(current_x).win_count += 1
elif self.board.is_win(o_number):
self.get_player_results(current_o).win_count += 1
else:
ties += 1
self.start_game()
original_x.player_number = x_number
original_o.player_number = o_number
self.players[x_number] = original_x
self.players[o_number] = original_o
for player_results in self.results:
print(player_results.get_summary())
print(ties, 'ties')
x_results = self.get_player_results(original_x)
o_results = self.get_player_results(original_o)
        return x_results.win_count, ties, o_results.win_count
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/play_controller.py | play_controller.py |
from abc import abstractmethod
import typing
import numpy as np
from PySide6.QtCore import Signal, QThread, QSize, Slot
from PySide6.QtGui import QResizeEvent
from PySide6.QtWidgets import QGraphicsSimpleTextItem
from zero_play.game_state import GameState
from zero_play.log_display import LogDisplay
from zero_play.mcts_player import MctsPlayer
from zero_play.mcts_worker import MctsWorker
from zero_play.process_display import ProcessDisplay
class GameDisplay(ProcessDisplay):
rules_path: typing.Optional[str] = None
move_needed = Signal(int, np.ndarray) # active_player, board
move_made = Signal(np.ndarray) # board
game_ended = Signal(np.ndarray) # final_board
def __init__(self, start_state: GameState):
super().__init__()
self.start_state = start_state
self.mcts_workers: typing.Dict[int, MctsWorker] = {}
self.current_state = self.start_state
self.valid_moves = self.start_state.get_valid_moves()
self._show_coordinates = False
self.log_display = LogDisplay()
self.is_reviewing = False
@property
def show_coordinates(self):
return self._show_coordinates
@show_coordinates.setter
def show_coordinates(self, value):
self._show_coordinates = value
# noinspection PyUnresolvedReferences
scene = self.scene()
size = QSize(scene.width(), scene.height())
self.resizeEvent(QResizeEvent(size, size))
@property
def mcts_players(self):
return [worker.player for worker in self.mcts_workers.values()]
@mcts_players.setter
def mcts_players(self, players: typing.Sequence[MctsPlayer]):
self.stop_workers()
self.log_display = LogDisplay()
self.mcts_workers = {player.player_number: MctsWorker(player)
for player in players}
if not self.mcts_workers:
self.worker_thread = None
else:
self.worker_thread = QThread()
self.worker_thread.finished.connect( # type: ignore
self.worker_thread.deleteLater)
for worker in self.mcts_workers.values():
worker.move_chosen.connect(self.make_move) # type: ignore
worker.move_analysed.connect(self.analyse_move) # type: ignore
self.move_needed.connect(worker.choose_move) # type: ignore
self.move_made.connect(worker.analyse_move) # type: ignore
worker.moveToThread(self.worker_thread)
self.worker_thread.start()
def get_player(self, player_number: int) -> typing.Optional[MctsPlayer]:
worker = self.mcts_workers.get(player_number)
if worker:
return worker.player
return None
@abstractmethod
def update_board(self, board: GameState):
""" Update self.scene, based on the state in board.
It's probably also helpful to override resizeEvent().
:param board: the state of the game to display.
"""
def resizeEvent(self, event: QResizeEvent):
self.update_board(self.current_state)
@property
def credit_pairs(self) -> typing.Iterable[typing.Tuple[str, str]]:
""" Return a list of label and detail pairs.
These are displayed in the about box.
"""
return ()
def choose_active_text(self):
active_player = self.current_state.get_active_player()
if active_player in self.mcts_workers:
return 'thinking'
return 'to move'
@Slot(int) # type: ignore
def make_move(self, move: int):
self.log_display.record_move(self.current_state, move)
# noinspection PyUnresolvedReferences
self.move_made.emit(self.current_state) # type: ignore
self.current_state = self.current_state.make_move(move)
self.update_board(self.current_state)
if self.current_state.is_ended():
# noinspection PyUnresolvedReferences
self.game_ended.emit(self.current_state) # type: ignore
forced_move = self.get_forced_move()
if forced_move is None:
self.request_move()
else:
self.make_move(forced_move)
def get_forced_move(self) -> typing.Optional[int]:
""" Override this method if some moves should be forced.
        Look at self.valid_moves and self.current_state to decide.
:return: move number, or None if there is no forced move.
"""
return None
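    # For example, a hypothetical subclass could force the only legal move:
    #     def get_forced_move(self):
    #         moves = np.flatnonzero(self.valid_moves)
    #         return int(moves[0]) if len(moves) == 1 else None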
@Slot(GameState, int, list) # type: ignore
def analyse_move(
self,
board: GameState,
analysing_player: int,
move_probabilities: typing.List[typing.Tuple[str,
float,
int,
float]]):
self.log_display.analyse_move(board,
analysing_player,
move_probabilities)
def request_move(self):
if self.current_state.is_ended():
return
player = self.current_state.get_active_player()
# noinspection PyUnresolvedReferences
self.move_needed.emit(player, self.current_state)
def can_move(self):
if self.is_reviewing:
return False
        return self.current_state.get_active_player() not in self.mcts_workers
def center_text_item(item: QGraphicsSimpleTextItem, x: float, y: float):
bounds = item.boundingRect()
x -= bounds.width() // 2
y -= bounds.height() // 2
    item.setPos(x, y)
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/game_display.py | game_display.py |
import typing
from zero_play.game_state import GameState
from zero_play.models import SessionBase
from zero_play.models.game import GameRecord
from zero_play.models.match import MatchRecord
from zero_play.models.match_player import MatchPlayerRecord
from zero_play.plot_canvas import PlotCanvas
from zero_play.tictactoe.state import TicTacToeState
class StrengthHistoryPlot(PlotCanvas):
def __init__(self, parent=None) -> None:
self.game: GameState = TicTacToeState()
super().__init__(parent)
def fetch_strengths(self, db_session) -> typing.List[int]:
if db_session is None:
return []
game_record = GameRecord.find_or_create(db_session, self.game)
strengths = []
match: MatchRecord
# noinspection PyTypeChecker
for match in game_record.matches: # type: ignore
match_player: MatchPlayerRecord
has_human = False
ai_player = None
for match_player in match.match_players: # type: ignore
player = match_player.player
if player.type == player.HUMAN_TYPE:
has_human = True
else:
ai_player = player
if has_human and ai_player is not None:
assert ai_player.iterations is not None
strengths.append(ai_player.iterations)
return strengths
def requery(self, db_session: SessionBase | None, future_strength: int):
strengths = self.fetch_strengths(db_session)
self.axes.clear()
marker = 'o' if len(strengths) == 1 else ''
self.axes.plot(strengths, marker, label='past')
self.axes.plot([len(strengths)], [future_strength], 'o', label='next')
self.axes.set_ylim(0)
if len(strengths) + 1 < len(self.axes.get_xticks()):
self.axes.set_xticks(list(range(len(strengths) + 1)))
self.axes.set_title('Search iterations over time')
self.axes.set_ylabel('Search iterations')
self.axes.set_xlabel('Number of games played')
self.axes.legend(loc='lower right')
        self.axes.figure.canvas.draw()
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/strength_history_plot.py | strength_history_plot.py |
import typing
from dataclasses import dataclass
from decimal import Decimal
from zero_play.game_state import GameState
@dataclass
class LogItem:
step: int
player: str
move_text: str
game_state: GameState
comment: str = ''
# [(move_display, probability, value_count, avg_value)]
choices: typing.Sequence[typing.Tuple[str, float, int, float]] = ()
def __str__(self):
suffix = f' ({self.comment})' if self.comment else ''
return f'{self.step}: {self.player} - {self.move_text}{suffix}'
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return (self.step == other.step and
self.player == other.player and
self.move_text == other.move_text and
self.game_state == other.game_state and
self.comment == other.comment and
self.choices == other.choices)
class LogDisplay:
def __init__(self) -> None:
self.step = 0
self.items: typing.List[LogItem] = []
self.offsets: typing.List[int] = []
def record_move(self, game_state: GameState, move: int):
self.step += 1
player = game_state.display_player(game_state.get_active_player())
move_text = game_state.display_move(move)
self.items.append(LogItem(self.step, player, move_text, game_state))
def analyse_move(
self,
game_state: GameState,
analysing_player: int,
move_probabilities: typing.List[typing.Tuple[str,
float,
int,
float]]):
""" Record analysis of the choices for a move.
:param game_state: the state before the move
:param analysing_player: the player doing the analysis; if both players
report analysis, the active player's will be kept
:param move_probabilities: the detailed analysis of best valid moves
[(choice, probability, count, value)] where choice is the move
display, probability is the recommended probability of choosing that
move, count is the number of times the move or a descendant was
analysed during the search, and value is the estimated value of the
move, where 1 is 100% wins for the active player and -1 is 100%
losses.
"""
for item in reversed(self.items):
if item.game_state == game_state:
break
else:
raise ValueError('Board not found in log.')
active_player = game_state.get_active_player()
if item.choices and active_player != analysing_player:
return
item.choices = move_probabilities
q = Decimal('0.1')
for i, (choice,
probability,
count,
value) in enumerate(move_probabilities, 1):
dec_value = Decimal(value).quantize(q)
if choice == item.move_text:
item.comment = f'{dec_value}'
if i != 1:
item.comment += f', choice {i}'
break
else:
# Didn't find chosen move in analysed moves, probably a poor choice.
item.comment = '?'
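    # For example, if the recorded move was the analyser's second-best choice
    # with value 0.5, the item's comment becomes "0.5, choice 2"; a chosen
    # move missing from the analysis is marked "?".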
def rewind_to(self, step: int):
del self.items[step:]
        self.step = step
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/log_display.py | log_display.py |
import math
import os
import sys
import typing
from datetime import datetime
from functools import partial
from itertools import chain
from operator import attrgetter
from os import cpu_count
from pathlib import Path
from random import shuffle
import numpy as np
from PySide6.QtCore import QSettings
from PySide6.QtGui import Qt, QIcon, QPixmap
from PySide6.QtWidgets import (QApplication, QMainWindow, QFileDialog,
QTableWidgetItem, QGridLayout, QPushButton,
QSizePolicy, QDialog, QWidget, QLabel, QComboBox)
from alembic import command
from alembic.config import Config
from pkg_resources import iter_entry_points, EntryPoint
from sqlalchemy import create_engine
from sqlalchemy.orm import Session as BaseSession
from sqlalchemy.util import immutabledict
import zero_play
from zero_play.about_dialog import Ui_Dialog
from zero_play.game_state import GameState
from zero_play.game_display import GameDisplay
from zero_play.main_window import Ui_MainWindow
from zero_play.mcts_player import MctsPlayer
from zero_play.models import Session
from zero_play.models.game import GameRecord
from zero_play.models.match import MatchRecord
from zero_play.models.match_player import MatchPlayerRecord
from zero_play.models.player import PlayerRecord
from zero_play.play_controller import PlayController
from zero_play.playout import Playout
from zero_play.process_display import ProcessDisplay
from zero_play.strength_adjuster import StrengthAdjuster
from zero_play import zero_play_rules_rc
from zero_play import zero_play_images_rc
from zero_play.strength_history_plot import StrengthHistoryPlot
from zero_play.strength_plot import StrengthPlot
assert zero_play_rules_rc # Need to import this module to load resources.
assert zero_play_images_rc # Need to import this module to load resources.
try:
from zero_play.plot_canvas import PlotCanvas
except ImportError:
from zero_play.plot_canvas_dummy import PlotCanvasDummy as PlotCanvas # type: ignore
DEFAULT_SEARCHES = 600
class AboutDialog(QDialog):
def __init__(self,
credit_pairs: typing.Iterable[typing.Tuple[str, str]],
parent: QWidget | None = None) -> None:
super().__init__(parent)
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.ui.version.setText(zero_play.__version__)
credits_layout = self.ui.credits_layout
row = 0
for row, (title, text) in enumerate(credit_pairs):
credits_layout.addWidget(QLabel(title),
row,
0,
Qt.AlignmentFlag.AlignRight)
credits_layout.addWidget(QLabel(text), row, 1)
row += 1
credits_layout.addWidget(self.ui.version_label, row, 0)
credits_layout.addWidget(self.ui.version, row, 1)
def get_settings(game_state: GameState | None = None):
settings = QSettings("Don Kirkby", "Zero Play")
if game_state is not None:
settings.beginGroup('games')
settings.beginGroup(game_state.game_name.replace(' ', '_'))
return settings
def get_database_url(database_path: Path | None = None) -> typing.Optional[str]:
if database_path is None:
settings = get_settings()
database_path = settings.value('db_path')
if database_path is None or not os.path.exists(str(database_path)):
return None
database_url = f'sqlite:///{database_path}'
return database_url
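# A minimal sketch (illustrative path) of how the URL feeds SQLAlchemy, as
# ZeroPlayWindow.db_session does below:
#     url = get_database_url(Path('players.zpl'))
#     if url is not None:
#         engine = create_engine(url)
#         Session.configure(bind=engine)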
class ZeroPlayWindow(QMainWindow):
""" Main window for a collection of board games.
    To create your own collection, declare a subclass and override these
    methods: get_collection_name(), filter_games().
"""
icon_path = ":/zero_play_images/main_icon.png"
def __init__(self) -> None:
super().__init__()
self.setAttribute(Qt.WidgetAttribute.WA_DeleteOnClose, True)
ui = self.ui = Ui_MainWindow()
ui.setupUi(self)
self.plot_canvas = StrengthHistoryPlot(ui.centralwidget)
ui.plot_history_page.layout().addWidget(self.plot_canvas, 1, 0, 1, 2)
self.strength_canvas = StrengthPlot(ui.centralwidget)
ui.plot_strength_display_page.layout().addWidget(self.strength_canvas)
ui.cancel.clicked.connect(self.on_cancel)
ui.start.clicked.connect(self.on_start)
ui.action_game.triggered.connect(self.on_new_game)
ui.action_new_db.triggered.connect(self.on_new_db)
ui.action_open_db.triggered.connect(self.on_open_db)
ui.action_plot.triggered.connect(self.on_plot)
ui.action_coordinates.triggered.connect(self.on_view_coordinates)
ui.action_about.triggered.connect(self.on_about)
ui.action_strength_test.triggered.connect(self.on_new_strength_test)
ui.start_strength_test.clicked.connect(self.on_start_strength_test)
ui.toggle_review.clicked.connect(self.on_toggle_review)
ui.resume_here.clicked.connect(self.on_resume_here)
ui.rules_close.clicked.connect(self.on_close_rules)
ui.move_history.currentIndexChanged.connect(self.on_move_history)
ui.player1.currentIndexChanged.connect(
lambda new_index: self.on_player_changed(ui.player1, new_index))
ui.player2.currentIndexChanged.connect(
lambda new_index: self.on_player_changed(ui.player2, new_index))
ui.searches1.valueChanged.connect(self.on_searches_changed)
ui.searches_lock1.stateChanged.connect(self.on_lock_changed)
ui.searches_lock2.stateChanged.connect(self.on_lock_changed)
self.cpu_count = cpu_count() or 1
self.is_history_dirty = False # Has current game been rewound?
self.all_displays: typing.List[GameDisplay] = []
self.load_game_list(ui.game_page.layout())
icon_pixmap = QPixmap(self.icon_path) # After displays load resources!
icon = QIcon(icon_pixmap)
self.setWindowIcon(icon)
self.start_state: typing.Optional[GameState] = None
self.display: ProcessDisplay | StrengthPlot | None = None
self.game_display: typing.Optional[GameDisplay] = None
self.on_new_game()
        self.board_to_resume: typing.Optional[GameState] = None
self.review_names = [name.strip()
for name in ui.toggle_review.text().split('/')]
self.are_coordinates_always_visible = False
self.game_start_time = datetime.now()
ui.history_game.currentIndexChanged.connect(self.requery_plot)
self._db_session = None
settings = get_settings()
ui.strength_test_game.setCurrentText(settings.value(
'strength_test_game',
ui.strength_test_game.currentText()))
ui.strength_test_strengths.setText(settings.value(
'strength_test_strengths',
ui.strength_test_strengths.text()))
ui.strength_test_min.setValue(settings.value(
'strength_test_min',
ui.strength_test_min.value(),
type=int))
ui.strength_test_max.setValue(settings.value(
'strength_test_max',
ui.strength_test_max.value(),
type=int))
self.on_toggle_review()
@staticmethod
def get_collection_name() -> str:
return 'Zero Play'
@staticmethod
def filter_games(
entries: typing.Iterable[EntryPoint]) -> typing.Generator[EntryPoint,
None,
None]:
yield from entries
@property
def db_session(self) -> typing.Optional[BaseSession]:
if self._db_session is None:
db_url = get_database_url()
if db_url is None:
return None
engine = create_engine(db_url)
Session.configure(bind=engine)
self._db_session = Session()
return self._db_session
def on_about(self):
credit_pairs = chain(*(display.credit_pairs
for display in self.all_displays))
dialog = AboutDialog(credit_pairs, self)
dialog.setWindowTitle(f'About {self.get_collection_name()}')
        dialog.exec()
def load_game_list(self, game_layout: QGridLayout):
while game_layout.count():
child = game_layout.takeAt(0)
if child.widget():
child.widget().deleteLater()
games = self.all_displays
all_entries = iter_entry_points('zero_play.game_display')
filtered_entries = self.filter_games(all_entries)
for game_entry in filtered_entries:
display_class = game_entry.load()
display: GameDisplay = display_class()
self.destroyed.connect(display.close) # type: ignore
display.game_ended.connect(self.on_game_ended) # type: ignore
games.append(display)
games.sort(key=attrgetter('start_state.game_name'))
column_count = math.ceil(math.sqrt(len(games)))
for i, display in enumerate(games):
row = i // column_count
column = i % column_count
game_name = display.start_state.game_name
game_button = QPushButton(game_name)
game_button.setSizePolicy(QSizePolicy.Policy.Minimum,
QSizePolicy.Policy.Minimum)
game_button.clicked.connect(partial(self.show_game, # type: ignore
display))
game_layout.addWidget(game_button, row, column)
self.ui.history_game.addItem(game_name, userData=display)
self.ui.strength_test_game.addItem(game_name, userData=display)
if display.rules_path is not None:
game_rules_action = self.ui.menu_rules.addAction(game_name)
game_rules_action.triggered.connect(partial(self.on_rules,
display))
def on_toggle_review(self):
choices = self.ui.choices
current_page = self.ui.stacked_widget.currentWidget()
is_game_displayed = current_page is self.ui.display_page
is_named_review = self.ui.toggle_review.text() == self.review_names[0]
is_review_visible = is_game_displayed and is_named_review
if not is_review_visible:
if self.game_display is not None and self.board_to_resume is not None:
self.game_display.update_board(self.board_to_resume)
self.board_to_resume = None
self.ui.action_coordinates.setChecked(
self.are_coordinates_always_visible)
self.on_view_coordinates(self.are_coordinates_always_visible)
else:
self.board_to_resume = self.game_display.current_state
self.are_coordinates_always_visible = (
self.ui.action_coordinates.isChecked())
self.ui.action_coordinates.setChecked(True)
self.on_view_coordinates(True)
choices.clear()
choices.setRowCount(3)
choices.setColumnCount(10)
choices.resizeColumnsToContents()
choices.resizeRowsToContents()
choices.setMaximumHeight(choices.horizontalHeader().height() +
choices.verticalHeader().length() +
choices.horizontalScrollBar().height())
self.ui.move_history.clear()
self.ui.move_history.addItems(
[str(item) for item in self.game_display.log_display.items])
self.ui.move_history.setCurrentIndex(self.ui.move_history.count()-1)
self.ui.resume_here.setVisible(is_review_visible)
self.ui.move_history.setVisible(is_review_visible)
self.ui.choices.setVisible(is_review_visible)
self.ui.toggle_review.setText(self.review_names[is_review_visible])
if self.game_display is not None:
self.game_display.is_reviewing = is_review_visible
choices.setVisible(is_review_visible)
def on_resume_here(self):
self.board_to_resume = self.game_display.current_state
self.is_history_dirty = True
history_index = self.ui.move_history.currentIndex()
self.game_display.log_display.rewind_to(history_index)
self.on_toggle_review()
self.game_display.request_move()
def on_move_history(self, item_index: int):
assert self.game_display is not None
history_item = self.game_display.log_display.items[item_index]
self.game_display.update_board(history_item.game_state)
choices = self.ui.choices
choices.clear()
choices.setColumnCount(len(history_item.choices))
choices.setVerticalHeaderLabels(['count', 'probability', 'value'])
choices.setHorizontalHeaderLabels([choice[0]
for choice in history_item.choices])
for i, (choice,
probability,
count,
value) in enumerate(history_item.choices):
choices.setItem(0, i, QTableWidgetItem(f'{count}'))
choices.setItem(1, i, QTableWidgetItem(f'{probability}'))
choices.setItem(2, i, QTableWidgetItem(f'{value}'))
choices.resizeColumnsToContents()
def on_new_game(self):
self.stop_workers()
self.ui.stacked_widget.setCurrentWidget(self.ui.game_page)
self.setWindowTitle(self.get_collection_name())
def stop_workers(self):
if self.display is not None:
self.display.stop_workers()
self.display = self.game_display = None
def show_game(self, display: GameDisplay):
QApplication.setOverrideCursor(Qt.CursorShape.WaitCursor)
self.display = self.game_display = display
start_state = display.start_state
self.start_state = start_state
collection_name = self.get_collection_name()
self.setWindowTitle(f'{collection_name} - {start_state.game_name}')
self.ui.game_name.setText(start_state.game_name)
settings = get_settings(start_state)
is_locked = settings.value('searches_locked', False, bool)
self.ui.searches_lock1.setChecked(is_locked)
self.ui.searches_lock2.setChecked(is_locked)
search_count = settings.value('searches', DEFAULT_SEARCHES, int)
self.ui.searches1.setValue(search_count)
self.ui.searches2.setValue(search_count)
self.ui.shuffle_players.setChecked(settings.value('shuffle_players',
False,
bool))
heuristics = self.load_heuristics()
player1_index = settings.value('player_1', 0, int)
player2_index = settings.value('player_2', 0, int)
self.ui.player1.clear()
self.ui.player2.clear()
self.ui.player1.addItem('Human', None)
self.ui.player2.addItem('Human', None)
for name, heuristic in heuristics:
self.ui.player1.addItem(name, heuristic)
self.ui.player2.addItem(name, heuristic)
self.ui.player1.setCurrentIndex(player1_index)
self.ui.player2.setCurrentIndex(player2_index)
self.ui.stacked_widget.setCurrentWidget(self.ui.players_page)
self.board_to_resume = None
self.on_toggle_review()
QApplication.restoreOverrideCursor()
def on_player_changed(self, player: QComboBox, new_index: int):
if new_index < 0:
# Combo box was cleared.
return
assert self.start_state is not None
settings = get_settings(self.start_state)
if player is self.ui.player1:
searches = self.ui.searches1
searches_label = self.ui.searches_label1
searches_lock = self.ui.searches_lock1
setting_name = 'player_1'
row = 1
else:
searches = self.ui.searches2
searches_label = self.ui.searches_label2
searches_lock = self.ui.searches_lock2
setting_name = 'player_2'
row = 2
settings.setValue(setting_name, new_index)
heuristic = player.itemData(new_index)
searches.setVisible(heuristic is not None)
searches_label.setVisible(heuristic is not None)
searches_lock.setVisible(heuristic is not None)
colspan = 4 if heuristic is None else 1
self.ui.player_layout.addWidget(player, row, 1, 1, colspan)
@staticmethod
def load_heuristics():
heuristics = [('Computer', Playout())]
# entry: EntryPoint
# for entry in iter_entry_points('zero_play.heuristic'):
# try:
# heuristic_class = entry.load()
# except ImportError as ex:
# library_path = os.environ.get('LD_LIBRARY_PATH')
# if library_path is not None:
# raise
# message = (f'Unable to load entry {entry.name}. Do you need to'
# f' set LD_LIBRARY_PATH?')
# raise ImportError(message) from ex
# try:
# heuristic: Heuristic = heuristic_class(start_state)
# except ValueError:
# continue
# heuristics.append((entry.name, heuristic))
return heuristics
def on_cancel(self):
self.ui.stacked_widget.setCurrentWidget(self.ui.game_page)
def on_network1(self):
file_name, _ = QFileDialog.getOpenFileName(
self.ui.players_page,
"Open a file for player 1's neural network.",
filter='Checkpoint (*.h5)',
options=QFileDialog.DontUseNativeDialog)
def on_start(self) -> None:
self.game_start_time = datetime.now()
assert self.game_display is not None
assert self.start_state is not None
self.game_display.update_board(self.game_display.start_state)
self.is_history_dirty = False
ui = self.ui
player_fields = [(ui.player1.currentData(), ui.searches1.value()),
(ui.player2.currentData(), ui.searches2.value())]
is_shuffled = ui.shuffle_players.isChecked()
settings = get_settings(self.start_state)
settings.setValue('shuffle_players', is_shuffled)
if is_shuffled:
shuffle(player_fields)
mcts_choices = {self.start_state.players[0]: player_fields[0],
self.start_state.players[1]: player_fields[1]}
self.game_display.mcts_players = [
MctsPlayer(self.start_state,
player_number,
iteration_count=searches,
process_count=self.cpu_count)
for player_number, (heuristic, searches) in mcts_choices.items()
if heuristic is not None]
layout: QGridLayout = ui.display_page.layout()
layout.replaceWidget(ui.game_display, self.game_display)
ui.game_display.setVisible(False)
ui.game_display = self.game_display
self.game_display.setVisible(True)
self.game_display.show_coordinates = ui.action_coordinates.isChecked()
self.on_view_game()
def on_view_game(self):
if self.game_display is None:
self.on_new_game()
else:
self.ui.stacked_widget.setCurrentWidget(self.ui.display_page)
self.game_display.update_board(self.game_display.current_state)
self.game_display.request_move()
def on_view_coordinates(self, is_checked: bool):
if self.game_display is not None:
self.game_display.show_coordinates = is_checked
def on_new_strength_test(self):
self.stop_workers()
self.ui.stacked_widget.setCurrentWidget(
self.ui.plot_strength_page)
def on_start_strength_test(self) -> None:
settings = get_settings()
ui = self.ui
settings.setValue('strength_test_game',
ui.strength_test_game.currentText())
settings.setValue('strength_test_strengths',
ui.strength_test_strengths.text())
settings.setValue('strength_test_min', ui.strength_test_min.value())
settings.setValue('strength_test_max', ui.strength_test_max.value())
game_display: GameDisplay = ui.strength_test_game.currentData()
start_state = game_display.start_state
players = [MctsPlayer(start_state, GameState.X_PLAYER),
MctsPlayer(start_state, GameState.O_PLAYER)]
controller = PlayController(start_state, players)
player_definitions = ui.strength_test_strengths.text().split()
self.display = self.strength_canvas
assert self.db_session is not None
self.strength_canvas.start(self.db_session,
controller,
player_definitions,
ui.strength_test_min.value(),
ui.strength_test_max.value())
ui.stacked_widget.setCurrentWidget(
ui.plot_strength_display_page)
def on_plot(self):
self.ui.stacked_widget.setCurrentWidget(self.ui.plot_history_page)
self.requery_plot()
def requery_plot(self) -> None:
display: GameDisplay = self.ui.history_game.currentData()
self.plot_canvas.game = display.start_state
settings = get_settings(display.start_state)
future_strength = settings.value('searches', DEFAULT_SEARCHES, int)
self.plot_canvas.requery(self.db_session, future_strength)
def on_searches_changed(self, search_count: int):
if self.ui.stacked_widget.currentWidget() is not self.ui.players_page:
return
if self.start_state is not None:
settings = get_settings(self.start_state)
settings.setValue('searches', search_count)
settings.remove('game_count')
settings.remove('last_score')
settings.remove('streak_length')
def on_game_ended(self, game_state: GameState):
if (self.is_history_dirty or
self.game_display is None or
self.ui.searches_lock1.isChecked()):
return
db_session = self.db_session
if db_session is None:
return
game_record = GameRecord.find_or_create(db_session, game_state)
game_end_time = datetime.now()
game_duration = game_end_time - self.game_start_time
match_record = MatchRecord(game=game_record,
start_time=self.game_start_time,
total_seconds=round(game_duration.total_seconds()),
move_count=game_state.get_move_count())
db_session.add(match_record)
winner = game_state.get_winner()
mcts_player: typing.Optional[MctsPlayer]
for player_number in game_state.get_players():
mcts_player = self.game_display.get_player(player_number)
if mcts_player is None:
player_record = db_session.query(PlayerRecord).filter_by(
type=PlayerRecord.HUMAN_TYPE).one_or_none()
if player_record is None:
player_record = PlayerRecord(type=PlayerRecord.HUMAN_TYPE)
db_session.add(player_record)
else:
player_record = db_session.query(PlayerRecord).filter_by(
type=PlayerRecord.PLAYOUT_TYPE,
iterations=mcts_player.iteration_count).one_or_none()
if player_record is None:
player_record = PlayerRecord(type=PlayerRecord.PLAYOUT_TYPE,
iterations=mcts_player.iteration_count)
db_session.add(player_record)
if player_number == winner:
result = 1
elif winner == game_state.NO_PLAYER:
result = 0
else:
result = -1
match_player = MatchPlayerRecord(match=match_record,
player=player_record,
player_number=player_number,
result=result)
db_session.add(match_player)
db_session.commit()
try:
mcts_player, = self.game_display.mcts_players
except ValueError:
# Didn't have exactly one MCTS player
return
assert mcts_player is not None
winning_player = game_state.get_winner()
        # Score the result from the human's perspective so the strength
        # adjuster can raise or lower the AI's search count.
        if winning_player == mcts_player.player_number:
score = -1
elif winning_player == GameState.NO_PLAYER:
score = 0
else:
score = 1
settings = get_settings(self.start_state)
strength_adjuster = StrengthAdjuster(
strength=mcts_player.iteration_count,
game_count=settings.value('game_count', 0, int),
last_score=settings.value('last_score', 0, int),
streak_length=settings.value('streak_length', 1, int))
strength_adjuster.record_score(score)
settings.setValue('searches', strength_adjuster.strength)
settings.setValue('game_count', strength_adjuster.game_count)
settings.setValue('last_score', strength_adjuster.last_score)
settings.setValue('streak_length', strength_adjuster.streak_length)
def on_lock_changed(self, is_checked):
self.ui.searches_lock1.setChecked(is_checked)
self.ui.searches_lock2.setChecked(is_checked)
settings = get_settings(self.start_state)
settings.setValue('searches_locked', is_checked)
def on_rules(self, display: GameDisplay):
self.ui.stacked_widget.setCurrentWidget(self.ui.rules_page)
rules_path = display.rules_path
if rules_path is None:
game_name = display.start_state.game_name
rules_html = f'No rules found for {game_name}.'
self.ui.rules_text.setHtml(rules_html)
else:
self.ui.rules_text.setSource('qrc' + rules_path)
def on_close_rules(self):
if self.game_display is None:
page = self.ui.game_page
elif self.game_display.current_state == self.game_display.start_state:
page = self.ui.players_page
else:
page = self.ui.display_page
self.ui.stacked_widget.setCurrentWidget(page)
def on_new_db(self):
settings = get_settings()
db_path = settings.value('db_path')
file_name, _ = QFileDialog.getSaveFileName(
self,
"Create a new database",
dir=db_path,
filter='Player databases (*.zpl)',
options=QFileDialog.DontUseNativeDialog)
if not file_name:
return
script_path = str(Path(__file__).parent / 'db')
database_path = Path(file_name).absolute()
settings.setValue('db_path', str(database_path))
database_url = get_database_url(database_path)
alembic_config = Config(config_args=immutabledict({
'script_location': script_path,
'sqlalchemy.url': database_url}))
command.upgrade(alembic_config, 'head')
def on_open_db(self):
pass
def main():
app = QApplication(sys.argv)
window = ZeroPlayWindow()
window.show()
return app.exec()
if __name__ == "__main__":
    sys.exit(main())
| zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/zero_play.py | zero_play.py |
import typing
from base64 import standard_b64encode
from contextlib import contextmanager
from PySide6.QtCore import (QByteArray, QBuffer, QIODevice, QSize,
QRect)
from PySide6.QtGui import QPixmap, QPainter, QColor, QImage, Qt
from PySide6.QtWidgets import QGraphicsView
from space_tracer import LiveImageDiffer, LiveImage, LivePainter
from zero_play.game_display import GameDisplay
from zero_play.live_qpainter import LiveQPainter
class PixmapDiffer(LiveImageDiffer):
@staticmethod
def start_painter(size: LiveImage.Size,
fill: LiveImage.FlexibleFill = 'white') -> LivePainter:
width, height = size
pixmap = QPixmap(width, height)
return LiveQPainter(pixmap, QColor(fill))
def end_painters(self, *painters: LivePainter):
for painter in painters:
assert isinstance(painter, LiveQPainter)
painter.end()
@contextmanager
def create_qpainters(
self,
size: LiveImage.Size,
fill: LiveImage.FlexibleFill = 'ivory') -> typing.Iterator[
typing.Tuple[QPainter, QPainter]]:
with self.create_painters(size, fill) as (actual, expected):
assert isinstance(actual, LiveQPainter)
assert isinstance(expected, LiveQPainter)
yield actual.painter, expected.painter
def encode_image(image: QImage) -> str:
image_bytes = QByteArray()
buffer = QBuffer(image_bytes)
buffer.open(QIODevice.WriteOnly) # type: ignore
    # Write the image into the buffer in PNG format.
    image.save(buffer, "PNG")  # type: ignore
raw_bytes = buffer.data().data()
b64_bytes = standard_b64encode(raw_bytes)
b64_string = b64_bytes.decode('UTF-8')
return b64_string
def decode_image(text: str) -> QImage:
encoded_bytes = QByteArray(text.encode('utf8'))
image_bytes = QByteArray.fromBase64(encoded_bytes)
image = QImage.fromData(image_bytes)
return image
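# A minimal round-trip sketch (assumes a Qt application object exists, which
# some image operations require):
#     image = QImage(4, 4, QImage.Format_ARGB32)
#     image.fill(QColor('red'))
#     assert decode_image(encode_image(image)) == image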
def render_display(display: GameDisplay,
painter: QPainter,
is_closed: bool = True):
""" Check scene size, render, then clear scene.
You have to clear the scene to avoid a crash after running several unit
tests.
:param display: display widget whose children contain a QGraphicsView to
render.
:param painter: a canvas to render on
:param is_closed: True if the display should be closed after rendering. Be
sure to close the display before exiting the test, if it contains any
items with reference cycles back to the scene.
"""
__tracebackhide__ = True
try:
for child in display.children():
if isinstance(child, QGraphicsView):
view = child
break
else:
raise ValueError("No QGraphicsView in display's children.")
view.grab() # Force layout to recalculate, if needed.
scene_size = view.contentsRect().size()
device = painter.device()
assert isinstance(device, QPixmap)
painter_size = device.size()
if scene_size != painter_size:
display_size = find_display_size(display, view, painter_size)
message = (f"Try resizing display to "
f"{display_size.width()}x{display_size.height()}.")
painter.drawText(QRect(0, 0,
painter_size.width(), painter_size.height()),
Qt.AlignCenter | Qt.TextWordWrap, # type: ignore
message)
return
assert scene_size == painter_size
view.scene().render(painter)
finally:
if is_closed:
display.close()
def find_display_size(display: GameDisplay,
view: QGraphicsView,
target_size: QSize) -> QSize:
max_width = None
max_height = None
min_width = min_height = 1
display_width = display.width()
display_height = display.height()
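    # Binary search on the display's width and height: double each dimension
    # until the scene reaches the target size, then narrow the bounds to the
    # midpoint until both dimensions match exactly.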
while True:
scene_size = view.contentsRect().size()
if scene_size.width() == target_size.width():
min_width = max_width = display_width
elif scene_size.width() < target_size.width():
min_width = display_width+1
else:
max_width = display_width-1
if scene_size.height() == target_size.height():
min_height = max_height = display_height
elif scene_size.height() < target_size.height():
min_height = display_height+1
else:
max_height = display_height-1
if max_width is None:
display_width *= 2
else:
display_width = (min_width + max_width) // 2
if max_height is None:
display_height *= 2
else:
display_height = (min_height + max_height) // 2
if min_width == max_width and min_height == max_height:
return QSize(display_width, display_height)
display.resize(display_width, display_height)
view.grab() # Force layout recalculation. | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/pixmap_differ.py | pixmap_differ.py |
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
def parse_args():
parser = ArgumentParser(description='Plot player strengths.',
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('game',
default='zero_play.tictactoe.state.TicTacToeState',
nargs='?',
help='Game state class to test with.')
parser.add_argument('--player_definitions',
nargs='*',
                        type=str,  # text, not int, so '512nn' stays valid
help='list of definitions for player strength: number '
'of iterations, plus "nn" for neural net',
default=[8, 64, 512])
parser.add_argument('--opponent_min',
help='minimum search iterations for the opponent',
type=int,
default=1)
parser.add_argument('--opponent_max',
help='maximum search iterations for the opponent',
type=int,
default=512)
parser.add_argument('--checkpoint',
help='checkpoint file to load for neural net')
return parser.parse_args()
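# Example invocation (hypothetical values, based on the arguments above):
#   python plot_strengths.py zero_play.tictactoe.state.TicTacToeState \
#       --player_definitions 8 64 512nn --opponent_min 1 --opponent_max 512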
# def main():
# logging.basicConfig(level=logging.INFO,
# format="%(asctime)s[%(levelname)s]:%(name)s:%(message)s")
# logger.setLevel(logging.DEBUG)
# args = parse_args()
# class_path = args.game
# class_parts = class_path.split('.')
# class_name = class_parts.pop()
# module_name = '.'.join(class_parts)
# module = import_module(module_name)
# game_state_class = getattr(module, class_name)
# start_state: GameState = game_state_class()
#
# # parser = args.parser
# # parser.add_argument(
# # '-p', '--player',
# # default='mcts',
# # nargs='*',
# # help="the player to use",
# # action='entry_point')
# # args.player = ['mcts']
# # args.mcts_iterations = MctsPlayer.DEFAULT_ITERATIONS
#
# if '__live_coding_context__' in locals():
# controller = None
# else:
# players = [MctsPlayer(start_state, GameState.X_PLAYER),
# MctsPlayer(start_state, GameState.O_PLAYER)]
# controller = PlayController(start_state, players)
#
# figure = plt.figure()
# db_path = os.path.abspath(os.path.join(
# __file__,
# f'../../data/{args.game}-strengths.db'))
# neural_net_path = os.path.abspath(os.path.join(
# __file__,
# f'../../data/{args.game}-nn/best.h5'))
# logger.debug(db_path)
# plotter = Plotter(db_path,
# args.game,
# controller,
# args.player_definitions,
# args.opponent_min,
# args.opponent_max,
# neural_net_path)
# if controller is None:
# animation = None
# else:
# animation = FuncAnimation(figure, plotter.update, interval=30000)
#
# plt.show()
# assert controller is None or animation is not None
#
#
# if __name__ in ('__main__', '__live_coding__'):
# main() | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/plot_strengths.py | plot_strengths.py |
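# Resource object code for Qt images. This module appears to be output from
# Qt's resource compiler (assumption: generated by pyside6-rcc); regenerate
# it rather than editing the byte strings by hand.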
from PySide6 import QtCore
qt_resource_data = b"\
\x00\x00\x05\xd8\
\x89\
PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3>a\xcb\
\x00\x00\x00\x09pHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95+\x0e\x1b\x00\x00\x05\x8aIDATx\x9c\xed\
\x9d\xc1\x92\xe3*\x0cE\xed\xa9\xde\xe5/\xf3a\xfd\x97\
Yg\x16\xef\xa9\xc6\xed\x0e \x84\x04WH\xa7*\x8b\
\xa9I\xe0\xca\xba\x08;i[\xe7\xd1\xc3\xf7\xe3\xdd\xf5\
~d\x9e\xaf\x9f\xff\xfe~\xac\xd1\xa1\xcd\xf3u\xf6\xbc\
\xfd\x8b\xf5\xae\x9d\x12\xbf;\xd7\x5c1\xcc\xf0\xa79X\
&\xdf/\x8c\xdc\x95\x0d\x90\x89\xdf\x83F\x1e?\x1b \
\x93\xbf\x17\x95|\xfe6@&\x7fO\x0ay\xfd\xe2\xbc\
I\x8d\xfb\x99w\x89]\xce\xc8\xef\xac\x8e\xff\xfb\xf1\xbe\
\x9f\x18\x9e?\xfe\xd3\x02n\xd0%\xac\x0e\xc6\xac\xcb@\
\xc4\xf8/&\xb01\xc0h\xd0%4\x0f\x86\xa5\x01|\
\xc4\x7f\x1e\x07m\x01Z\xc9\xb7\x0a\xfc>>\xea\x16\xe1\
0\xfe\xfa\xf7\x00=X\x07\xbfj..\x0cM\xef\xf7\
\x9b\xf5\xd2\x98\x8b\xcb9\xbc\xfaW'C\xba\x1a\xb4\xb6\
\x80F\xfc\xac\x84V8\xcf\xc6\x97y#\xd5\xe0\xf9:\
\xc7*\xc0\xea\xe4\xaf\xd6P\x99\x9b\xbd\x9a\x1b4\xc7\x19\
\x8c_n\x00\x84\xe4\x13+\xb4\x14\xe6\xd4J|\xd7\xb8\
\x03\xf1\xcb\x0c\x80\x94|\x02\xe0\x1c\xc4\x22\xf1\xec9\x84\
\xf1\xf7\x1b\x001\xf9\xc4\x0cm\x1f\xe6\xb0Z\xf5%\x8a\
\xf3\x09\xe2\xef3\x00r\xf2\x09K\x8d\x85\xe4\xafb\xd8\
\x04\xdf\x8f\xb7\xdee`@V&_K\x03\xdf\x00\x1e\
V?a\xa1u\xd3\xf8y\x06\xf0\x14<\xa1\xa9\x19\xac\
\xf4\xdf\x19\xd9\x0ar\x0b\x10\x80\x94|B\xaa\xa9m\x00\
\x8f\xab\x9f\xd0\xd0\xbey\xfcY\x01:A\x5c\xfd\x84D\
[\x1a 8u\x03x.\x7f\xc4H\x0c\xb7\xcf\x22\xaf\
~\xe2\x97\xc6F\xfcY\x01\x82\x93\x06\x08N\xd9\x00;\
\x94\x7fB\x12\x8b\xc3\xf2O\xf4l\x03Y\x01\x82\x93\x06\
\x08N\x1a 8i\x80\xe0\xa4\x01\x82\x93\x06\x08N\x1a\
8i\x80\xe0\xa4\x01\x82S{B\xc8D\x19\xc6H\
b\xb9}\xa6y\x87\x0e\x10\xbf\xb4V\xe2\xcf\x0a\x10\x9c\
4@pZO\x09\x9b$\xc3\x90\x91\x18\x1cn\x03=\
\xe5\xff8\xb2\x02\x84'\x0d\xd0\x09r\x15\x90hk\x1b\
\xc0\xf36\xa0\xa1}\xf3\xf8\xb3\x02\x08@\xac\x02RM\
<\x03x\x5c\x05\x9a\x9a?\x8c\x85d\x82\x8fZ\x98\xf1\
\xf3+\x80'\x13Xh\xdd4\xfe\xdc\x02\x06@\xa8\x02\
\xa3\x1a\xfa\x0c\xe0a\x15Xj\x04\xdb\x0aFJ?\xd1\
_\x01\x90M0C[\xc1\x043\x8dP\x9cO\x10\xbf\
l\x0b@4\xc1LM\x85\xb9f\x98\xa08\x870~\
\xf99\x00\x92\x09Vh\xa9\x98\xc0\xc2\x08\xd5q\x07\xe2\
\x1f;\x09D0\xc1J\x0d\x95\xb9\xb5\x8c\xd0\x1cg0\
~^\xcf\xa0\x1a$`\xf6\x9dD\x08\xe6;\x8ef\xfc\
\xd7\xe4q\xef.b\x19G)~\xbd\xcb@\x80=x\
)\x0cM\xb4\x9a[/\x8d\xb9\xb8\x8cW\x80+\xd6\xd5\
\x001\xf1W\x1c\xc6\xafk\x00\xe2*\x14\xb1a\x825\
\x8e\xe2\xb71\xc0\x15\xc9\xc1\xf0\x98\xf4\x12\xe0\xf1\xdb\x1b\
\xe0\xcaN\x89\x95\x00\x18\x7f\xfe\x16\x10\x9c4@p\xd2\
\x00\xc19\x8f\xe3\xf0\xf3\xec\x93D\x9d\xac\x00\xc1I\x03\
\x04'\x0d\x10\x9c/\xc4k\xd3)\xccj\x1d\x0bNV\
\x80\xe0\xa4\x01\x82\x93\x06\x08\xce\xdc\xdf\x02\x00\x7f\x0c\x99\
\x0a`\xfc\xf6\x06\x18}N\xafw3\x80\xc7oc\x00\
\xab\x86M^\xcc\xe0(~]\x03X\xff] \x8d\x8f\
j\x04\x87\xf1\xeb\x9d\x042\x82\xa7\x96\xa7\xad\x97\xc6\x5c\
\xd3\x01\xe8],a\xbc\x024\xc4H\x9e\xb3\x7f\xfdL\
\xf1\x8f$Q\xaa\xc1*3*\xc5?V\x01*\xc1k\
5Tn\x8e\xb3\xb2\x1a T\xa2A\x0dr\x03TZ\
\xa8[t\xd7\xa8\x8e\xbb\x22\x11\x08\xc9'\x06\xb4\xc8\x0c\
PI\xbe5\x10&@J>!\xd4\xd4o\x80B\x1f\
\xdd\x99=u\x8a\xf3\xcdH\x0cb\xf2\x09\x81\xb6>\x03\
\x805Q\x9en\x02\xe4\xe4\x13\x9d\x1a\x87N\x02\x11:\
i!h\xf0\x0c\xdf\x00\x1e\xdcOXh\xdd4~\x9e\
\x01\xc0J\xff\x1d\xf3\xad\xc0S\xf2\x09\xa6f\xd1\x16\x80\
\x94|\x02Q\x93\x07\xda\x06\xf0\xe8~BC\xfb\xe6\xf1\
wW\x00\xe4\x95\x86\xac\x0d\x95\xfc\x8b\xa0\xe0\xd4\x0d\xe0\
\xb0\x81rO\xe3\xe4&\x9e\xcb?\xd1\x88!+@p\
\xd2\x00\xc1)\x1b\xc0a\xf9'T\xb6\x81\x1d\xca?Q\
\x89%+@p\xd2\x00\xc1I\x03\x04'\x0d\x10\x9c4\
@p\xd2\x00\xc1I\x03\x04'\x0d\x10\x9c4@p\xca\
\x06p\xd88\x99\xe8m\xa0\xfc\x91\xd5w\x1ciR\x89\
%+@p\xd2\x00\xc1\xa9\x1b\xc0\xe16\xa0R\xfe5\
>\x8bB#\x86\xac\x00\xc1\xe96\x00r\x15@\xd6\x86\
J\xdb\x00\x9e\xcb\xa0\x86\xf6\xcd\xe3\x17m\x01\x88+\x0d\
Q\x93\x07x\x06\x00k\x9a|G\xab\x8fn\x11\x8fU\
\x80\xa9\x99_\x01<\x1d\x04\x0b\xad\x9b\xc6?t\x15\x80\
P\x05\x104x\xa6\xcf\x00`[\x81y\xe9\x9f9\xb6\
\x16=\x1a\x9f\xaf\xb3\xbf\x02\x14L0\xd3\x08\xc5\xf9f\
$\x08\xd9\x04\x02m\xb2-\xa0\xd2:\xdd\x1a\x8b\x16\xea\
\xdd \x9a@\xa8I~\x0eP1\x81\x85\x11\xaa\xe3\xae\
H\x08\x92\x09\x06\xb4\xfc9\x9e/y\xb6*\x13k\x19\
\xa19\xce\xcaD \x98`P\xc3\xf8\x93B\x1b\x1d\xb3\
\xaf\xc9\xe3\xde]4\xbb\x85\xfa\x10\xd6\x1d\xc3[\xf3J\
\xf9\x7f\xe1\xeb\xfd\x18\xc4\x10D\xab\xb9\xf5\xd2\x98k:\
N\xcfA\xfe\x1d\xed\xef\x87\xde\xcd\x7fV\xabA\xf3 \
[6\x8dB\x8f\xff\xb2\xed\xff\x5cn\x9a&\xf87\xd9\
\xd8\xe7\xadV\xd6\xac\xaea\x88\xf1\x17\x0d\xf0\xdf\x84v\
\xb7\x01#\xb5LY\xd16\x0e!\xfe\xdbI\xff\xe7\x0d\
\xd7\xd2\x04(D\xec\x1b\xf8\xe1\x8a\xef\xf3I\xe0\xc8\xa5\
a\x82I!\xa7\xb5\x07D\xa4\x09v\xa1\x92\xcb\xd6C\
\xa2\xd2\x04\x9ey\xbe\xceV\x0e\xdb_\x04]\x07\x88p\
n\xb0\x03\x1d\x0b\xb7\xef\x9b\xc0O\x03\xa7)\xd62X\
\xa5\xff\x02\xd0\xe6^\xdd#-6Z\x00\x00\x00\x00I\
END\xaeB`\x82\
\x00\x00\x00\x83\
<\
!DOCTYPE RCC><RC\
C version=\x221.0\x22>\
\x0a<qresource>\x0a<fi\
le>./main_icon.p\
ng</file>\x0a<file>\
./zero_play_imag\
es.qrc</file>\x0a</\
qresource>\x0a</RCC\
>\x0a\
"
qt_resource_name = b"\
\x00\x10\
\x0f-\x1ec\
\x00z\
\x00e\x00r\x00o\x00_\x00p\x00l\x00a\x00y\x00_\x00i\x00m\x00a\x00g\x00e\x00s\
\x00\x0d\
\x01a\xdd\x07\
\x00m\
\x00a\x00i\x00n\x00_\x00i\x00c\x00o\x00n\x00.\x00p\x00n\x00g\
\x00\x14\
\x0e|\x02\xa3\
\x00z\
\x00e\x00r\x00o\x00_\x00p\x00l\x00a\x00y\x00_\x00i\x00m\x00a\x00g\x00e\x00s\x00.\
\x00q\x00r\x00c\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00&\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01t\xcd\x02\x03\xb4\
\x00\x00\x00F\x00\x00\x00\x00\x00\x01\x00\x00\x05\xdc\
\x00\x00\x01t\xcd\x0e\xbd\x1e\
"
def qInitResources():
QtCore.qRegisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources() | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/zero_play_images_rc.py | zero_play_images_rc.py |
from contextlib import contextmanager
from pathlib import Path
import typing
from PySide6.QtGui import QPixmap, QColor, QPainter, QImage, QPen, Qt
from PySide6.QtWidgets import QApplication
from zero_play.connect4.display import Connect4Display
from zero_play.connect4.game import Connect4State
from zero_play.game_display import GameDisplay
from zero_play.live_qpainter import LiveQPainter
from zero_play.pixmap_differ import render_display
from zero_play.tictactoe.display import TicTacToeDisplay
from zero_play.tictactoe.state import TicTacToeState
class DiagramWriter:
app = QApplication()
def __init__(self, display: GameDisplay, width=200, height=200):
self.display = display
self.width = width
self.height = height
def draw(self, painter: QPainter):
render_display(self.display, painter)
pen = QPen()
pen.setWidth(self.width // 50)
pen.setCapStyle(Qt.PenCapStyle.RoundCap)
painter.setPen(pen)
@contextmanager
def create_painter(self) -> typing.Iterator[QPainter]:
white = QColor('white')
pixmap = QPixmap(self.width, self.height)
pixmap.fill(white)
painter = QPainter(pixmap)
try:
yield painter
finally:
painter.end()
def demo(self) -> None:
painter: QPainter
with self.create_painter() as painter:
self.draw(painter)
pixmap = painter.device()
assert isinstance(pixmap, QPixmap)
live_painter = LiveQPainter(pixmap, fill=None)
live_painter.display()
def write(self, path: Path):
print('Writing to', path)
with self.create_painter() as painter:
self.draw(painter)
device = painter.device()
assert isinstance(device, QPixmap), device
image: QImage = device.toImage()
image.save(str(path))
class TictactoeDiagram(DiagramWriter):
def __init__(self):
display = TicTacToeDisplay()
display.update_board(TicTacToeState('''\
OX.
XO.
X.O
'''))
display.resize(276, 224)
super().__init__(display, 200, 200)
def draw(self, painter: QPainter):
super().draw(painter)
w = self.width
h = self.height
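        # Strike through the winning diagonal (top-left to bottom-right) on
        # the example board.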
painter.drawLine(w//6, h//6, w*5//6, h*5//6)
class Connect4Diagram(DiagramWriter):
def __init__(self):
display = Connect4Display()
display.resize(288, 204)
display.update_board(Connect4State('''\
.......
......X
.....XO
..XOXOX
..OXOXO
..OXXXO
'''))
super().__init__(display, 210, 180)
def draw(self, painter: QPainter):
super().draw(painter)
pen = painter.pen()
pen.setColor(QColor('darkgrey'))
painter.setPen(pen)
w = self.width
h = self.height
painter.drawLine(w//2, h*3//4, w*13//14, h*3//12)
def main():
rules_path = Path(__file__).parent.parent / "docs" / "rules"
TictactoeDiagram().write(rules_path / "tictactoe.png")
Connect4Diagram().write(rules_path / "connect4.png")
if __name__ == '__main__':
main()
elif __name__ == '__live_coding__':
TictactoeDiagram().demo() | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/diagram_writer.py | diagram_writer.py |
import typing
from abc import ABC, abstractmethod
from io import StringIO
import numpy as np
class GameState(ABC):
DISPLAY_CHARS = 'O.X'
NO_PLAYER = 0
X_PLAYER = 1
O_PLAYER = -1
players = (X_PLAYER, O_PLAYER)
def __repr__(self):
return f"{self.__class__.__name__}()"
@property
@abstractmethod
def game_name(self) -> str:
""" Display name for the game. """
@abstractmethod
def __eq__(self, other) -> bool:
""" Compare with another game state. """
@abstractmethod
def get_valid_moves(self) -> np.ndarray:
""" Decide which moves are valid for this board state.
:return: an array with one boolean entry for every possible game move.
True if that move is allowed, otherwise False. Each move's index is
the move value to pass to make_move().
"""
def is_ended(self) -> bool:
""" Has the game ended in the given board? """
if self.get_winner() != self.NO_PLAYER:
return True
valid_moves = self.get_valid_moves()
return not valid_moves.any()
@abstractmethod
def display(self, show_coordinates: bool = False) -> str:
""" Create human-readable display text for this board state.
:param show_coordinates: True if the display should include coordinate
labels.
:return: display text. Typically, this should be valid text for passing
to create_board().
"""
@abstractmethod
def display_move(self, move: int) -> str:
""" Create human-readable display text for the given move.
:param move: the move to describe.
:return: display text. Typically, this should be valid text for passing
to parse_move().
"""
def get_players(self) -> typing.Iterable[int]:
return self.X_PLAYER, self.O_PLAYER
@abstractmethod
def get_move_count(self) -> int:
""" The number of moves that have already been made in the game. """
@abstractmethod
def get_spaces(self) -> np.ndarray:
""" Extract the board spaces from the complete game state.
Useful for teaching machine learning models.
"""
@abstractmethod
def parse_move(self, text: str) -> int:
""" Parse a human-readable description into a move index.
:param text: the move description, typically coordinates
:return: the index of a move in the result of get_valid_moves().
:raise: ValueError if text is invalid.
"""
def display_player(self, player: int) -> str:
""" Create human-readable display text for a player. """
if player == self.X_PLAYER:
return 'Player X'
return 'Player O'
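    # Example with hypothetical counts: three X pieces and three O pieces on
    # the board means X moves next; four X and three O means O moves next.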
def get_active_player(self) -> int:
""" Decide which player will play next.
This default implementation assumes that PLAYER_X goes first, and
the players alternate turns adding a piece to the board.
:return: the player number to play next, typically PLAYER_X or
PLAYER_O.
"""
board = self.get_spaces()
x_count = (board == self.X_PLAYER).sum()
        o_count = (board == self.O_PLAYER).sum()
        return self.X_PLAYER if x_count == o_count else self.O_PLAYER
@abstractmethod
def make_move(self, move: int) -> 'GameState':
""" Get the board state after making a move.
:param move: the index of a move in the result of get_valid_moves().
        :return: a new game state with the move applied.
"""
def get_winner(self) -> int:
""" Decide which player has won, if any.
:return: the player number of the winner, or NO_PLAYER if neither has
won.
"""
for player in (self.X_PLAYER, self.O_PLAYER):
if self.is_win(player):
return player
return self.NO_PLAYER
@abstractmethod
def is_win(self, player: int) -> bool:
""" Check if the given player has won on this board state.
:param player: the player number to check.
:return: True if the player has won.
"""
# noinspection PyAbstractClass
class GridGameState(GameState):
def __init__(self,
board_height: int,
board_width: int,
text: str | None = None,
lines: typing.Sequence[str] | None = None,
spaces: np.ndarray | None = None,
extra_count: int = 0):
self.board_height = board_height
self.board_width = board_width
if spaces is None:
self.board = np.zeros(self.board_height*self.board_width + extra_count,
dtype=int)
else:
self.board = spaces
spaces = self.get_spaces()
if extra_count == 0:
self.board = spaces
if text:
lines = text.splitlines()
if lines:
if len(lines) == self.board_height + 1:
# Trim off coordinates.
lines = lines[1:]
lines = [line[2:] for line in lines]
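            # DISPLAY_CHARS is 'O.X', so index(c) - 1 maps 'O' to -1
            # (O_PLAYER), '.' to 0 (NO_PLAYER), and 'X' to 1 (X_PLAYER).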
for i, line in enumerate(lines):
spaces[i] = [self.DISPLAY_CHARS.index(c) - 1 for c in line]
def __repr__(self):
board_repr = " ".join(repr(self.board).split())
board_repr = board_repr.replace('[ ', '[')
return f'{self.__class__.__name__}(spaces={board_repr})'
def __eq__(self, other):
if not isinstance(other, GridGameState):
return False
return np.array_equal(self.board, other.board)
def get_move_count(self) -> int:
return int((self.get_spaces() != GameState.NO_PLAYER).sum())
def get_spaces(self) -> np.ndarray:
return self.board[:self.board_height*self.board_width].reshape(
self.board_height,
self.board_width)
def get_valid_moves(self) -> np.ndarray:
spaces = self.get_spaces()
return spaces.reshape(self.board_height *
self.board_width) == GameState.NO_PLAYER
def display(self, show_coordinates: bool = False) -> str:
result = StringIO()
if show_coordinates:
result.write(' ')
for i in range(65, 65+self.board_width):
result.write(chr(i))
result.write('\n')
spaces = self.get_spaces()
for i in range(self.board_height):
if show_coordinates:
result.write(chr(49+i) + ' ')
for j in range(self.board_width):
result.write(self.DISPLAY_CHARS[spaces[i, j]+1])
result.write('\n')
return result.getvalue()
def display_move(self, move: int) -> str:
row = move // self.board_width
column = move % self.board_width
column_text = chr(65 + column)
return f'{row+1}{column_text}'
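    # display_move() and parse_move() are inverses; on a 3x3 board, move
    # index 5 displays as '2C', and parse_move('2C') returns 5.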
def parse_move(self, text: str) -> int:
trimmed = text.strip().replace(' ', '')
if len(trimmed) != 2:
raise ValueError('A move must be a row and a column.')
row, column = trimmed[0], trimmed[1:]
i = ord(row) - 49
j = ord(column.upper()) - 65
if i < 0 or self.board_height <= i:
raise ValueError(f'Row must be between 1 and {self.board_height}.')
if j < 0 or self.board_width <= j:
max_column = chr(64 + self.board_width)
raise ValueError(f'Column must be between A and {max_column}.')
return i*self.board_width + j
def make_move(self, move: int) -> 'GridGameState':
moving_player = self.get_active_player()
new_board: np.ndarray = self.board.copy()
i, j = move // self.board_width, move % self.board_width
new_board[i, j] = moving_player
return self.__class__(board_height=self.board_height,
board_width=self.board_width,
spaces=new_board) | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/game_state.py | game_state.py |
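# Resource object code for the Qt rule pages. Like zero_play_images_rc, this
# looks like resource-compiler output (assumption: pyside6-rcc); regenerate
# rather than hand-editing.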
from PySide6 import QtCore
qt_resource_data = b"\
\x00\x00\x02e\
<\
!DOCTYPE html>\x0a<\
html>\x0a<head><met\
a charset=\x22UTF-8\
\x22></head>\x0a<body>\
\x0a<h1>Tic Tac Toe\
Rules</h1>\x0a<p>P\
layers: 2</p>\x0a<h\
3>Goal</h3>\x0a<p>P\
lace your pieces\
to make a line \
of three in a ro\
w, either vertic\
ally,\x0ahorizontal\
ly, or diagonall\
y.</p>\x0a<p><img a\
lt=\x22Example\x22 src\
=\x22tictactoe.png\x22\
/></p>\x0a<h3>Setu\
p</h3>\x0a<p>Choose\
which player wi\
ll play black an\
d go first.</p>\x0a\
<h3>Play</h3>\x0a<p\
>Starting with t\
he black player,\
take turns addi\
ng your mark in \
one of the empty\
\x0aspaces. If you \
make a line of t\
hree in a row, y\
ou win immediate\
ly. If the board\
\x0ais full without\
any line of thr\
ee in a row, the\
game is a draw.\
</p></body>\x0a</ht\
ml>\x0a\
\x00\x00\x08\x82\
\x89\
PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\
\x00\x00\xc8\x00\x00\x00\xc8\x08\x02\x00\x00\x00\x22:9\xc9\
\x00\x00\x00\x09pHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95+\x0e\x1b\x00\x00\x084IDATx\x9c\xed\
\x9dK\x92\xc28\x10D\xe5\x89\xd9qK\x0e\xc6-Y\
3\x0b\xd1\x8c\x00c\xebSYUR\xe5\xdbuD\x83\
e\xeb9\xcb\x1f\x09m\xe9vI\x84H\xf3\x8fu\x03\
\xe4\xb8\xde\xad[ \xc1\x1a{\xb1\x94X\xc4\x13\x14\x8b\
@\xa0X\x04\x02\xc5\x22\x10(\x16\x81@\xb1\x08\x04\x8a\
E \xfc\x8b\xfa\xe2\xe3\xe71\xb3<\x95]c/,\
\x10\x15\xab\xe8\x86\xc7\xe3q\xf0\x8f\xdb\xb6\xfd\xff\x87\xb7\
\xee\xa9\x7fDY\xfe\xa7\xb7\xbd\xb0FH\xac\xeb=\x9d\
\xc9TR\xfe\xe7S2\x0f\x1d3\xf2\xd4;\x7f\xd6\xc3\
^\xf8`X\xacF\xa5\xbe\xc9\x9f5\xd6K\xeaE\x0a\
\xf5\xfac@\xaca\xa5J\xcc\xf4B\xbc\x9b\xa3^\xfd\
w\x85\xd7\xfb\xe3\xf1\x90\xb2\xea\xc5\xf3;\xd5^\xc4B\
7\xb4\xca\xeb\xe4>\xba\xc4\xba\xde\xc5\x95*Qrk\
\x8dMx\xa5],\xb0U\x19\xb8[k\x84\xa2c\x1a\
\xc5R\xb1*\x03tK\xb9\xb3C\xba\xd5\x22\x96\xa2U\
\x19\x88[&\xdd\x1c\xcf-\xbe\xd2!\x10\xaa\xc5R\x8f\
\xab\x8cph\x19&G\xb0\xd0\xaa\x13\xcb\xc8\xaa\x8c\x98\
[\xe6]k\xde\x00EX\x0a\x09\x84\x0a\xb1L\xe3*\
#\x10ZN\xd2\xc2I3\xf00\xb1\x08\x843\xb1\x1c\
\xc4Uf(\xb4\x5c\xe5\x84\xab\xc6\xc0`b\x11\x08\x14\
\x8b@8\x14\xcbM\x1d\xcctVC\x87\xa5\xc7a\x93\
\xa4ab\x11\x08\x14\x8b@\x80\x8b\xb5m\xdb\xdb\xd4\x09\
\x12\x03\xacX/\xa5\xe8V4\xf4J!\xdd\x0a\x05V\
\xac\x8f\x9bJ\xba\x15\x07xb\xd1\xad\x98h\x94B\xba\
\x15\x10\xa5k,\xba\x15\x0d\xbd\x8bw\xba\x15\x0a\xd5\x07\
\xa4t+\x0e\x87b\xdd.\xe2}?\xe2\xd6\xb6m=\
\xf3\xd6\x1dNuw\xd8$i\x0c^\xe90\xb7\x22`\
\xf3\xae\x90n-\xcf\x99X\x80j\x98iu\xab\xb3\x0e\
f\x5c\x95\x1eW\x8d\x81a9\xba\x81\xb9\xb50\x15b\
\xc1B+U\xbb5\x14W\x19'9\xe1\xa4\x19x\xec\
\xc7c1\xb7\x96\xa4N,dh\xa53\xb7\x04\xe2*\
c\x9e\x16\xe6\x0dP\xa4:\xb1\x8c\xdc\x12\xb3*c\xd8\
\xb5\x91\xacJ\x1eJ\xe1\x0b\xd6\xc4\x95h\x11\x0b\x1cZ\
i\xd7-\xf1\x13\xdd$9\x82\xc5UjN,u\xb7\
3\xa5\x94\xbb9\x9eU\xa9\xa7\x14\xd2-\x9f\x1br\
F\xd75\x16\xd8\xad\x9d\x0a8\xa9[Q\xadJ\xfd\x17\
\xef\xb7\x0bb^\xd7\xf3;s\x7f\xcc\xeeV`\xab\xd2\
\xd0\xca\x14\xb7K\xfa\xbbw\x1b\x9f\x89\xbf\xbf&\xc5\xed\
\xf2\xe6\xd3\xf5\x8e\xba\x96\x97\xb56\xb6R\x99\xe1\xb5t\
\x86\xf5:Y\xe6D\xc1\xad$\xa7\x17\x95\xfaCh\xf5\
\xafB\xaf\x8c\xe4\xb2r:n\xa51\xbd\xa8\xd4;\xa2\
\xeb\x15\x16\x07\xf7\xe4\xf2\xab\xb5\x1b\xd4\xdcJ\xefm\xe3\
B\x98\xbd\xc0VXE\x5c\x0c\xa9\xb9Un\x94t\xe1\
\xe8\x95\xce9\x0a\xf7\x89D\x88\xa9\xc4Jtk\x1af\
\x13+\xd1\xad9\x98P\xacD\xb7&`N\xb1\x12\xdd\
\xf2\xce\xb4b%\xde\xb2\xb9ff\xb1\x12s\xcb3<\
\xef\x09\x00\xc0\x10M\x13>\xb2j\xde\x9d\xd2y\xf0\x8b\
g\xf2R\xf8\x825\xd1\x19\xab\x88\xf5\x0d\xdd2e-\
\xb1\x98[nXK\xacD\xb7\xbc\xb0\x9cX\x89n\xb9\
`E\xb1\x12\xdd\xb2\x076\x1e\xcb|\x88\x9c\xc8\xf8-\
\xf3\xbd\x98\x16Q\xb1\xea\x83\xa1\xfcO\xe8X\xd0\x0e\xb7\
\x8a\x8fH\x0e\xb0\x0e\x86\x90X#\xb5&\x7f\x167\x8c\
\xbd\xde\xad\xeb=\xb5L\x09)\xff\xf3dJH<\x86\
\xc5\x92\xba|\xc1\xe9U\xe3V\xa3R\xdf\xe4\xcfR\xaf\
\x17\x03b!\xae\x88Az\x1d\xb85\xacT\x09\xf5z\
\xd1{W\x08\xbd\xcfR\x98\xf4\x9c7q\xbd?\x1e\x0f\
\xf1u\xaf\x9f\xdf\x19\xfbV\xb4K,\x85C\xa6\xe2\x16\
t)\xf5\xe0n\xb5\x8b\xa5v\xb0\xf0ni\xfclN\
T\xb7\x1a\xc5R>LtkZZ\xc429@\xe2\
\x1b\xfd\xaa\x80t\x0b\xc1\xa2\xaft\xce\xe0\xef\x9d\xa2\xa9\
\x16\xcb\xf0\x9c\x13\xdct\x11W\x9an\x05\x0c\xad:\xb1\
\xcc\x0f\x8aH\x03\xbe\x8a \xdd\xc2\x11\xb4\x14\xbe`M\
\x04Q!\x96\x93\xf3l\xb0\x19\xbf\x9fZ\xa9\xb9\x15*\
\xb4\xa2'V\x86\xb9%\xce\x99X\xae\xce\xb0\xee\xc6T\
<d\xd7q+Nh1\xb1\xfe\x87\xb9%\x08\xc5z\
\x83nIq(\x96\xc3\xd0\xeehR\xe3\xcbf\xb4[\
A\xaa!\x13k\x07\xe6\xd68\x14k\x1f\xba5\x08\xc5\
\xfa\x09\xdd\x1a\x81b\x1dA\xb7\xba\xa1X'\xd0\xad>\
(\xd69t\xab\x03\x8aU\x05\xddj\x85b\xd5B\xb7\
\x9a\xa0X\x0d\xd0\xadz(V\x1bt\xab\x12\x8a\xd5\x0c\
\xdd\xaa\xe1P,\x87\x93\xc4;\x9a\x04X\x1a}\xc4\xad\
\x9d\xa5\xd4W\x84\x89\xd5\x09s\xeb\x18\x8a\xd5\x0f\xdd:\
\xe0L,W\xa1\xdd\xdd\x18@5\xcc\xb4\xba\x15\xa4\x0e\
&&\xd68\xcc\xad]*\xc4rr\x86\x0d6\x03\x16\
Z\xa9\xda\xad8q\x95\x98XR0\xb7>\xa8\x13\xcb\
\xfc<\x13i\x002\xb4\xd2\x99[\xa1\xe2*5$\x96\
\xe1A\x11\xdc\xb4\x91[\xd1\xacJ,\x85\xe2\xb0&f\
Z\xc429\xe7\x00?t\xab\xf1\x83X\x05\x01\xe3*\
5'\x96\xf2\x01\x82\xfd\xf8\xbb\xb2[\x11\xe6{}\xd0\
^\x0a\xd5\xdc\x82n\x88n\x81\xe9\xba\xc6\xd2Y\x09\x07\
\xbf\x09\xa8[;\x150\x92[\xbd\x17\xef\xe08\x01~\
\xf9\xfb\x86\xb6m\x13\xd7\xeb\xf9\x9dy/\xa2\xba5\xb0\
2E>d\xb2GJ\xff\x22\xf7vI\x7f\xf7n\xe3\
?\xfb\xbe\xbf&\x85\xc8:d\xb31\xbc\x96\x8e\x94^\
\xb6\xc7zX\xaf\x93eN\xe2\xb9%\xb4\xfa\xd7\x88^\
~\x0eq\xa1WFrY\xb9`n\x89\xaeWX\x1e\
\xa9y\x97\x90,\xdavr\xf9\xd5\xba\x17\x91\xdc\x82\xad\
\xb0\xba\xc6!\xd3\x5c\x87l-\xf8JG\x9d\x18\xf7\x89\
\x14\xcb\x82\x00nQ,#Vw\x8bb\xd9\xb1\xb4[\
\x14\xcb\x94u\xdd\xa2X\xd6\xf0\xae\x90\xa0X3\xb7\x16\
=c\x88-\x0b\x0dn\x5c\xe0a\xe3GV\xcd\xbc;\
,\x85\x9eX\xa8&R,\xdfL\xeb\x16\xc5\xf2\xc7\x12\
\xb9E\xb1\x5c2\xbf[\x14\xcb+\x93\xbbE\xb1\x1c3\
\xb3[\xb0\xf1X\xf3\x0e\xf4s\x85\xc8\xf8-\x8b\xbe\x10\
\x15\xab\xfe\x94*\xff\x93\x92\x1d\xd3\xe7V\xf1\x11\xc9\x01\
\xd6\xd5\x08\x895\x92\xd2\xf9\xb3\xd4\xeb\x80&\xb7\xae\xf7\
\xd42%\xa4\xfc\xcf\x93)!-\x0c\x8b%U\xf8\xa9\
\xd715n5*\xf5M\xfe\xac\x88^\x03b!\xae\
%\xa9\xd7\x01\x07n\x0d+U\x22\xa2W\xef]!\xf4\
\x0ee\xaa\xdb\x1fUv\xef\x13\xaf\xf7\xc7\xe3!e\xd5\
\x8b\xe7w\xf6\xf6E\x97X\x0a\x1dO\xb7~\xf1\xe5\x96\
\xb8R%\xddn\xb5\x8b\xa5\xd6\xe5t\xeb\x17\xefni\
\xfclN{_4\x8a\xa5\xdc\xd9t\xeb\x17\xee\xddj\
\x11\xcb\xa4\x9b\xe9\xd6._\x15\xd0\x9b[|\xa531\
\x9e\x7f\xef\xb4Z,\xc3\xe4`h}P\xc4\x95\xa6[\
M\xa1U'\x96y\xd7\x9a7\xc0\x0f_E\xd0\xa7[\
,\x85+\xe0\xb0&V\x88\xe5$-\x9c4\xc3\x96\xdf\
O\xad\xd4\xdc\xaa\x0c-&\xd6:\xb8\xca\xad3\xb1\x5c\
\xe5\x84\xab\xc6\xe8S\xf1\x90]\xc7\xad\x9a\xd0bb\xad\
\x86\x93\xdc\xa2X\x0b\xe2\xc1\xadC\xb1\x1c\x96\x1e\x87M\
\xd2\xa1\xf1e3\xda\xad\xd3j\xc8\xc4Z\x16\xdb\xdc\xa2\
X+c\xe8\x16\xc5Z\x1c+\xb7(\xd6\xfa\x98\xb8E\
\xb1B\xa0\xef\x16\xc5\x8a\x02t\x04\xf37\x14+\x10\xbf\
\x06\xdb \x80M\xb1'.Q\xcb-&\x16\x81@\xb1\
\x08\x04\x8aE \x1c\x8a\xe5p\xaa\xbb\xc3&\xe9\x00^\
\x1a\xbd\x95\x9d\xa5\xd4\xdfab\x11\x08\x14\x8b@8\x13\
\xcbU\xe9q\xd5\x18}\xdcT\xc3\xd3:\x98\x98X\x04\
D\x85XNr\xc2I3lq\x10Z5q\x95\x98\
X\x04D\x9dX\xe6ia\xde\x00?\x98\x86Ve\x5c\
\xa5\x86\xc42\xecZZ\xf5\x81\x91[\xf5V%\x96B\
\x02\xa2E,\x93\xe4`\x5c\xed\xa2\x1eZMq\x95\x9a\
\x13K\xb9\x9bi\xd5\x01\x8an\xb5Z\x95zJ\xa1Z\
g\xd3\xaaST\xdc\xea\xb0*u^c)t9\xad\
\xaa\x04\xecV\x9fU\xa9\xff\xe2\x1d\xda\xf1\xb4\xaa\x89\xdb\
e\xdb6q\xbd\x9e\xdf\xd9\xdb\x17\x03C\x93\xf3&e\
\xe7\xbcS\xa9>n\x97\xf47\xf7f|\xf0\xb1\xf5\x92\
'\x19)\xbd\xa8\xd48\xc3zyZ\xa4)3\xa2\x17\
\x95\x92\xa5\xd0+3\xf3\xb2r\x99\xb2Y\x5c\x08\xd3\x96\
\xe2\x08\x9f\x5c~M\xb0\x10f\x09\xd5\xf1\x83E_\xf0\
\x95\x0e\x81@\xb1\x08\x04\x8aE P,\x02\x81b\x11\
\x08\x14\x8b@\xa0X\x04\x02\xc5\x22\x10\xfe\x03\x02\x02\xf7\
\xca\xf6X\xfa\x83\x00\x00\x00\x00IEND\xaeB`\
\x82\
\x00\x00\x06\x8c\
\x89\
PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\
\x00\x00\xd2\x00\x00\x00\xb4\x08\x02\x00\x00\x00nV\xe5R\
\x00\x00\x00\x09pHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95+\x0e\x1b\x00\x00\x06>IDATx\x9c\xed\
\x9c\xcbv\x838\x10\x05\x9b9\xb3\xcb\x07f\x9f\x0f\xcb\
\xde\x1f\x98\xb5gA\x9c\x01\x03z\x8bV\xb7\xaa\x96F\
\x5ct\xec\x8a\x94\x03\xf6]\xe4\xfbC:\xf1\xf5\xd3+\
\x9cd\xe3\xc9\xff\xf4\x08\x05\x08\x83v\xa0\x00\xda\x81\x02\
h\x07\x0a\xa0\x1d(\x80v\xa0\x00\xda\x81\x02h\x07\x0a\
\xa0\x1d(\x80v\xa0\x00\xda\x81\x02h\x07\x0a\xa0\x1d(\
\x80v\xa0\x00\xda\x81\x02h\x07\x0a\xa0\x1d(\x80v\xa0\
\x00\xda\x81\x02h\x07\x0a\xa0\x1d(\x80v\xa0\x00\xda\x81\
\x02h\x07\x0a\xa0\x1d(\x80v\xa0\x00\xda\x81\x02h\x07\
\x0a\xa0\x1d\x00\xcc\xc1B\xd1\x18\xc9\xf7'\xb3\xc9\x82\x02\
\xffjO\x00,\xf3\xf5s\xf2b\xc2\x02\x89vP\xc4\
\xa9p\xdbCA\xf9\xd8d!\x9f\x80sic\xd0\x0e\
2Iq.6\x12\xed \x87t\xe7\x82\xe3\xd1\x0e\x92\
\xc9u\xee\xfa,\xb4\x834\xca\x9c\xbb8\x17\xed@\x01\
\xb4\x83\x04j\x96\xba\xb3\x04\xb4\x03\x05\xd0\x0e\x1a\xf0x\
<\x1e\x8fG\xfax\xb4\x83Z\xfe\x84K7\x0f\xed\xa0\
\x8a\xacE\xee\x0f\xb4\x83r\xde\x9c\xfb\xfc\xfcL<\x11\
\xed\xa0\x90b\xe7\x04\xed \x89\xc3\xd7I\xb2\x9d\xdb'\
\xa0\x1ddS\xb3\xce\xad\xa0\x1d\xa4\xf1Z\xaeJ\x9c;\
,\x96|\xcd\x13\x92\xf9\xfex||o_(sN\
X\xed \x9dV\xce\x09\xdaA\x22\x0d\x9d\x13\xb4\x83\x14\
\xde\x9d\xfb\xf9\x8a\x9f\x13\xfc-\x05\xff\xdbA\x84s\xe7\
V\xab\xf8\xe5\x18\xf4 \xb2\xce\x95\xfex\x9bM\x16.\
y\xbfW\x92\xb2\xb7\xa6\x81vpN\xc9\xffs\xc9\xa0\
\x1d\x9c\xd0\xd59\x11Y\xda\xc6\x81\x03\xea\x9f}E\xa1\
\xf1\x89\xe4\x1d\xbd\xd7\xb9\x156Y\xf8\x9f\x92{\xc2E\
p\x03e\x02\xd2\xee\xae\x95\xacs\xdc\xb7\x83\x13\x92{\
\x99\xb2\x9d\xfb\xfa\x11\x91\xe7\xf3y<\xb2,\xcb6\xf9\
\x146Y\xbf$\xf72\x158\xf7|>O\x9d\x13\x91\
\xdfC4>\xcdH\xf2\x0f\xaa\xcb\x9c\x8b\xc6\x86\xcdC\
;\x8f\xa4;\x97\xfb\x1c\x22\xcd\xb9\x95\x80yh\xe7\x8e\
1\x9c[\xb92\x0f\xed|Q\xec\xdc\xe7g\xe4\xdc|\
\xe7VN\xcdC\xbb\x19\xb9\xe19D\x18\xb4sD\xda\
R\x17r\xee*\xa1t\xa9[9.xh7\x17\xea\
\xeb\xdc\x0a\xdaM\xc4 \xce\x09\xda\xcd\xc38\xce\x09\xda\
M\xc2P\xce\x09\xda\xcd\xc0h\xce\x09\xda\xb9g@\xe7\
\x04\xed\x5c\xd1\xba\x97i\xfb\xfa\xef\xf7J\x8aX\x96\xf7\
o\x13\xa3\x9d[\xc6\x5c\xe7V\xd0\xce\x17M{\x99\xde\
\x8e\x96-x\xc7\xa5N\xf8\x9a\xa7C\xda\xf52\x1d\xc7\
,\xcb\x92\xf5\xb8\xe2\xd49a\xb5\xf3G/\xe7^#\
\xd3\xd7\xbc+\xe7\x04\xed\x9c\xd1\xd7\xb9\xd7\xf8\x14\xf3\x02\
\xce\x09\x9b\xac'\x9a\xf72\x05\xceZ\xcd+\xfe-\x05\
\xda9\xa1G/S\x88\xef\x0f\xf93,?\x19\xed<\
\xd0\xa9\x97)\x0e\x8dO\xd3\xd2\xaf\x97\xa9\x1fhg\x9b\
{\xba#\x9a\x83v\x861\xea\x9c\xd0\xf8d\x97\x91\x9f\
}E\xa1\xf1\xc9d\xf2M\xeb\x5c\xb7w\x83M\xd6\x1e\
\xb7\xf52\xf5\xc3\xe9\x0d\x94\x1ew\xaa\xc6H\xbe\xb3\x97\
\xa9_\xb2;\xed\x92;\x8e\xca\x92\x8b\xef\xcbG\x93C\
\x87\x94z\x99\xa2\xc9\xa1C\x135>%w\x1c\x15$\
Wv\x1cU\xcdG\xa9\x97)<\x9f\x9a1\x8e\xb4K\
\x7f\x07s\xdf\xeb\x16\x1dG\x953Q\xe9e\xbaJ\xae\
\x1f\xe9E\xbb\x9e\xef]\x93\x8e\xa3\x9a9h\xf52\x9d\
&\xa7\x8e\x0c\x8ew\xa1]\xcf\x9d\xa2U\xc7Q\xc9\xd5\
ED\xb5\x97\xe9\x98\x9c\x15\x1b8\xcb\xbeve\xefE\
\xca\xb9M;\x8e2\xae\xbbA\xb7\x97\xe9-\xb9 \xf6\
\xea\x5c\xfb\xda\xf9\xc5\xf4s\x880\xc6\xb5\xab\xf9\x13\x0c\
'\xb4\xee8\x8a_q\xcf\x08\xbdL\xf1+\xa6C\xe3\
\xd3\xf88^\xe7V\xd0n8\xdc;'h7\x1a3\
8'h7\x14\x938'h7\x0e\xf38'h7\
\x08S9'\xe6\xb5\xab\xff\xf6\xce]\x1dG\x81+\x0e\
\xdb\xcb\x14\xbfb:4>\x0d\xc5l\xeb\xdc\x8a}\xed\
j\xfe\x10o\xec8:\xbd\xee\xe0\xbdL\x19\xd7\x0ds\
8\xd7\xbevR\xde\xa8\x902&\xf7S\x8c\x7f~\xaf\
\xe4\xf6\xce\xbd\xc6\xf4\x9bsVl\xe0,\x17\xdaII\
\x7fL\xfa\xc8&\x1dGo\x98\xe8e\xaa\x9aCp\xbc\
\x17\xed$\xefS\xc9M\xae\xef8\xdab\xa5\x97\xa9v\
&\xd7#\x1di'\xa9{PY\xf2\xb2,W\x1f\xe4\
\xef\xa12\xe7:\xf725\x99s\xc9|\xe6j|\x1a\
\xb5\xe3h\xc5V/S4\x99_\x8e\xed\x19\xaf\xe3H\
\x0c\xf62\xf5K\xf6\xb5\xc9\x0e\x8c\xc5^\xa6~\xa0\xdd\
\x1d\xd8\xed\xc8\xe9\x04\xdau\x07\xe7\x8e\xd0\xf8\xd4\x979\
\x9f}E\xa1\xf1\xa9c\xb2\xf5^&\x1a\x9f\xec\xe1\xa0\
\x97\xa9\x1fNo\xa0hw\x1c\xf9\xe8e\xea\x97\xecN\
\xbb\x01:\x8e\xdc\xf42E\x93\x8b\xe7\xeck\x93\x1d\xa0\
\xe3\xc8S/S\xe0\xac\xca9;\xd2n\x80\x8e#g\
\xbdLW\xe3\xeb\xe7\xecE\xbb\x01:\x8e\xfc\xf52\x9d\
\x8el2g\x17\xda\x0d\xd0q\xe4\xb2\x97\xe98\xa6\xd5\
\x9c\xedk7@\xc7\x91\xd7^\xa6\xb7\xa3\x0d\xe7l_\
;mx\x0eQ\x80q\xed\xb4;\x8e|\xf72m_\
o;g\xe3\xda\xa9\xc2:W\x0c\xda\x15\x82s5\xa0\
]\x098W\x09\xda\xd5\x82s\x05\xa0]\x09\x7f\xaa\xe1\
\x5c\x19\xee\xbe\x0ap\x17\x08W\x83\xf1\xd5\xceN\xc7\xd1\
\xf6u\xe6l\x5c;\xb0\x89}\xed\x8ct\x1c\xbd\x1d\x9d\
|\xce\xf6\xb5\x13\x1b\x1dG\xc713\xcf\xd9\x85vb\
\xa0\xe3\xe8t\xe4\xb4s\xf6\xa2\x9d\x8c\xdeqt5~\
\xce9;\xd2N\x86\xee8\x0a\x9c5\xe1\x9c\xdd\xdd\xb7\
\x1b\xb5\xe3(\x9a<\xd5\x9c\xddi\xb72^\xc7\x11\xc9\
[|m\xb2`\x04\xb4\x03\x05\xd0\x0e\x00\xe6\x80\xc6'\
\x92\x15\x92\xd9dA\x01\xa77P\xc6\xeb8\x22y\x8b\
;\xed\xcc\xb6'YL\xa6\xf1ID\xac\xb6'YL\
\xa6\xf1\xe9\x85\xcd\xf6$\x8b\xc94>\xbd\xb0\xd9\x9ed\
1\x99\xc6\xa7\x176\xdb\x93,&\xd3\xf8\xf4\xc2f{\
\x92\xc5d\x1a\x9f\xc06\xc6\xb5\xb3\xd9\x9ed1\x99\xc6\
'0\x0f\xda\x81\x02h\x07\x0a\xa0\x1d(\x80v\xa0\x00\
\xda\x81\x02\xc6\xb5\xb3\xd9\x9ed1\x99\xc6'0\x8f}\
\xedl\xb6'YL\xa6\xf1i\x8f\xcd\xf6$\x8b\xc94\
>\xed\xb1\xd9\x9ed1\x99\xc6\xa7=6\xdb\x93,&\
\xd3\xf8\xb4\xc7f{\x92\xc5d\x1a\x9f\xf6\x98mO\xb2\
\x98L\xe3\xd3\x9e\xf1:\x8eH\xde\xe2k\x93\x05#\xa0\
\x1d(\x80v\xa0\xc0\x7fg\xec\xf1\xe5\xdb\xda\xffK\x00\
\x00\x00\x00IEND\xaeB`\x82\
\x00\x00\x02\xa1\
<\
!DOCTYPE html>\x0a<\
html>\x0a<head><met\
a charset=\x22UTF-8\
\x22></head>\x0a<body>\
\x0a<h1>Connect 4 R\
ules</h1>\x0a<p>Pla\
yers: 2</p>\x0a<h3>\
Goal</h3>\x0a<p>Pla\
ce your pieces t\
o make a line of\
four in a row, \
either verticall\
y,\x0ahorizontally,\
or diagonally.<\
/p>\x0a<p><img alt=\
\x22Example\x22 src=\x22c\
onnect4.png\x22 /><\
/p>\x0a<h3>Setup</h\
3>\x0a<p>Choose whi\
ch player will p\
lay black and go\
first.</p>\x0a<h3>\
Play</h3>\x0a<p>Sta\
rting with the b\
lack player, tak\
e turns adding y\
our piece in one\
of the empty\x0asp\
aces, but the pi\
ece will fall to\
the lowest empt\
y space in the c\
olumn. If you\x0ama\
ke a line of fou\
r in a row, you \
win immediately.\
If the board is\
full without\x0aan\
y line of four i\
n a row, the gam\
e is a draw.</p>\
</body>\x0a</html>\x0a\
\
"
qt_resource_name = b"\
\x00\x0f\
\x0d\xc7d\xe3\
\x00z\
\x00e\x00r\x00o\x00_\x00p\x00l\x00a\x00y\x00_\x00r\x00u\x00l\x00e\x00s\
\x00\x0e\
\x0aP\x10\xfc\
\x00t\
\x00i\x00c\x00t\x00a\x00c\x00t\x00o\x00e\x00.\x00h\x00t\x00m\x00l\
\x00\x0d\
\x0e\xa5\x19\xe7\
\x00t\
\x00i\x00c\x00t\x00a\x00c\x00t\x00o\x00e\x00.\x00p\x00n\x00g\
\x00\x0c\
\x07;\xfe'\
\x00c\
\x00o\x00n\x00n\x00e\x00c\x00t\x004\x00.\x00p\x00n\x00g\
\x00\x0d\
\x03\xbem\xdc\
\x00c\
\x00o\x00n\x00n\x00e\x00c\x00t\x004\x00.\x00h\x00t\x00m\x00l\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x11\x7f\
\x00\x00\x01\x87\xd8\xe2\xe4O\
\x00\x00\x00f\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xef\
\x00\x00\x01\x80\x01Y\x88\xce\
\x00\x00\x00$\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x87\xd8\xe2\xe4S\
\x00\x00\x00F\x00\x00\x00\x00\x00\x01\x00\x00\x02i\
\x00\x00\x01\x80\x01Y\x88\xce\
"
def qInitResources():
QtCore.qRegisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources() | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/zero_play_rules_rc.py | zero_play_rules_rc.py |
import itertools
import typing
from PySide6.QtGui import QColor, QBrush, QFont, QResizeEvent, QPixmap, Qt, QPainter, QPen
from PySide6.QtWidgets import QGraphicsEllipseItem, \
QGraphicsSceneHoverEvent, QGraphicsSceneMouseEvent, QGraphicsScene
from zero_play.game_state import GridGameState, GameState
from zero_play.game_display import GameDisplay, center_text_item
from zero_play.grid_controls_ui import Ui_GridControls
class GraphicsPieceItem(QGraphicsEllipseItem):
def __init__(self, row, column, hover_listener):
super().__init__(0, 0, 100, 100)
self.row = row
self.column = column
self.setAcceptHoverEvents(True)
self.hover_listener = hover_listener
def hoverEnterEvent(self, event: QGraphicsSceneHoverEvent):
super().hoverEnterEvent(event)
self.hover_listener.on_hover_enter(self)
def hoverLeaveEvent(self, event: QGraphicsSceneHoverEvent):
super().hoverLeaveEvent(event)
self.hover_listener.on_hover_leave(self)
def mousePressEvent(self, event: QGraphicsSceneMouseEvent):
super().mousePressEvent(event)
self.hover_listener.on_click(self)
class GridDisplay(GameDisplay):
background_colour = QColor.fromRgb(0x009E0B)
line_colour = QColor.fromRgb(0x000000)
player1_colour = QColor.fromRgb(0x000000)
player2_colour = QColor.fromRgb(0xFFFFFF)
def __init__(self,
start_state: GridGameState):
super().__init__(start_state)
self.start_state: GridGameState = start_state
self.spaces = [] # self.spaces[i][j] holds row i, column j
self.column_dividers = []
self.row_dividers = []
self.column_labels = []
self.row_labels = []
self.text_x = self.text_y = 0
ui = self.ui = Ui_GridControls()
ui.setupUi(self)
scene = QGraphicsScene()
ui.game_display.setScene(scene)
scene.setBackgroundBrush(self.background_colour)
self.player1_icon = self.create_icon(self.player1_colour)
self.player2_icon = self.create_icon(self.player2_colour)
ui.black_count_pixmap.setText('')
ui.white_count_pixmap.setText('')
ui.black_count.setText('')
ui.white_count.setText('')
for _ in range(start_state.board_height - 1):
self.row_dividers.append(scene.addLine(0, 0, 1, 1))
for _ in range(start_state.board_width - 1):
self.column_dividers.append(scene.addLine(0, 0, 1, 1))
for i in range(start_state.board_height):
self.row_labels.append(scene.addSimpleText(f'{i + 1}'))
for j in range(start_state.board_width):
self.column_labels.append(scene.addSimpleText(chr(65+j)))
self.to_move = scene.addEllipse(
0, 0, 1, 1, brush=self.get_player_brush(self.start_state.X_PLAYER))
self.to_move.setVisible(False)
self.move_text = ui.move_text
for i in range(self.start_state.board_height):
row: typing.List[GraphicsPieceItem] = []
self.spaces.append(row)
for j in range(self.start_state.board_width):
piece = GraphicsPieceItem(i, j, self)
scene.addItem(piece)
piece.setBrush(self.background_colour)
piece.setPen(self.background_colour)
row.append(piece)
self.debug_message = ''
def resizeEvent(self, event: QResizeEvent):
super().resizeEvent(event)
game_display = self.ui.game_display
view_size = game_display.contentsRect()
width = view_size.width()
height = view_size.height()
extra_columns = 0
margin = 1 if self.show_coordinates else 0
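        # Choose the largest square cell that fits both dimensions, reserving
        # an extra row and column of space for coordinate labels when shown.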
cell_size = min(width // (self.start_state.board_width + extra_columns + margin),
height // (self.start_state.board_height + margin))
size = cell_size*self.start_state.board_width
x0 = (width - cell_size * (self.start_state.board_width + extra_columns + margin)) // 2
y0 = (height - cell_size * (self.start_state.board_height + margin)) // 2
x0 += margin*cell_size
y0 += margin*cell_size
font = QFont(self.default_font)
font_size = max(1, int(cell_size // 2))
font.setPointSize(font_size)
for i in range(self.start_state.board_height - 1):
r = cell_size * (i+1)
self.row_dividers[i].setLine(x0, y0+r, x0+size, y0+r)
for i in range(self.start_state.board_width - 1):
r = cell_size * (i+1)
self.column_dividers[i].setLine(x0+r, y0, x0+r, y0+size)
for i, label in enumerate(self.row_labels):
r = cell_size * (2*i + 1) // 2
label.setFont(font)
text_x = x0 - cell_size // 2
text_y = y0 + r
center_text_item(label, text_x, text_y)
for i, label in enumerate(self.column_labels):
r = cell_size * (2*i + 1) // 2
label.setFont(font)
center_text_item(label, x0 + r, y0 - cell_size // 2)
font_size = max(1, int(cell_size * extra_columns // 6))
font.setPointSize(font_size)
self.text_x = x0 + size + cell_size * extra_columns // 2
self.text_y = (y0 + cell_size * self.start_state.board_height // 2 +
cell_size * extra_columns // 5)
self.update_move_text()
for i, row in enumerate(self.spaces):
for j, piece in enumerate(row):
x = x0 + j * cell_size + cell_size // 8
y = y0 + i * cell_size + cell_size // 8
piece.setRect(x, y, cell_size * 3 // 4, cell_size * 3 // 4)
self.scene().setSceneRect(0, 0, width, height)
def scene(self) -> QGraphicsScene:
return self.ui.game_display.scene()
@staticmethod
def create_icon(player_colour: QColor) -> QPixmap:
size = 200
icon = QPixmap(size, size)
icon.fill(Qt.GlobalColor.transparent)
painter = QPainter(icon)
try:
painter.setBrush(player_colour)
pen = QPen()
pen.setWidth(3)
painter.setPen(pen)
painter.drawEllipse(1, 1, size-2, size-2)
finally:
painter.end()
return icon
def update_board(self, state: GameState):
assert isinstance(state, GridGameState)
self.current_state = state
self.valid_moves = self.current_state.get_valid_moves()
is_ended = self.current_state.is_ended()
spaces = self.current_state.get_spaces()
for i in range(self.current_state.board_height):
for j in range(self.current_state.board_width):
player = spaces[i][j]
piece = self.spaces[i][j]
if player == self.current_state.NO_PLAYER:
if is_ended:
piece.setVisible(False)
else:
piece.setVisible(True)
piece.setBrush(self.background_colour)
piece.setPen(self.background_colour)
else:
piece.setVisible(True)
piece.setBrush(self.get_player_brush(player))
piece.setPen(self.line_colour)
piece.setOpacity(1)
self.ui.player_pixmap.setVisible(True)
for label in itertools.chain(self.row_labels, self.column_labels):
label.setVisible(self.show_coordinates)
if is_ended:
if self.current_state.is_win(self.current_state.X_PLAYER):
self.update_move_text('wins')
self.ui.player_pixmap.setPixmap(self.player1_icon)
elif self.current_state.is_win(self.current_state.O_PLAYER):
self.update_move_text('wins')
                self.ui.player_pixmap.setPixmap(self.player2_icon)
else:
self.update_move_text('draw')
self.ui.player_pixmap.clear()
else:
self.update_move_text(self.choose_active_text())
active_player = self.current_state.get_active_player()
self.ui.player_pixmap.setPixmap(self.get_player_icon(active_player))
def get_player_brush(self, player):
return QBrush(self.player1_colour
if player == self.start_state.X_PLAYER
else self.player2_colour)
def get_player_icon(self, player: int) -> QPixmap:
return (self.player1_icon
if player == self.start_state.X_PLAYER
else self.player2_icon)
def update_move_text(self, text: str | None = None):
if self.debug_message:
self.move_text.setText(self.debug_message)
elif text is not None:
self.move_text.setText(text)
def on_hover_enter(self, piece_item: GraphicsPieceItem):
if self.is_piece_played(piece_item):
return
if not self.can_move():
return
move = self.calculate_move(piece_item.row, piece_item.column)
is_valid = self.valid_moves[move]
if not is_valid:
return
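        # Preview the move: paint the hovered space in the active player's
        # colour at half opacity.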
piece_item.setBrush(self.get_player_brush(
self.current_state.get_active_player()))
piece_item.setPen(self.line_colour)
piece_item.setOpacity(0.5)
def on_hover_leave(self, piece_item: GraphicsPieceItem):
if self.is_piece_played(piece_item):
return
piece_item.setBrush(self.background_colour)
piece_item.setPen(self.background_colour)
piece_item.setOpacity(1)
def on_click(self, piece_item: GraphicsPieceItem):
if not self.can_move():
return
move = self.calculate_move(piece_item.row, piece_item.column)
is_valid = self.valid_moves[move]
if is_valid:
self.make_move(move)
def calculate_move(self, row, column):
move = row * self.start_state.board_width + column
return move
def is_piece_played(self, piece_item):
current_spaces = self.current_state.get_spaces()
hovered_player = current_spaces[piece_item.row][piece_item.column]
return hovered_player != self.start_state.NO_PLAYER
def close(self):
super().close()
scene = self.ui.game_display.scene()
if scene is not None:
scene.clear() | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/grid_display.py | grid_display.py |
import logging
import math
import typing
from concurrent.futures import (Future, wait, FIRST_COMPLETED, ALL_COMPLETED,
ProcessPoolExecutor)
from operator import itemgetter
import numpy as np
from zero_play.game_state import GameState
from zero_play.heuristic import Heuristic
from zero_play.player import Player
logger = logging.getLogger(__name__)
class SearchNode:
# Controls exploration of new nodes vs. exploitation of good nodes.
exploration_weight = 1.0
def __init__(self,
game_state: GameState,
parent: typing.Optional['SearchNode'] = None,
move: int | None = None):
""" Initialize an instance.
:param game_state: the board state that this node represents
:param parent: the board state that this node came from
:param move: the move to get from parent to this node
"""
self.game_state = game_state
self.parent = parent
self.move = move
self.children: typing.Optional[typing.List[SearchNode]] = None
self.child_predictions: typing.Optional[np.ndarray] = None
self.average_value = 0.0
self.value_count = 0
def __repr__(self):
return f"SearchNode({self.game_state!r})"
def __eq__(self, other):
if isinstance(other, SearchNode):
return self.game_state == other.game_state
return NotImplemented
def select_leaf(self):
if self.value_count == 0:
return self
children = self.find_all_children()
if not children:
return self
best_score = float('-inf')
best_child = None
for child in children:
if self.child_predictions is None:
prior = 1/len(children)
else:
prior = self.child_predictions[child.move]
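            # AlphaZero's PUCT score: Q(child) plus an exploration bonus of
            # c * P(move) * sqrt(N(parent)) / (1 + N(child)).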
score = child.average_value + (self.exploration_weight * prior *
math.sqrt(self.value_count) /
(1 + child.value_count))
if score > best_score:
best_score = score
best_child = child
return best_child.select_leaf()
def find_all_children(self) -> typing.List['SearchNode']:
if self.children is not None:
return self.children
children: typing.List['SearchNode'] = []
if self.game_state.is_ended():
return children
for move, is_valid in enumerate(self.game_state.get_valid_moves()):
if is_valid:
child_state = self.game_state.make_move(move)
children.append(SearchNode(child_state, self, move))
self.children = children
return children
def record_value(self,
value: float,
child_predictions: np.ndarray | None = None):
if child_predictions is not None:
self.child_predictions = child_predictions
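        # Flip the sign whenever the active player changes between this node
        # and its parent (and at the root), so each node accumulates values
        # from a consistent perspective before propagating upward.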
if (not self.parent or
self.parent.game_state.get_active_player() !=
self.game_state.get_active_player()):
value *= -1
self.average_value = ((self.average_value * self.value_count + value) /
(self.value_count + 1))
self.value_count += 1
if self.parent:
self.parent.record_value(value)
def evaluate(self, heuristic: Heuristic):
value, child_predictions = heuristic.analyse(self.game_state)
self.record_value(value, child_predictions)
def choose_child(self, temperature: float) -> 'SearchNode':
""" Choose a child randomly, ones with higher counts are more likely.
:param temperature: positive value that controls how deterministic the
choice is. The closer to zero, the more likely it is to choose the
child with maximum count.
"""
children = self.find_all_children()
probabilities = self.rank_children(children, temperature)
child_count = len(children)
child_index = np.random.choice(child_count, p=probabilities)
return children[child_index]
@staticmethod
def rank_children(children, temperature):
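        # Worked example with hypothetical counts: temperature 1.0 and visit
        # counts [3, 1, 0] give weights exp([0, -2, -3]) after the shift,
        # i.e. probabilities of roughly [0.84, 0.11, 0.04].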
values = np.array([temperature * child.value_count for child in children])
# Avoid overflow by keeping the weights between 0 and 1.
values -= values.max(initial=0)
weights = np.exp(values)
# Normalize the weights into probabilities that add up to 1.
probabilities = weights / sum(weights)
return probabilities
def find_best_children(self):
children = self.find_all_children()
best_value = float('-inf')
best_children = []
for child in children:
child_value = child.average_value
if child_value > best_value:
best_children = [child]
best_value = child_value
elif child_value == best_value:
best_children.append(child)
return best_children
class SearchManager:
def __init__(self, start_state: GameState,
heuristic: Heuristic,
process_count: int = 1):
self.start_state = start_state
self.heuristic = heuristic
self.current_node = self.reset()
self.process_count = process_count
if process_count <= 1:
self.executor = None
else:
self.executor = ProcessPoolExecutor(process_count)
self.tasks: typing.Dict[Future, SearchNode] = {}
def reset(self) -> SearchNode:
self.current_node = SearchNode(self.start_state)
return self.current_node
def find_node(self, game_state: GameState):
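        # Reuse the existing search tree where possible: look for the state
        # among the current node's children, then its parent, before falling
        # back to a fresh node.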
if not game_state == self.current_node.game_state:
for child in self.current_node.find_all_children():
if game_state == child.game_state:
self.current_node = child
break
else:
parent = self.current_node.parent
if parent is not None and game_state == parent.game_state:
self.current_node = parent
else:
self.current_node = SearchNode(game_state)
def search(self, board: GameState, iterations: int):
self.find_node(board)
max_tasks = self.process_count * 2
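        # Allow up to two in-flight evaluations per worker process; poll for
        # finished tasks, blocking only when the backlog is full.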
for _ in range(iterations):
leaf = self.current_node.select_leaf()
if self.executor is None:
leaf.evaluate(self.heuristic)
else:
future = self.executor.submit(self.heuristic.analyse,
leaf.game_state)
self.tasks[future] = leaf
if len(self.tasks) >= max_tasks:
timeout = None
else:
timeout = 0
self.check_tasks(timeout, return_when=FIRST_COMPLETED)
if self.tasks:
self.check_tasks(timeout=None, return_when=ALL_COMPLETED)
if self.current_node.children is None:
self.current_node.select_leaf()
def check_tasks(self, timeout, return_when):
done, not_done = wait(self.tasks.keys(),
timeout,
return_when=return_when)
for done_future in done:
done_leaf = self.tasks.pop(done_future)
value, child_predictions = done_future.result()
done_leaf.record_value(value, child_predictions)
def get_best_move(self) -> int:
best_children = self.current_node.find_best_children()
self.current_node = child = np.random.choice(best_children)
assert child.move is not None
return child.move
def choose_weighted_move(self) -> int:
temperature = 1.0
self.current_node = child = self.current_node.choose_child(temperature)
assert child.move is not None
return child.move
def get_move_probabilities(
self,
game_state: GameState,
limit: int = 10) -> typing.List[typing.Tuple[str,
float,
int,
float]]:
""" Report the probability that each move is the best choice.
:param game_state: the starting position
:param limit: the maximum number of moves to report
:return: [(move_display, probability, value_count, avg_value)], where
value_count is the number of times the value was probed from the move,
and avg_value is the average value from all those probes.
"""
self.find_node(game_state)
children = self.current_node.find_all_children()
temperature = 1.0
probabilities = self.current_node.rank_children(children, temperature)
value_counts = [child.value_count for child in children]
ranked_children = sorted(zip(value_counts, probabilities, children),
key=itemgetter(0),
reverse=True)
top_children = ranked_children[:limit]
child_node: SearchNode
top_moves = [(game_state.display_move(child_node.move),
probability,
value_count,
child_node.average_value)
for value_count, probability, child_node in top_children
if child_node.move is not None]
return top_moves
def create_training_data(self, iterations: int, data_size: int):
game_states: typing.List[typing.Tuple[GameState, np.ndarray]] = []
self.search(self.current_node.game_state, iterations=1) # One extra to start.
report_size = 0
board_shape = self.current_node.game_state.get_spaces().shape
boards = np.zeros((data_size,) + board_shape, int)
move_count = self.current_node.game_state.get_valid_moves().size
outputs = np.zeros((data_size, move_count + 1))
data_count = 0
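        # Self-play loop: search from the current position, record the visit
        # counts as a move-probability target, sample a move from them, and
        # once a game ends, label every recorded position with the final
        # value from its player's perspective.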
while True:
self.search(self.current_node.game_state, iterations)
assert self.current_node.children is not None
assert self.current_node.child_predictions is not None
move_weights = np.zeros(self.current_node.child_predictions.size)
for child in self.current_node.children:
move = child.move
move_weights[move] = child.value_count
total_weight = move_weights.sum()
if total_weight:
move_weights /= total_weight
game_states.append((self.current_node.game_state, move_weights))
move = np.random.choice(move_weights.size, p=move_weights)
for child in self.current_node.children:
if child.move == move:
self.current_node = child
break
if self.current_node.game_state.is_ended():
final_value, _ = self.heuristic.analyse(self.current_node.game_state)
final_player = -self.current_node.game_state.get_active_player()
for game_state, move_weights in game_states:
value = final_value
if game_state.get_active_player() != final_player:
value *= -1
boards[data_count] = game_state.get_spaces()
outputs[data_count, :move_count] = move_weights
outputs[data_count, -1] = value
data_count += 1
if data_count >= data_size:
return boards, outputs
if data_count > report_size:
logger.debug('Created %d training examples so far.', data_count)
report_size = data_count * 2
game_states.clear()
self.reset()
class MctsPlayer(Player):
""" Use Monte Carlo Tree Search to choose moves in a game.
This is based on the general discussion of MCTS in Wikipedia:
https://en.wikipedia.org/wiki/Monte_Carlo_tree_search
Also based on the specific adaptations of AlphaZero:
http://web.stanford.edu/~surag/posts/alphazero.html
The original AlphaZero paper:
https://deepmind.com/blog/alphago-zero-learning-scratch/
"""
DEFAULT_ITERATIONS = 80
def __init__(self,
start_state: GameState,
player_number: int = GameState.X_PLAYER,
iteration_count: int = DEFAULT_ITERATIONS,
heuristic: Heuristic | None = None,
process_count: int = 1):
super().__init__(player_number, heuristic)
self.iteration_count = iteration_count
self.search_manager = SearchManager(start_state,
self.heuristic,
process_count)
@property
def heuristic(self) -> Heuristic:
return self._heuristic
@heuristic.setter
def heuristic(self, value: Heuristic):
self._heuristic = value
search_manager = getattr(self, 'search_manager', None)
if search_manager is not None:
search_manager.heuristic = value
def end_game(self, game_state: GameState, opponent: Player):
self.search_manager.reset()
def choose_move(self, game_state: GameState) -> int:
""" Choose a move for the given board.
:param game_state: the current state of the game.
:return: the chosen move's index in the list of valid moves.
"""
self.search_manager.search(game_state, self.iteration_count)
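        # Early in the game, sample moves in proportion to visit counts for
        # variety; from move 15 on, always play a move with the best average
        # value.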
if game_state.get_move_count() < 15:
return self.search_manager.choose_weighted_move()
return self.search_manager.get_best_move()
def get_move_probabilities(self, game_state: GameState) -> typing.List[
typing.Tuple[str, float, int, float]]:
""" Report the probability that each move is the best choice.
:param game_state: the board to analyse
:return: [(move_display, probability, value_count, avg_value)], where
value_count is the number of times the value was probed from the move,
and avg_value is the average value from all those probes.
"""
return self.search_manager.get_move_probabilities(game_state)
def get_summary(self) -> typing.Sequence[str]:
return (('mcts',) + tuple(self.heuristic.get_summary()) +
(f'{self.iteration_count} iterations',)) | zero-play | /zero_play-0.5.0-py3-none-any.whl/zero_play/mcts_player.py | mcts_player.py |
from io import StringIO
from multiprocessing import Process, Queue
import logging
from queue import Empty
import re
import typing
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sn
from PySide6.QtCore import QTimer
from sqlalchemy.orm import Session as BaseSession
# from zero_play.connect4.neural_net import NeuralNet
from zero_play.game_state import GameState as Game, GameState
from zero_play.mcts_player import MctsPlayer
from zero_play.models import SessionBase
from zero_play.models.game import GameRecord
from zero_play.models.match import MatchRecord
from zero_play.models.match_player import MatchPlayerRecord
from zero_play.models.player import PlayerRecord
from zero_play.play_controller import PlayController
from zero_play.playout import Playout
from zero_play.plot_canvas import PlotCanvas
from zero_play.tictactoe.state import TicTacToeState
logger = logging.getLogger(__name__)
class MatchUp:
def __init__(self,
p1_definition: int | str | None = None,
p2_definition: int | str | None = None,
source: typing.Optional['MatchUp'] = None):
if source is None:
self.p1_iterations, self.p1_neural_net = MatchUp.parse_definition(
p1_definition)
self.p2_iterations, self.p2_neural_net = MatchUp.parse_definition(
p2_definition)
self.p1_wins = 0
self.ties = 0
self.p2_wins = 0
else:
self.p1_iterations = source.p1_iterations
self.p1_neural_net = source.p1_neural_net
self.p2_iterations = source.p2_iterations
self.p2_neural_net = source.p2_neural_net
self.p1_wins = source.p1_wins
self.ties = source.ties
self.p2_wins = source.p2_wins
@staticmethod
def parse_definition(definition):
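        """ Split a definition like '512nn' into (iterations, is_neural_net).
        >>> MatchUp.parse_definition('512nn')
        (512, True)
        >>> MatchUp.parse_definition(8)
        (8, False)
        """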
match = re.fullmatch(r'(\d+)(nn)?', str(definition))
return int(match.group(1)), bool(match.group(2))
@staticmethod
def format_definition(iterations, neural_net):
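        """ Inverse of parse_definition.
        >>> MatchUp.format_definition(512, True)
        '512nn'
        >>> MatchUp.format_definition(8, False)
        8
        """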
if neural_net:
return f'{iterations}nn'
return iterations
def __repr__(self):
p1_definition = self.format_definition(self.p1_iterations,
self.p1_neural_net)
p2_definition = self.format_definition(self.p2_iterations,
self.p2_neural_net)
return f'MatchUp({p1_definition!r}, {p2_definition!r})'
@property
def key(self):
return (self.p1_iterations,
self.p1_neural_net,
self.p2_iterations,
self.p2_neural_net)
@property
def count(self):
return self.p1_wins + self.ties + self.p2_wins
@property
def p1_win_rate(self):
return self.p1_wins / self.count if self.count else 0.
@property
def tie_rate(self):
return self.ties / self.count if self.count else 0.
@property
def p2_win_rate(self):
return self.p2_wins / self.count if self.count else 0.
def record_result(self, result):
if result < 0:
self.p2_wins += 1
elif result == 0:
self.ties += 1
else:
self.p1_wins += 1
class WinCounter(dict):
def __init__(self,
player_levels: typing.List[int] | None = None,
opponent_min: int | None = None,
opponent_max: int | None = None,
source: typing.Optional['WinCounter'] = None,
player_definitions: typing.List[str | int] | None = None):
super().__init__()
if source is not None:
self.player_definitions: typing.List[
typing.Union[str, int]] = source.player_definitions[:]
self.opponent_levels: typing.List[int] = source.opponent_levels[:]
for key, match_up in source.items():
self[key] = MatchUp(source=match_up)
else:
if player_levels is not None:
player_definitions = []
for player_level in player_levels:
player_definitions.append(player_level)
assert player_definitions is not None
assert opponent_min is not None
assert opponent_max is not None
self.player_definitions = player_definitions[:]
self.opponent_levels = []
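            # Opponent strengths double from opponent_min up to opponent_max,
            # e.g. 1, 2, 4, ..., 512.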
opponent_level = opponent_min
while opponent_level <= opponent_max:
self.opponent_levels.append(opponent_level)
opponent_level <<= 1
for player_definition in player_definitions:
for opponent_level in self.opponent_levels:
match_up = MatchUp(player_definition, opponent_level)
self[match_up.key] = match_up
match_up = MatchUp(opponent_level, player_definition)
self[match_up.key] = match_up
def find_next_matchup(self) -> MatchUp:
best_matchup: typing.Optional[MatchUp] = None
lowest_count = None
for matchup in self.values():
if best_matchup is None or matchup.count < lowest_count:
best_matchup = matchup
lowest_count = matchup.count
assert best_matchup is not None
return best_matchup
def build_series(self):
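        # Emit four series per player definition (win/tie rates as player 1
        # and as player 2) against every opponent level; self is keyed by
        # (p1_iterations, p1_nn, p2_iterations, p2_nn) tuples.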
all_series = []
for player_definition in self.player_definitions:
parsed_player = MatchUp.parse_definition(player_definition)
series = (f'wins as 1 with {player_definition}',
[self[parsed_player +
MatchUp.parse_definition(opponent_level)].p1_win_rate
for opponent_level in self.opponent_levels])
all_series.append(series)
series = (f'ties as 1 with {player_definition}',
[self[parsed_player +
MatchUp.parse_definition(opponent_level)].tie_rate
for opponent_level in self.opponent_levels])
all_series.append(series)
series = (f'wins as 2 with {player_definition}',
[self[MatchUp.parse_definition(opponent_level) +
parsed_player].p2_win_rate
for opponent_level in self.opponent_levels])
all_series.append(series)
series = (f'ties as 2 with {player_definition}',
[self[MatchUp.parse_definition(opponent_level) +
parsed_player].tie_rate
for opponent_level in self.opponent_levels])
all_series.append(series)
return all_series
def build_summary(self):
summary = StringIO()
all_series = self.build_series()
print('opponent levels', np.array(self.opponent_levels), file=summary)
for i, player_definition in enumerate(self.player_definitions):
for j in range(2):
if j:
counts = [self[MatchUp.parse_definition(opponent_level) +
MatchUp.parse_definition(player_definition)].count
for opponent_level in self.opponent_levels]
print('counts as 2 with',
player_definition,
np.array(counts),
file=summary)
else:
counts = [self[MatchUp.parse_definition(player_definition) +
MatchUp.parse_definition(opponent_level)].count
for opponent_level in self.opponent_levels]
print('counts as 1 with',
player_definition,
np.array(counts),
file=summary)
for k in range(i * 4 + j * 2, i * 4 + (j + 1) * 2):
name, rates = all_series[k]
percentages = (np.array(rates) * 100).round().astype(int)
print(name, percentages, file=summary)
return summary.getvalue()
class StrengthPlot(PlotCanvas):
def __init__(self, parent=None) -> None:
super().__init__(parent)
self.game: GameState = TicTacToeState()
self.artists: typing.List[plt.Artist] = []
self.plot_lines: typing.List[plt.Line2D] = []
self.win_counter: WinCounter | None = None
self.request_queue: Queue[str] = Queue()
self.result_queue: Queue[
typing.Tuple[int, bool, int, bool, int]] = Queue()
self.db_session: BaseSession | None = None
self.process: Process | None = None
self.timer = QTimer()
# noinspection PyUnresolvedReferences
self.timer.timeout.connect(self.update)
def start(self,
db_session: BaseSession,
controller: PlayController,
player_definitions: typing.List[typing.Union[str, int]],
opponent_min: int,
opponent_max: int):
self.db_session = db_session
self.game = controller.start_state
self.win_counter = WinCounter(player_definitions=player_definitions,
opponent_min=opponent_min,
opponent_max=opponent_max)
self.load_history(db_session)
# self.game_name = controller.start_state.game_name
self.process = Process(target=run_games,
args=(controller,
self.request_queue,
self.result_queue,
WinCounter(source=self.win_counter),
# neural_net_path),
),
daemon=True)
self.process.start()
# self.worker_thread.start()
sn.set()
self.create_plot()
plt.tight_layout()
self.timer.start(30_000)
self.update()
def stop_workers(self):
self.request_queue.put('Stop')
self.timer.stop()
def fetch_strengths(self, db_session) -> typing.List[int]:
if db_session is None:
return []
game_record = GameRecord.find_or_create(db_session, self.game)
strengths = []
datetimes = []
match: MatchRecord
# noinspection PyTypeChecker
for match in game_record.matches: # type: ignore
match_player: MatchPlayerRecord
# noinspection PyTypeChecker
for match_player in match.match_players: # type: ignore
player = match_player.player
if player.type != player.HUMAN_TYPE:
assert player.iterations is not None
strengths.append(player.iterations)
datetimes.append(match.start_time)
return strengths
def requery(self, db_session: SessionBase | None, future_strength: int):
strengths = self.fetch_strengths(db_session)
self.axes.clear()
marker = 'o' if len(strengths) == 1 else ''
self.axes.plot(strengths, marker, label='past')
self.axes.plot([len(strengths)], [future_strength], 'o', label='next')
self.axes.set_ylim(0)
if len(strengths) + 1 < len(self.axes.get_xticks()):
self.axes.set_xticks(list(range(len(strengths) + 1)))
self.axes.set_title('Search iterations over time')
self.axes.set_ylabel('Search iterations')
self.axes.set_xlabel('Number of games played')
self.axes.legend(loc='lower right')
self.axes.figure.canvas.draw()
# noinspection PyMethodOverriding
def update(self, _frame=None) -> None: # type: ignore
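        # Drain pending results from the worker process (up to 1000 per call),
        # update the win counts, and redraw the plot.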
messages = []
try:
for _ in range(1000):
messages.append(self.result_queue.get_nowait())
except Empty:
pass
# logger.debug('Plotter.update() found %d messages.', len(messages))
if not messages:
return
assert self.win_counter is not None
for p1_iterations, p1_nn, p2_iterations, p2_nn, result in messages:
match_up: MatchUp = self.win_counter[(p1_iterations,
p1_nn,
p2_iterations,
p2_nn)]
match_up.record_result(result)
self.write_history(match_up, result)
self.artists.clear()
self.create_plot()
self.axes.figure.canvas.draw()
# logger.debug('Plotter.update() done.')
# return self.artists
def create_plot(self):
opponent_levels = self.win_counter.opponent_levels
all_series = self.win_counter.build_series()
total_games = sum(match_up.count for match_up in self.win_counter.values())
self.artists.append(self.axes.set_title(
f'Win Rates After {total_games} '
f'Games of {self.game.game_name}'))
if not self.plot_lines:
            self.axes.set_ylabel('Win and tie rates')
self.axes.set_xlabel('Opponent MCTS simulation count')
self.axes.set_xscale('log')
self.axes.set_ylim(-0.01, 1.01)
group_num = 0
prev_iter_count = ''
for name, rates in all_series:
fields = name.split()
iter_count = fields[-1]
if iter_count != prev_iter_count:
group_num += 1
prev_iter_count = iter_count
player = fields[2]
result = fields[0]
match (result, player):
case 'ties', '1':
style = ':'
case 'ties', '2':
style = '-.'
case 'wins', '2':
style = '--'
case _:
style = ''
style += f'C{group_num}'
line, = self.axes.plot(opponent_levels, rates, style, label=name)
self.plot_lines.append(
line)
self.axes.legend(bbox_to_anchor=(1.04, 1), loc="upper left")
else:
for line, (name, rates) in zip(self.plot_lines, all_series):
# noinspection PyTypeChecker
line.set_ydata(rates)
self.artists.extend(self.plot_lines)
self.axes.figure.tight_layout()
self.axes.redraw_in_frame()
return self.artists
def load_history(self, db_session: BaseSession):
assert self.win_counter is not None
player1_number = self.game.get_active_player()
game_record = GameRecord.find_or_create(db_session, self.game)
match_record: MatchRecord
for match_record in game_record.matches: # type: ignore
player1_iterations = player2_iterations = result = None
has_human = False
match_player: MatchPlayerRecord
for match_player in match_record.match_players: # type: ignore
player: PlayerRecord = match_player.player
if player.type == PlayerRecord.HUMAN_TYPE:
has_human = True
player_number = match_player.player_number
if player_number == player1_number:
player1_iterations = player.iterations
result = match_player.result
else:
player2_iterations = player.iterations
if has_human:
continue
match_up = self.win_counter.get((player1_iterations,
False,
player2_iterations,
False))
if match_up is not None:
match_up.record_result(result)
print(self.win_counter.build_summary(), end='')
def write_history(self, match_up: MatchUp, result: int):
db_session = self.db_session
assert db_session is not None
game_record = GameRecord.find_or_create(db_session, self.game)
match_record = MatchRecord(game=game_record)
db_session.add(match_record)
mcts_player: typing.Optional[MctsPlayer]
iteration_entries = (match_up.p1_iterations, match_up.p2_iterations)
for i, player_number in enumerate(self.game.get_players()):
iterations = iteration_entries[i]
player_record = db_session.query(PlayerRecord).filter_by(
type=PlayerRecord.PLAYOUT_TYPE,
iterations=iterations).one_or_none()
if player_record is None:
player_record = PlayerRecord(type=PlayerRecord.PLAYOUT_TYPE,
iterations=iterations)
db_session.add(player_record)
player_result = result if i == 0 else -result
match_player = MatchPlayerRecord(match=match_record,
player=player_record,
player_number=player_number,
result=player_result)
db_session.add(match_player)
db_session.commit()
def run_games(controller: PlayController,
request_queue: Queue,
result_queue: Queue,
win_counter: WinCounter,
# checkpoint_path: str = None,
game_count: int | None = None):
""" Run a series of games, and send the results through a queue.
:param controller: tracks game progress
:param request_queue: source of control requests. For now, any message will
tell this process to shut down.
:param result_queue: destination for game results. Each message is a tuple
with the match-up key and the game result: 1, 0, or -1 for player 1.
:param win_counter: defines all the strength combinations to test.
:param game_count: number of games to run, or None to run until stopped.
"""
player1 = controller.players[Game.X_PLAYER]
player2 = controller.players[Game.O_PLAYER]
assert isinstance(player1, MctsPlayer)
assert isinstance(player2, MctsPlayer)
# nn = None
playout = Playout()
while game_count is None or game_count > 0:
match_up = win_counter.find_next_matchup()
player1.iteration_count = match_up.p1_iterations
# if match_up.p1_neural_net:
# nn = nn or load_neural_net(controller.game, checkpoint_path)
# player1.heuristic = nn
# else:
player1.heuristic = playout
player2.iteration_count = match_up.p2_iterations
# if match_up.p2_neural_net:
# nn = nn or load_neural_net(controller.game, checkpoint_path)
# player2.heuristic = nn
# else:
player2.heuristic = playout
# logger.debug(f'checking params {i}, {j} ({x}, {y}) with {counts[i, j]} counts')
controller.start_game()
while not controller.take_turn():
try:
request_queue.get_nowait()
return # Received the quit message.
except Empty:
pass
if controller.board.is_win(Game.X_PLAYER):
result = Game.X_PLAYER
elif controller.board.is_win(Game.O_PLAYER):
result = Game.O_PLAYER
else:
result = 0
logger.debug('Result of pitting %s vs %s: %s.',
match_up.format_definition(match_up.p1_iterations,
match_up.p1_neural_net),
match_up.format_definition(match_up.p2_iterations,
match_up.p2_neural_net),
result)
result_queue.put(match_up.key + (result,))
match_up.record_result(result)
if game_count:
game_count -= 1
# def load_neural_net(game, checkpoint_path):
# nn = NeuralNet(game)
# if checkpoint_path:
# nn.load_checkpoint(filename=checkpoint_path)
#     return nn
from argparse import ArgumentParser
from pathlib import Path
from subprocess import run
from zero_play.rules_formatter import convert_markdown
HTML_PREFIX = """\
<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body>
"""
HTML_SUFFIX = """\
</body>
</html>
"""
def parse_args():
parser = ArgumentParser(description='Compile resources for a Zero Play project.')
# noinspection PyTypeChecker
parser.add_argument('folder',
type=Path,
help='Folder with resource files to pack.')
parser.add_argument('--name',
help='Part of root path for resources, defaults to '
'folder name.')
# noinspection PyTypeChecker
parser.add_argument('--output', '-o',
type=Path,
help='Output Python file, defaults to {NAME}_rc.py.')
parser.add_argument('--markdown', '-m',
action='store_true',
help='Convert .md files to .html files before packing.')
return parser.parse_args()
def main() -> None:
args = parse_args()
folder: Path = args.folder
output_name = args.name or folder.name
resources = sorted(folder.glob('**/*'))
if not resources:
exit(f'No resources found in {folder}.')
files_to_tidy = []
print('Resources can be loaded from these paths:')
for resource in resources:
if args.markdown and resource.suffix == '.md':
html_resource = resource.with_suffix('.html')
if html_resource.exists():
raise FileExistsError(f'HTML already exists in {html_resource}.')
html_fragment = convert_markdown(resource.read_text())
html_resource.write_text(HTML_PREFIX + html_fragment + HTML_SUFFIX)
files_to_tidy.append(html_resource)
resource = html_resource
rel_path = resource.relative_to(folder)
print(f':/{output_name}/{rel_path}')
output = args.output or Path(output_name + '_rc.py')
project_result = run(['pyside6-rcc', '--project'],
check=True,
capture_output=True,
encoding='utf8',
cwd=folder)
project_lines = project_result.stdout.splitlines()
project_text = '\n'.join(line
for line in project_lines
if not line.endswith('.md</file>'))
project_path = folder / (output_name + '.qrc')
project_path.write_text(project_text)
files_to_tidy.append(project_path)
run(['pyside6-rcc',
'-o', output,
'--root', '/'+output_name,
project_path],
check=True)
for path in files_to_tidy:
path.unlink()
print()
print(f"Generated {output} (don't forget to import it).")
if __name__ == '__main__':
    main()
import math
import typing
import numpy as np
from zero_play.game_state import GridGameState
class OthelloState(GridGameState):
game_name = 'Othello'
def __init__(self,
text: str | None = None,
board_height: int = 6,
board_width: int = 6,
spaces: np.ndarray | None = None):
if spaces is not None:
size = spaces.size
board_width = board_height = int(math.sqrt(size-1))
assert text is None
if text is None:
lines = None
next_player_line = None
else:
lines = text.splitlines()
next_player_line = lines.pop()
super().__init__(board_height,
board_width,
lines=lines,
extra_count=1,
spaces=spaces)
if spaces is not None:
return
spaces = self.get_spaces()
if text:
assert next_player_line and next_player_line.startswith('>')
self.board[-1] = (self.X_PLAYER
if next_player_line.endswith('X')
else self.O_PLAYER)
else:
self.board[-1] = self.X_PLAYER
for i in range(self.board_height//2-1, self.board_height//2+1):
for j in range(self.board_width//2-1, self.board_width//2+1):
player = self.X_PLAYER if (i+j) % 2 else self.O_PLAYER
spaces[i, j] = player
def get_valid_moves(self) -> np.ndarray:
spaces = self.get_spaces()
moves = np.zeros(self.board_height * self.board_width + 1, bool)
        # Reshape as (rows, columns) so that indexing matches get_spaces().
        move_spaces = moves[:-1].reshape(self.board_height, self.board_width)
player = self.get_active_player()
for i, j in self.find_moves(spaces, player):
move_spaces[i, j] = True
if moves.sum() == 0:
# No moves for this player, check opponent.
for _ in self.find_moves(spaces, -player):
# Opponent has a move, pass is allowed.
moves[-1] = True
break
return moves
def find_moves(self, spaces: np.ndarray, player: int):
for i in range(self.board_height):
for j in range(self.board_width):
piece = spaces[i, j]
if piece == player:
yield from self.find_moves_from_space(spaces, i, j, player)
def find_moves_from_space(self, spaces, start_row, start_column, player):
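        # Walk outward from the player's piece in each of the eight directions;
        # the first empty space reached after passing at least one opposing
        # piece is a legal move (standard Othello capture rule).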
for di in range(-1, 2):
for dj in range(-1, 2):
if not (di or dj):
continue
has_flipped = False
i = start_row + di
j = start_column + dj
while 0 <= i < self.board_height and 0 <= j < self.board_width:
piece = spaces[i, j]
if piece == player:
break
if piece == self.NO_PLAYER:
if has_flipped:
yield i, j
break
else:
has_flipped = True
i += di
j += dj
def display(self, show_coordinates: bool = False) -> str:
result = super().display(show_coordinates)
next_player = self.board[-1]
return result + f'>{self.DISPLAY_CHARS[next_player+1]}\n'
def display_move(self, move: int) -> str:
if move == self.board_width * self.board_height:
return 'PASS'
return super().display_move(move)
def parse_move(self, text: str) -> int:
trimmed = text.strip().replace(' ', '')
if not trimmed:
return self.board_height*self.board_width # It's a pass.
return super().parse_move(trimmed)
def make_move(self, move: int) -> 'OthelloState':
new_board: np.ndarray = self.board.copy()
player = new_board[-1]
new_board[-1] = -player
new_state = OthelloState(spaces=new_board)
if move == self.board_width * self.board_height:
return new_state # It's a pass.
spaces = new_state.get_spaces()
start_row = move // self.board_width
start_column = move % self.board_width
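        # Re-walk all eight directions from the placed piece, flipping every
        # contiguous run of opposing pieces that is capped by one of our own.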
for di in range(-1, 2):
for dj in range(-1, 2):
if not (di or dj):
continue
to_flip: typing.List[typing.Tuple[int, int]] = [] # [(i, j)]
i = start_row + di
j = start_column + dj
while 0 <= i < self.board_height and 0 <= j < self.board_width:
piece = spaces[i, j]
if piece == player:
for i, j in to_flip:
spaces[i, j] *= -1
break
if piece == self.NO_PLAYER:
break
else:
to_flip.append((i, j))
i += di
j += dj
spaces[start_row, start_column] = player
return new_state
def get_active_player(self):
return self.board[-1]
def is_ended(self):
spaces = self.get_spaces()
player = self.board[-1]
for _ in self.find_moves(spaces, player):
return False
for _ in self.find_moves(spaces, -player):
return False
return True
def get_winner(self):
if not self.is_ended():
return self.NO_PLAYER
total = self.board[:-1].sum()
if total > 0:
return self.X_PLAYER
if total < 0:
return self.O_PLAYER
return self.NO_PLAYER
def get_piece_count(self, player: int):
return (self.board[:-1] == player).sum()
def is_win(self, player: int) -> bool:
        return self.get_winner() == player
import logging
import typing
from argparse import Namespace
from pathlib import Path
import numpy as np
from tensorflow.python.keras import Sequential, regularizers
from tensorflow.python.keras.callbacks import TensorBoard
from tensorflow.python.keras.layers import Dense, Conv2D, Dropout, Flatten
from tensorflow.python.keras.models import load_model
from zero_play.game_state import GridGameState, GameState
from zero_play.heuristic import Heuristic
logger = logging.getLogger(__name__)
class NeuralNet(Heuristic):
def __init__(self, start_state: GameState):
if not isinstance(start_state, GridGameState):
raise ValueError(f'{start_state.__class__} is not a subclass of GridGameState.')
super().__init__()
# start_state params
self.board_height = start_state.board_height
self.board_width = start_state.board_width
self.action_size = len(start_state.get_valid_moves())
self.epochs_completed = 0
self.epochs_to_train = 100
self.start_state = start_state
args = Namespace(lr=0.001,
dropout=0.3,
epochs=10,
batch_size=64,
num_channels=512)
self.checkpoint_name = 'random weights'
self.args = args
num_channels = 512
kernel_size = [3, 3]
dropout = 0.3
model = Sequential()
# regularizer = regularizers.l2(0.00006)
regularizer = regularizers.l2(0.0001)
model.add(Conv2D(num_channels,
kernel_size,
padding='same',
activation='relu',
input_shape=(self.board_height, self.board_width, 1),
activity_regularizer=regularizer))
model.add(Conv2D(num_channels,
kernel_size,
padding='same',
activation='relu',
activity_regularizer=regularizer))
model.add(Conv2D(num_channels,
kernel_size,
activation='relu',
activity_regularizer=regularizer))
model.add(Conv2D(num_channels,
kernel_size,
activation='relu',
activity_regularizer=regularizer))
model.add(Dropout(dropout))
model.add(Dropout(dropout))
model.add(Flatten())
model.add(Dense(self.action_size + 1))
model.compile('adam', 'mean_squared_error')
self.model = model
def get_summary(self) -> typing.Sequence[str]:
return 'neural net', self.checkpoint_name
def analyse(self, board: GameState) -> typing.Tuple[float, np.ndarray]:
if board.is_ended():
return self.analyse_end_game(board)
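        # The network maps a (height, width, 1) board image to action_size + 1
        # outputs: a policy score per move followed by a single value estimate.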
outputs = self.model.predict(board.get_spaces().reshape(
(1,
self.board_height,
self.board_width,
1)))
policy = outputs[0, :-1]
value = outputs[0, -1]
return value, policy
def get_path(self, folder):
if folder is not None:
folder_path = Path(folder)
else:
game_name = self.start_state.game_name.replace(' ', '-').lower()
folder_path = Path('data') / game_name
return folder_path
def save_checkpoint(self, folder=None, filename='checkpoint.h5'):
self.checkpoint_name = 'model ' + filename
folder_path = self.get_path(folder)
file_path = folder_path / filename
folder_path.mkdir(parents=True, exist_ok=True)
self.model.save(file_path)
def load_checkpoint(self, folder=None, filename='checkpoint.h5'):
self.checkpoint_name = 'model ' + filename
folder_path = self.get_path(folder)
file_path = folder_path / filename
self.model = load_model(file_path)
def train(self, boards: np.ndarray, outputs: np.ndarray, log_dir=None):
""" Train the model on some sample data.
:param boards: Each entry is a board position.
:param outputs: Each entry is an array of policy values for the moves,
as well as the estimated value of the board position.
:param log_dir: Directory for TensorBoard logs. None disables logging.
"""
if log_dir is None:
callbacks = None
else:
callbacks = [TensorBoard(log_dir)]
history = self.model.fit(
np.expand_dims(boards, -1),
outputs,
verbose=0,
initial_epoch=self.epochs_completed,
epochs=self.epochs_completed+self.epochs_to_train,
validation_split=0.2,
callbacks=callbacks)
self.epochs_completed += self.epochs_to_train
        return history
import numpy as np
from zero_play.game_state import GridGameState
class Connect4State(GridGameState):
game_name = 'Connect 4'
def __init__(self,
text: str | None = None,
board_height: int = 6,
board_width: int = 7,
spaces: np.ndarray | None = None):
if text is None:
lines = None
else:
lines = text.splitlines()
if len(lines) == board_height+1:
# Trim off coordinates.
lines = lines[1:]
super().__init__(board_height, board_width, lines=lines, spaces=spaces)
def get_valid_moves(self) -> np.ndarray:
if self.get_winner() != self.NO_PLAYER:
return np.zeros(self.board_width, dtype=bool)
# Any zero value in top row is a valid move
return self.board[0] == 0
    def display(self, show_coordinates: bool = False) -> str:
        # Build the header from the board width instead of a hard-coded '1234567'.
        header = ''.join(str(i + 1) for i in range(self.board_width)) + '\n' if show_coordinates else ''
        return header + super().display()
def parse_move(self, text: str) -> int:
move_int = int(text)
if move_int < 1 or self.board_width < move_int:
raise ValueError(f'Move must be between 1 and {self.board_width}.')
return move_int - 1
def make_move(self, move: int) -> 'Connect4State':
moving_player = self.get_active_player()
new_board: np.ndarray = self.board.copy()
available_idx, = np.where(new_board[:, move] == 0)
new_board[available_idx[-1]][move] = moving_player
return Connect4State(spaces=new_board)
def is_win(self, player: int) -> bool:
""" Has the given player collected four in a row in any direction? """
row_count, column_count = self.board.shape
win_count = 4
player_pieces = self.board == player
if self.is_horizontal_win(player_pieces, win_count):
return True
if self.is_horizontal_win(player_pieces.transpose(), win_count):
return True
# check two diagonal strips
for start_row in range(row_count - win_count + 1):
for start_column in range(column_count - win_count + 1):
count1 = count2 = 0
for d in range(win_count):
if self.board[start_row + d, start_column + d] == player:
count1 += 1
if self.board[start_row + d,
start_column + win_count - d - 1] == player:
count2 += 1
if count1 == win_count or count2 == win_count:
return True
return False
@staticmethod
def is_horizontal_win(player_pieces: np.ndarray, win_count):
row_count, column_count = player_pieces.shape
for i in range(row_count):
for j in range(column_count-win_count+1):
count = player_pieces[i, j:j+win_count].sum()
if count >= win_count:
return True
        return False
## Introduction
This is a zero-shot relation extractor based on the paper [Exploring the zero-shot limit of FewRel](https://www.aclweb.org/anthology/2020.coling-main.124).
## Installation
```bash
$ pip install zero-shot-re
```
## Run the Extractor
```python
from transformers import AutoTokenizer
from zero_shot_re import RelTaggerModel, RelationExtractor
model = RelTaggerModel.from_pretrained("fractalego/fewrel-zero-shot")
tokenizer = AutoTokenizer.from_pretrained("fractalego/fewrel-zero-shot")
relations = ['noble title', 'founding date', 'occupation of a person']
extractor = RelationExtractor(model, tokenizer, relations)
ranked_rels = extractor.rank(text='John Smith received an OBE', head='John Smith', tail='OBE')
print(ranked_rels)
```
which prints the following results:
```python3
[('noble title', 0.9690611883997917),
('occupation of a person', 0.0012609362602233887),
('founding date', 0.00024014711380004883)]
```
## Accuracy
The results, as reported in the paper, are:
| Model                   | 0-shot 5-ways | 0-shot 10-ways |
|-------------------------|---------------|----------------|
| (1) Distillbert         | 70.1±0.5      | 55.9±0.6       |
| (2) Bert Large          | 80.8±0.4      | 69.6±0.5       |
| (3) Distillbert + SQUAD | 81.3±0.4      | 70.0±0.2       |
| (4) Bert Large + SQUAD  | 86.0±0.6      | 76.2±0.4       |
This version uses the (4) Bert Large + SQUAD model.
## Cite as
```bibtex
@inproceedings{cetoli-2020-exploring,
title = "Exploring the zero-shot limit of {F}ew{R}el",
author = "Cetoli, Alberto",
booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
month = dec,
year = "2020",
address = "Barcelona, Spain (Online)",
publisher = "International Committee on Computational Linguistics",
url = "https://www.aclweb.org/anthology/2020.coling-main.124",
doi = "10.18653/v1/2020.coling-main.124",
pages = "1447--1451",
abstract = "This paper proposes a general purpose relation extractor that uses Wikidata descriptions to represent the relation{'}s surface form. The results are tested on the FewRel 1.0 dataset, which provides an excellent framework for training and evaluating the proposed zero-shot learning system in English. This relation extractor architecture exploits the implicit knowledge of a language model through a question-answering approach.",
}
```
import torch
class RelationExtractor:
'''
    Ranks a list of candidate relations between two entities in a text.
    See notebooks/extractor_examples.ipynb in this repository for usage
    examples.
'''
def __init__(self, model, tokenizer, relations):
'''
:param model: One of the models in this repository
:param tokenizer: The appropriate tokenizer
:param relations: The list of surface forms, one for each relation
Example: ['noble title', 'founding date', 'occupation of a person']
'''
self._model = model
self._tokenizer = tokenizer
self._relations = relations
def rank(self, text, head, tail):
'''
:param text: The text from which to extract the relation
:param head: The entity that is the subject of the relation
:param tail: The entity that is the object of the relation
Example: (text='John Smith works as a carpenter', head='John Smith', tail='carpenter')
:return: A sorted list of pairs [(surface_form1, probability1), (surface_form2, probability2), ...]
'''
text_tokens = text.split()
head_tokens = head.split()
tail_tokens = tail.split()
start_head = _find_sub_list(text_tokens, head_tokens)
start_tail = _find_sub_list(text_tokens, tail_tokens)
end_head = start_head + len(head_tokens)
end_tail = start_tail + len(tail_tokens)
text_tokens = _double_tokens(text_tokens, start_head, end_head, start_tail, end_tail)
text_tokens = self._tokenizer.encode(' '.join(text_tokens), add_special_tokens=False)
scores = []
for relation_text in self._relations:
relation_tokens = self._tokenizer.encode(relation_text, add_special_tokens=False)
adversarial_score = _get_adversarial_score(self._model, text_tokens, relation_tokens)
scores.append(1 - float(adversarial_score))
to_return = list(zip(self._relations.copy(), scores))
to_return = sorted(to_return, key=lambda x: -x[1])
return to_return
def _run_model(model, text_tokens, relation_tokens):
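    # Build a BERT-style sequence: [CLS] relation tokens [SEP] text tokens [SEP].
    # 101 and 102 are the [CLS] and [SEP] ids in the standard BERT vocabulary.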
inputs = torch.tensor([[101] + relation_tokens
+ [102] + text_tokens
+ [102]
])
length = torch.tensor([len(relation_tokens) + 1])
subj_starts, subj_ends, obj_starts, obj_ends = model(inputs.cpu(), length)
return subj_starts[0][0], subj_ends[0][0], obj_starts[0][0], obj_ends[0][0]
def _get_adversarial_score(model, text_tokens, relation_tokens):
adversarial_score = min(_run_model(model, text_tokens, relation_tokens))
return adversarial_score
def _find_sub_list(lst, sublist):
results = []
sll = len(sublist)
for ind in (i for i, e in enumerate(lst) if e == sublist[0]):
if lst[ind:ind + sll] == sublist:
results.append(ind)
if not results:
raise RuntimeError('The entity "' + ' '.join(sublist) + '" is not in the text.')
return results[0]
def _double_tokens(lst, start_head, end_head, start_tail, end_tail):
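    # Mark the entity spans by repeating their tokens in place, e.g.
    # _double_tokens(['John', 'is', 'a', 'smith'], 0, 1, 3, 4)
    # -> ['John', 'John', 'is', 'a', 'smith', 'smith']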
new_lst = []
for index, item in enumerate(lst):
new_lst.append(item)
if start_head <= index < end_head:
new_lst.append(item)
if start_tail <= index < end_tail:
new_lst.append(item)
    return new_lst
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import logging
import traceback
from django import template
from django.http import Http404
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate, login as auth_login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import login as login_view
from django.contrib.auth.views import logout as logout_view
from django.contrib.auth.models import User
from django.contrib.auth.views import password_reset as django_reset
from django.contrib.auth.views import password_reset_done as django_reset_done
from django.contrib.auth.views import password_reset_confirm as django_reset_confirm
from django.contrib.auth.views import password_reset_complete as django_reset_complete
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from django.views.generic.list import ListView
from common.views import UpdateView
from common.views import LoginRequiredMixin
from common.views import ListView
from common.views import DetailView
from common.views import OwnerRequiredMixin
from users.forms import PasswordResetForm
from users.forms import RegisterForm
from users.forms import UserForm
from users.forms import ProfileForm
from users.forms import DesignForm
from users.utils import get_dict_by_ids
from users.models import Profile
def register(request, **kwargs):
if request.user.is_authenticated():
        messages.error(request, 'You are already registered')
        logging.error('Already registered: %s ' % request.user.username)
return redirect(reverse('error'))
form = RegisterForm()
if request.method == 'POST':
form = RegisterForm(request.POST)
if form.is_valid():
current_site = Site.objects.get_current()
user = form.save()
messages.success(request, _("Bienvenido a %s" % current_site.name))
user = authenticate(username=form.cleaned_data['username'],
password=form.cleaned_data['password1'])
auth_login(request, user)
return redirect(reverse('home'))
context = {
'form': form
}
c = template.RequestContext(request, context)
return render_to_response('page.users.register.html', c)
# password reset
def password_reset(request):
kwargs = {
'template_name': 'page.password_reset.html',
'post_reset_redirect': reverse('password_reset_done'),
'password_reset_form': PasswordResetForm
}
return django_reset(request, **kwargs)
def password_reset_done(request):
kwargs = {
'template_name': 'page.password_reset.done.html'
}
return django_reset_done(request, **kwargs)
def password_reset_confirm(request, uidb36=None, token=None):
kwargs = {
'uidb36': uidb36,
'token': token,
'template_name': 'page.password_reset.confirm.html',
'post_reset_redirect': reverse('password_reset_complete')
}
return django_reset_confirm(request, **kwargs)
def password_reset_complete(request):
kwargs = {
'template_name': 'page.password_reset.complete.html'
}
return django_reset_complete(request, **kwargs)
class AttachActors(object):
"""
    Adds the list of actors involved in the view to the context.
"""
def get_actors_ids(self, context):
"""
Retorna los ids de todos los actores que intervienen en esta vista.
"""
raise NotImplementedError
def get_context_data(self, **kwargs):
"""
        Returns the context with the user and profile dictionaries.
"""
context = super(AttachActors, self).get_context_data(**kwargs)
        # Add the actors' profiles to the context as dictionaries.
users_ids = self.get_actors_ids(context)
context['users_dict'] = get_dict_by_ids(User, users_ids)
context['profiles_dict'] = Profile.objects.profiles_dict(users_ids)
return context
class UsersIndex(AttachActors, ListView):
"""
    Shows the users registered in the system.
"""
model = User
view_name = 'users-index'
app_name = 'users'
templates = {
'html': 'page.users.index.html'
}
def get_actors_ids(self, context):
"""
        Returns the ids of the users in this view.
"""
return [user.id for user in context['object_list']]
class UsersUpdate(OwnerRequiredMixin, UpdateView):
"""
    View to update the user's account details.
"""
model = User
form_class = UserForm
view_name = 'users-account'
app_name = 'users'
templates = {
'html': 'page.users.settings.html'
}
def get_object(self):
return self.request.user
def is_owner(self, user, current_user):
return user.id == current_user.id
def form_valid(self, form):
user = form.save()
        self.success_message = _(u'Your account was updated.')
return super(UsersUpdate, self).form_valid(form)
def get_success_redirect_url(self):
return reverse('users_profile', args=[self.request.user.username])
class UsersUpdateProfile(OwnerRequiredMixin, UpdateView):
"""
    View to update the user's profile.
"""
model = Profile
form_class = ProfileForm
view_name = 'users-personal'
app_name = 'users'
templates = {
'html': 'page.users.settings.html'
}
def get_object(self):
profile = self.request.user.get_profile()
return profile
def form_valid(self, form):
profile = form.save()
        self.success_message = _(u'Your details were updated.')
return super(UsersUpdateProfile, self).form_valid(form)
def get_success_redirect_url(self):
return reverse('users_profile', args=[self.request.user.username])
class UsersUpdateDesign(UsersUpdateProfile):
"""
    View to update the design of the user's profile.
"""
form_class = DesignForm
view_name = 'users-design'
def form_valid(self, form):
"""
        Processes the form.
"""
profile = form.save()
        self.success_message = _(u'The design was updated successfully.')
return super(UsersUpdateDesign, self).form_valid(form)
class UsersProfile(DetailView):
"""
    Shows the user's profile.
"""
model = User
view_name = 'users-profile'
app_name = 'users'
templates = {
'html': 'page.users.profile.html'
}
def get_object(self):
username = self.kwargs.get('username', self.request.user.username)
return get_object_or_404(User, username=username)
def get_context_data(self, **kwargs):
context = super(UsersProfile, self).get_context_data(**kwargs)
context['user_profile'] = context['object'].get_profile()
        return context
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import os
import uuid
import logging
from time import time
from django.db import models
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from common.fields import DictField
from common.fields import ColorField
from thumbnails.models import ThumbnailMixin
from users.managers import ProfileManager
class Profile(ThumbnailMixin):
"""
    Model to handle additional information about the user's profile.
"""
    #: User the profile belongs to.
    user = models.ForeignKey(User, related_name='profile')
    #: Username.
    username = models.CharField(_(u'Username'), max_length=255,
blank=True,
null=True)
    # Date of the last publication made.
    last_published = models.DateTimeField(_(u'Last publication'), auto_now_add=True)
    #: User's description.
    description = models.TextField(_(u'Description'), blank=True, default='')
    #: User's website.
    url = models.URLField(_(u'Website'), blank=True, default='')
    #: Field to store additional information.
extras = DictField(_('Extras'), null=False, blank=True, default={})
    #: Profile background image.
    background = models.ImageField(_(u'Background image'), blank=True, null=True, upload_to='backgrounds')
    #: Profile background colour.
    background_color = ColorField(_(u'Background colour'), blank=True, null=True)
    #: Colour of the links.
    links_color = ColorField(_(u'Links colour'), blank=True, null=True)
    #: Button background colour.
    button_background = ColorField(_(u'Button background'), blank=True, null=True)
    #: Button text colour.
    button_color = ColorField(_(u'Button text colour'), blank=True, null=True)
objects = ProfileManager()
    #: Allowed avatar sizes.
sizes = {
'u': (25, 25),
's': (50, 50),
'm': (80, 80),
'l': (115, 115),
}
    # Base directory for the avatars.
basepath = 'avatars'
    #: Default avatars.
defaults = {
'u': 'avatar_u.png',
's': 'avatar_s.png',
'm': 'avatar_m.png',
'l': 'avatar_l.png',
}
def thumbnail_name(self, size):
"""
        Returns the name of the thumbnail of size *size*.
"""
thumb_name = '%s_%s.jpg' % (self.user.username, str(size))
return os.path.join(self.thumbnail_basepath(), thumb_name)
@models.permalink
def get_absolute_url(self):
"""
Retorna el path absoluto del perfil.
"""
        return ('users_profile', [self.username])
__author__ = 'Jose Maria Zambrana Arze'
__email__ = '[email protected]'
__version__ = '0.1'
__copyright__ = 'Copyright 2012, Mandla Web Studio'
import logging
import re
from django import forms
from django.core.exceptions import MultipleObjectsReturned
from django.forms.widgets import Widget
from django.forms.fields import Field
from django.conf import settings
from django.forms import ModelForm
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.forms import PasswordResetForm
from django.contrib.localflavor.es import forms as es_forms
from django.contrib.auth.tokens import default_token_generator
from django.utils.translation import ugettext_lazy as _
from django.utils.http import int_to_base36
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
from django.contrib.sites.models import Site
from common.mail import Mailer
from thumbnails.templatetags.thumbnails_tags import thumbnail_url
from thumbnails.forms import ThumbnailField
from thumbnails.utils import validate_file_size
from users.models import Profile
UPPER_RE = re.compile('[A-Z]+')
USERNAME_RE = re.compile(r'^[a-z0-9\-]+$')
class MixinClean(object):
"""
    Class to encapsulate the validations related to a user's fields.
"""
def clean_password2(self):
password1 = self.cleaned_data.get("password1", "")
password2 = self.cleaned_data["password2"]
if password1 != password2:
raise forms.ValidationError(_("The two password fields didn't match."))
return password2
def clean_email(self):
email = self.cleaned_data.get('email')
        if not email:
            raise forms.ValidationError(_(u'The email is required'))
        username = self.cleaned_data.get('username')
        if email and User.objects.filter(email=email).exclude(username=username).count():
            raise forms.ValidationError(u'The email must be unique.')
return email
def clean_first_name(self):
first_name = self.cleaned_data.get('first_name')
if not first_name:
            raise forms.ValidationError(_(u'The first name is required'))
return first_name
def clean_last_name(self):
last_name = self.cleaned_data.get('last_name')
if not last_name:
            raise forms.ValidationError(_(u'The last name is required.'))
return last_name
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class RegisterForm(UserCreationForm, MixinClean):
"""
    Form to register a user in the system.
"""
class Meta:
model = UserCreationForm.Meta.model
fields = ("username", "first_name", "last_name", "email")
def __init__(self, *args, **kwargs):
UserCreationForm.__init__(self, *args, **kwargs)
self.fields.keyOrder = ["first_name", "last_name", "username",
"password1", "password2", "email"]
        self.fields['first_name'].label = _('First name')
        self.fields['last_name'].label = _('Last name')
        self.fields['email'].label = _('Email')
        self.fields['username'].help_text = ""
        self.fields['password2'].help_text = ""
def clean_username(self):
"""
        Validates the username.
"""
username = self.cleaned_data.get('username')
if not USERNAME_RE.match(username):
raise forms.ValidationError(_(u"Solo se permiten caracteres alfanumΓ©ricos y el caracter -"))
return username.lower()
def save(self, *args, **kwargs):
"""
        Stores the new user in the database and sends a welcome mail.
        """
        # Create the user.
        user = super(RegisterForm, self).save(*args, **kwargs)
        # Send the welcome mail.
        current_site = Site.objects.get_current()
        context = {
            'user': user,
        }
        subject = _(u'%(firstname)s, welcome to %(sitename)s') % ({
            'firstname': user.first_name,
            'sitename': current_site.name
        })
mail = Mailer(subject, 'mail.welcome.txt',
'mail.welcome.html',
**context)
mail.send(user.email)
return user
class UserForm(forms.ModelForm, MixinClean):
"""
    Form to configure the user's details: first name, last name, email and
    password.
"""
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput,
required=False)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above,"
" for verification."),
required=False)
class Meta:
model = User
fields = ("first_name", "last_name", "email")
def __init__(self, *args, **kwargs):
super(UserForm, self).__init__(*args, **kwargs)
self.fields.keyOrder = ["first_name", "last_name",
"password1", "password2", "email"]
        self.fields['first_name'].label = _('First name')
        self.fields['last_name'].label = _('Last name')
self.fields['password2'].help_text = ""
def clean_email(self):
email = self.cleaned_data.get('email')
        if not email:
            raise forms.ValidationError(_(u'The email is required'))
        username = self.instance.username
        if email and User.objects.filter(email=email).exclude(username=username).exists():
            raise forms.ValidationError(u'The email must be unique.')
return email
def save(self, commit=True):
user = super(UserForm, self).save(commit=False)
if self.cleaned_data["password1"]:
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class ProfileForm(forms.ModelForm):
"""
    Form to configure the user's profile.
"""
    image = ThumbnailField(label=_("Avatar"), required=False, help_text=u'Maximum size 5 MB')
class Meta:
model = Profile
fields = ('image', 'url', 'description')
def clean_image(self):
image = self.cleaned_data.get('image')
if image is not None:
validate_file_size(image)
return image
def save(self):
image = self.cleaned_data.get('image')
profile = super(ProfileForm, self).save(commit=True)
if image is not None:
profile.create_thumbnails()
return profile
class DesignForm(forms.ModelForm):
"""
    Form to configure the design of a user's profile.
"""
class Meta:
model = Profile
fields = ('background', 'background_color', 'links_color',
'button_background', 'button_color')
class PasswordResetForm(PasswordResetForm):
def save(self, **kwargs):
use_https = kwargs.pop('use_https', False)
token_generator = kwargs.pop('token_generator', default_token_generator)
request = kwargs.pop('request', None)
from_email = kwargs.pop('from_email', None)
        for user in self.users_cache:
            current_site = Site.objects.get_current()
            site_name = current_site.name
            domain = current_site.domain
subject = _("Password reset on %s") % site_name
context = {
'email': user.email,
'domain': domain,
'site_name': site_name,
'uid': int_to_base36(user.id),
'user': user,
'token': token_generator.make_token(user),
'protocol': use_https and 'https' or 'http',
}
mail = Mailer(subject, 'mail.password_instructions.txt',
'mail.password_instructions.html',
**context)
            mail.send(user.email, from_email)
# Zero-width characters written as escapes so they stay visible in source.
_ZERO_WIDTH_NON_JOINER = '\u200c'
_ZERO_WIDTH_JOINER = '\u200d'
_ZERO_WIDTH_SPACE = '\u200b'
_ZERO_WIDTH_NO_BREAK_SPACE = '\ufeff'
_LEFT_TO_RIGHT_MARK = '\u200e'
_RIGHT_TO_LEFT_MARK = '\u200f'
zeroWidthDict = {
_LEFT_TO_RIGHT_MARK: _LEFT_TO_RIGHT_MARK,
_RIGHT_TO_LEFT_MARK: _RIGHT_TO_LEFT_MARK,
_ZERO_WIDTH_NON_JOINER: _ZERO_WIDTH_NON_JOINER,
_ZERO_WIDTH_JOINER: _ZERO_WIDTH_JOINER,
_ZERO_WIDTH_NO_BREAK_SPACE: _ZERO_WIDTH_NO_BREAK_SPACE,
_ZERO_WIDTH_SPACE: _ZERO_WIDTH_SPACE
}
_Quinary2ZeroMap: list = list(zeroWidthDict.values())
_Zero2QuinaryMap: dict = {index: values for values, index in enumerate(_Quinary2ZeroMap)}
def _is_visible(char: str) -> bool:
return char not in _Zero2QuinaryMap
def _find_first_visible(text: str):
for index, char in enumerate(text):
if _is_visible(char):
return index
return -1
def _to_any_base(number: int, radix: int) -> str:
    digits = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*()_+-={}[]|\\:\";\'<>?,./`~"
    max_radix = len(digits)
    if radix < 2 or radix > max_radix:
        raise ValueError(f"Radix must be between 2 and {max_radix}.")
    if number == 0:
        return "0"
    remstack = []
    while number > 0:
        rem = number % radix
        remstack.append(rem)
        number = number // radix
    result = ""
    while len(remstack):
        result += digits[remstack.pop()]
    return result
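# For example, _to_any_base(104, 5) == '404'; t2z() maps each such quinary
# digit to one of the first five zero-width characters in _Quinary2ZeroMap
# (the sixth, the zero-width space, separates encoded characters).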
def t2z(t: str) -> str:
z = ''
char: str
for char in list(t):
base10 = ord(char)
base5 = _to_any_base(int(base10), 5)
zero = ''.join([_Quinary2ZeroMap[int(each)] for each in list(base5)])
z = z + zero + _ZERO_WIDTH_SPACE
return z[:-1]
def z2t(z: str) -> str:
t = ''
if len(z) == 0:
return t
char: str
for char in z.split(_ZERO_WIDTH_SPACE):
base5 = ''.join([str(_Zero2QuinaryMap[each]) for each in list(char)])
t += chr(int(base5, 5))
return t
def encode(visible: str, hidden: str) -> str:
hid2z = t2z(hidden)
if len(visible) == 0:
return hid2z
e = f"{visible[:1]}{hid2z}{visible[1:]}"
return e
def extract(text: str) -> dict[str, str]:
    # Split the text into its visible carrier and the zero-width payload that
    # sits between the first and second visible characters (see encode()).
    first_visible = _find_first_visible(text)
    second_visible = _find_first_visible(text[first_visible + 1:])
    visible = ''
    hidden = ''
    for char in text[:second_visible + 1]:
        if _is_visible(char):
            visible += char
        else:
            hidden += char
    for char in text[second_visible + 1:]:
        if _is_visible(char):
            visible += char
    return {"visible": visible,
            "hidden": hidden}
def decode(visible: str) -> str:
return z2t(extract(visible)['hidden'])
def split(text: str) -> str:
second_visible = _find_first_visible(text[1:])
result = text[:second_visible + 1]
split_list = text[second_visible + 1:]
for char in split_list:
result += f"{char}{_ZERO_WIDTH_SPACE}"
    return result
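# A minimal usage sketch (editor's addition, not part of the original module):
# round-trip a hidden message through encode()/decode() using the helpers above.
if __name__ == '__main__':
    carrier = 'hello world'
    secret = 'hi'
    stego = encode(carrier, secret)   # Renders identically to 'hello world'.
    assert extract(stego)['visible'] == carrier
    assert decode(stego) == secret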
# Zero
Linear electronic circuit utility. This package provides tools to simulate responses and noise in
linear electronic circuits, SI unit parsing and formatting, datasheet grabbing, and more.
This tool's simulator is inspired by [LISO](https://wiki.projekt.uni-hannover.de/aei-geo-q/start/software/liso),
and comes bundled with its op-amp library ([including tools to search it](https://docs.ligo.org/sean-leavey/zero/cli/library.html#search-queries)).
It also ([somewhat](https://docs.ligo.org/sean-leavey/zero/liso/input.html#known-incompatibilities))
understands LISO input and output files, and can plot or re-simulate their contents.
## Documentation
See the [online documentation](https://docs.ligo.org/sean-leavey/zero/).
## Installation
This library requires at least Python 3.6. It will not work on earlier versions of Python 3, nor
Python 2. You may wish to use `virtualenv` or `conda` to manage a separate environment with Python
3.
This library contains a `setup.py` file which tells Python how it should be installed. Installation
can be automated using `pip`. Open up a terminal or command prompt (Windows) and type:
```bash
pip install zero
```
This installs the library and adds a console script `zero` which provides access to the package's
command line utility.
If you want to update the library to a later version after having previously installed it, run:
```bash
pip install zero --upgrade
```
## Contributing
Bug reports and feature requests are always welcome, as are code contributions. Please use the
project's [issue tracker](https://git.ligo.org/sean-leavey/zero/issues).
## Future ideas
- Allow arbitrary op-amp noise spectra (interpolate to the frequency vector actually used)
- Split op-amp families into their own library files
- Some sort of system for sharing op-amp, regulator, resistor, etc. library data across the web
- A standardised export file format (XML?)
- Other types of noise, e.g. resistor excess noise
- Op-amp noise optimisation: here's my circuit, this is the frequency band I care about, now
what's the best op-amp to use?
- Grouped components that are represented as a single component in the input definition:
- filters, e.g. whitening filters
- real passive components: capacitors with ESR, resistors with stray inductance, etc.
## Credits
Sean Leavey
<[email protected]>
Invaluable insight into LISO's workings provided by Gerhard Heinzel. The author is also grateful for
contributions by Sebastian Steinlechner.
| zero | /zero-0.9.1.tar.gz/zero-0.9.1/README.md | README.md |
.. include:: /defs.txt
|Zero| documentation
====================
.. note::
|Zero| is still under construction, with the program structure and behaviour **not** stable.
The program, and this documentation, may be altered in ways that break existing scripts at any
time without notice.
|Zero| is a linear circuit simulation library and command line tool. It is able to
perform small signal ac analysis on collections of components such as resistors, capacitors,
inductors and op-amps to predict responses and noise.
===========
Why |Zero|?
===========
Given that tools such as `LTspice
<http://www.analog.com/en/design-center/design-tools-and-calculators/ltspice-simulator.html>`_ and
`Qucs <http://qucs.sourceforge.net/>`_ exist, why use this tool?
The answer is: `it depends`. For circuits where dc analysis is needed, or where you must
model non-linear or time-variant effects, the above tools are very useful; however,
whilst component manufacturers often provide SPICE models to represent their parts, these often do
not correctly model noise and output impedance. This is especially true for op-amps, at
least historically. One of the key advantages of :ref:`LISO <index:LISO>`, upon which |Zero| is
loosely based, was that *measured* op-amp parameters were provided as standard in its library,
available to be used in simulations to provide accurate gain and noise results. This feature has
become incredibly useful to LISO's users. LISO furthermore provided an optimiser to be able to use
to tune circuit component values, something which is much trickier to do with SPICE.
|Zero| implements around half of what LISO is capable of doing, but extends it in a few ways to
provide greater customisability and ease of post-processing. You can for example easily add new
noise sources to components and simulate how they propagate through circuits without having to edit
any core code, or implement your own types of analysis using the base |Zero| circuit objects. The
results are also provided as a so-called `solution` which contains the simulation data as well as
means to plot, export and further process it.
The other half of LISO is the so-called `root` mode, which includes a powerful optimiser. This is
not part of |Zero|, but given that |Zero| exists within the Python ecosystem it is possible to use
it with other optimisation packages such as `scipy.optimize`. LISO's optimiser may one day be
implemented in |Zero|.
================
What |Zero| does
================
|Zero| can perform small signal analyses on circuits containing linear components. It is inherently
AC, and as such can compute :ref:`frequency responses between nodes or components
<analyses/ac/signal:Small AC signal analysis>` and :ref:`noise spectral densities at nodes
<analyses/ac/noise:Small AC noise analysis>`. Inputs and outputs can be specified in terms of
voltage or current.
For more information, see :ref:`the available AC analyses <analyses/ac/index:Available analyses>`.
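
The snippet below is a minimal sketch of the typical workflow; its component values and
node names are illustrative only, and complete, runnable scripts are given in
:ref:`examples/index:Examples`.

.. code-block:: python

    import numpy as np
    from zero import Circuit
    from zero.analysis import AcSignalAnalysis

    # A simple RC low-pass filter.
    circuit = Circuit()
    circuit.add_resistor(value="1k", node1="n1", node2="n2")
    circuit.add_capacitor(value="100n", node1="n2", node2="gnd")

    # Compute responses to a voltage input at node n1.
    analysis = AcSignalAnalysis(circuit=circuit)
    solution = analysis.calculate(frequencies=np.logspace(0, 5, 250),
                                  input_type="voltage", node="n1")

    plot = solution.plot_responses(sink="n2")
    plot.show()
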
==========================
What |Zero| does not do
==========================
|Zero|'s scope is fairly limited to the problem of simple op-amp circuits in the frequency domain
using linear time invariant (LTI) analysis. This means that the parameters of the components within
the circuit cannot change over time, so for example the charging of a capacitor or the switching of
a transistor cannot be simulated. This rules out certain simulations, such as those of switch-mode
power supply circuits and power-on characteristics, and also effects such as distortion, saturation
and intermodulation that would appear in real circuits. Effects at dc such as op-amp input offset
voltages and currents are also not modelled. Instead, the circuit is assumed to be at its operating
point, and the circuit is linearised around zero, such that if the current through a component is
reversed, the voltage drop across that component is also reversed. A small signal analysis is then
performed to simulate the effect of the circuit's output given small variations in its input. This
is perfect for computing transfer functions and ac noise, but not for non-linear, time-varying
effects. You should bear these points in mind before choosing to use |Zero| for more complete
analyses.
For more information, see :ref:`analyses/ac/index:AC analyses`.
====
LISO
====
|Zero| is loosely based on `LISO
<https://wiki.projekt.uni-hannover.de/aei-geo-q/start/software/liso>`_ by Gerhard Heinzel. It
(mostly) understands LISO circuit mode input files, meaning that it can be used in place of LISO to
simulate circuit signals. It also understands LISO output files, allowing results previously
computed with LISO to be plotted, and for LISO results to be directly compared to those of this
program.
Contents
========
.. toctree::
:maxdepth: 3
introduction/index
circuit/index
components/index
analyses/index
solution/index
plotting/index
data/index
format/index
examples/index
liso/index
cli/index
configuration/index
developers/index
contributing/index
Contributing
============
Contributions are very welcome, and can be in any form.
Suggestions and bug reports
---------------------------
Please submit suggestions using the `issue tracker`_.
Op-amp library additions
------------------------
Please submit op-amp library entries using the `issue tracker`_. Ideally also submit a datasheet
URL and any measured data from which your op-amp parameters have been derived.
Code contributions
------------------
Please see :ref:`developers/index:Information for developers`.
.. _issue tracker: https://git.ligo.org/sean-leavey/zero/issues
| zero | /zero-0.9.1.tar.gz/zero-0.9.1/docs/contributing/index.rst | index.rst |
.. include:: /defs.txt
Examples
========
This page lists some example circuits in |Zero|. Some similar circuit examples are provided in the
`development repository`_.
Non-inverting whitening filter response
---------------------------------------
The following is the voltage-voltage response from a non-inverting whitening filter, providing
34 dB gain between around 10 Hz to 1 kHz.
.. plot::
:include-source:
import numpy as np
from zero import Circuit
from zero.analysis import AcSignalAnalysis
# 1000 frequencies between 10 mHz to 100 kHz.
frequencies = np.logspace(-2, 5, 1000)
# Create circuit object.
circuit = Circuit()
# Add components.
circuit.add_library_opamp(model="LT1124", node1="n1", node2="n3", node3="n4")
circuit.add_capacitor(value="10u", node1="gnd", node2="n2")
circuit.add_resistor(value="2k", node1="n2", node2="n3")
circuit.add_resistor(value="100k", node1="n3", node2="n4")
circuit.add_capacitor(value="1.5n", node1="n3", node2="n4")
# Solve circuit.
analysis = AcSignalAnalysis(circuit=circuit)
solution = analysis.calculate(frequencies=frequencies, input_type="voltage", node="n1")
# Plot.
plot = solution.plot_responses(sink="n4")
plot.show()
Inverting whitening filter response
-----------------------------------
The following is the voltage-voltage response from an inverting whitening filter, providing 34 dB
gain between around 10 Hz to 1 kHz. The response is almost identical to the `non-inverting
filter above <#non-inverting-whitening-filter-response>`__ except its output has opposite sign (and
the input impedance is much lower).
.. plot::
:include-source:
import numpy as np
from zero import Circuit
from zero.analysis import AcSignalAnalysis
# 1000 frequencies between 10 mHz to 100 kHz.
frequencies = np.logspace(-2, 5, 1000)
# Create circuit object.
circuit = Circuit()
# Add components.
circuit.add_capacitor(value="10u", node1="n1", node2="n2")
circuit.add_resistor(value="2k", node1="n2", node2="n3")
circuit.add_resistor(value="100k", node1="n1", node2="n3")
circuit.add_library_opamp(model="LT1124", node1="gnd", node2="n3", node3="n4")
circuit.add_resistor(value="100k", node1="n3", node2="n4")
circuit.add_capacitor(value="1.5n", node1="n3", node2="n4")
# Solve circuit.
analysis = AcSignalAnalysis(circuit=circuit)
solution = analysis.calculate(frequencies=frequencies, input_type="voltage", node="n1")
# Plot.
plot = solution.plot_responses(sink="n4")
plot.show()
Inverting whitening filter output noise
---------------------------------------
The voltage noise at the output node (``nout``) can be calculated with the code below.
.. plot::
:include-source:
import numpy as np
from zero import Circuit
from zero.analysis import AcNoiseAnalysis
# 1000 frequencies between 1 Hz to 1 MHz
frequencies = np.logspace(0, 6, 1000)
# Create circuit object.
circuit = Circuit()
# Add components.
circuit.add_capacitor(value="10u", node1="gnd", node2="n1")
circuit.add_resistor(value="430", node1="n1", node2="nm", name="r1")
circuit.add_resistor(value="43k", node1="nm", node2="nout")
circuit.add_capacitor(value="47p", node1="nm", node2="nout")
circuit.add_library_opamp(model="LT1124", node1="gnd", node2="nm", node3="nout")
# Solve circuit.
analysis = AcNoiseAnalysis(circuit=circuit)
solution = analysis.calculate(frequencies=frequencies, input_type="voltage", node="n1",
sink="nout", incoherent_sum=True)
# Plot.
plot = solution.plot_noise(sink="nout")
plot.show()
The current noise through the op-amp can be calculated with the code below. Note that it is useful
to give the op-amp an explicit name in order to reference it as the noise sink.
.. plot::
:include-source:
import numpy as np
from zero import Circuit
from zero.analysis import AcNoiseAnalysis
# 1000 frequencies between 1 Hz to 1 MHz
frequencies = np.logspace(0, 6, 1000)
# Create circuit object.
circuit = Circuit()
# Add components.
circuit.add_capacitor(value="10u", node1="gnd", node2="n1")
circuit.add_resistor(value="430", node1="n1", node2="nm", name="r1")
circuit.add_resistor(value="43k", node1="nm", node2="nout")
circuit.add_capacitor(value="47p", node1="nm", node2="nout")
circuit.add_library_opamp(name="op1", model="LT1124", node1="gnd", node2="nm", node3="nout")
# Solve circuit.
analysis = AcNoiseAnalysis(circuit=circuit)
solution = analysis.calculate(frequencies=frequencies, input_type="voltage", node="n1",
sink="op1", incoherent_sum=True)
# Plot.
plot = solution.plot_noise(sink="op1")
plot.show()
Input-projected photodetector readout noise
-------------------------------------------
With photodetectors used in optics experiments, it is often very useful to present the readout noise
from the electronics in terms of the equivalent input noise. Photodiodes used at the input of these
readout circuits can usually be considered as current sources, so projecting the noise to the input
involves projecting into units of ``A/sqrt(Hz)``. It is often more useful still to scale this input
noise to some other unit, such as displacement (useful for instance in interferometer experiments).
The example here shows the projection of the total output voltage noise to the equivalent input
current noise and the scaling of the input current noise to displacement noise.
.. plot::
:include-source:
import numpy as np
from zero import Circuit
from zero.analysis import AcNoiseAnalysis
from zero.tools import create_response
# 1000 frequencies between 10 mHz to 1 kHz
frequencies = np.logspace(-2, 3, 1000)
# Create circuit object.
circuit = Circuit()
# The photodiode is a current source that connects through a photodiode circuit model (shunt
# capacitor and series resistor).
circuit.add_capacitor(value="200p", node1="gnd", node2="nd")
circuit.add_resistor(value="10", node1="nd", node2="nm")
# Transimpedance amplifier.
circuit.add_library_opamp(model="OP27", node1="gnd", node2="nm", node3="nout")
circuit.add_resistor(value="1k", node1="nm", node2="nout")
# Solve circuit. Assume an input impedance of 1 GΩ for the photodiode.
analysis = AcNoiseAnalysis(circuit=circuit)
solution = analysis.calculate(frequencies=frequencies, input_type="current", node="nd",
sink="nout", impedance="1G", incoherent_sum=True,
input_refer=True)
# Scale all noise at the input to displacement (response assumed to be 1 nm/A).
pd_to_displacement = create_response(source="input", sink="displacement", source_unit="A",
sink_unit="m", data=1e-9*np.ones_like(frequencies),
frequencies=frequencies)
solution.scale_noise(pd_to_displacement, sink="input")
# Plot. Note that the sink is now the input, since we projected the noise there. We also set a
# y-limit so the plot doesn't expand to show the (very small) resistor noise.
plot = solution.plot_noise(sink="displacement", title="Photodetector noise",
ylim=(1e-22, 1e-19))
plot.show()
LISO scripts
------------
Input file parsing
~~~~~~~~~~~~~~~~~~
|Zero| is capable of parsing :ref:`most <liso/input:Known incompatibilities>` LISO input files.
To start, create a new parser:
.. plot::
:include-source:
:context:
:nofigs:
from zero.liso import LisoInputParser
parser = LisoInputParser()
To parse a LISO circuit, either call the :meth:`~.LisoParser.parse` method with text:
.. plot::
:include-source:
:context:
:nofigs:
parser.parse("""
c c1 10u gnd n1
r r1 430 n1 nm
r r2 43k nm nout
c c2 47p nm nout
op op1 lt1124 nin nm nout
freq log 1 100k 100
uinput nin 0
uoutput nout:db:deg
""")
Or point it to a file using the :code:`path` parameter:
.. code-block:: python
parser.parse(path="/path/to/liso/script.fil")
Get the solution with :meth:`~.LisoParser.solution` and plot and show it with
:meth:`.Solution.plot` and :meth:`.Solution.show`:
.. plot::
:include-source:
:context:
solution = parser.solution()
solution.plot()
solution.show()
You can at any time list the circuit's constituent components with :code:`print(parser.circuit)`:
.. code-block:: console
Circuit with 6 components and 5 nodes
1. c1 [in=gnd, out=n1, C=1e-05]
2. c2 [in=nm, out=nout, C=4.7e-11]
3. input [in=gnd, out=nin, Z=default]
4. op1 [in+=nin, in-=nm, out=nout, model=LT1124]
5. r1 [in=n1, out=nm, R=430.0]
6. r2 [in=nm, out=nout, R=43000.0]
Generating a circuit graph
--------------------------
You can plot the circuit's node network using Graphviz, if installed. Using the circuit from the
`inverting whitening filter example above <#inverting-whitening-filter-output-noise>`__, the
following code will generate and open a PDF of the circuit's node network:
.. code:: python
from zero.display import NodeGraph
graph = NodeGraph(circuit)
graph.view_pdf()
.. image:: /_static/liso-input-node-graph.svg
Graphviz almost always produces a messy representation, but it can sometimes be useful to help
spot simple mistakes in circuit definitions.
.. _development repository: https://git.ligo.org/sean-leavey/zero/tree/master/examples/native
.. source: docs/examples/index.rst
########
Circuits
########
.. code-block:: python
>>> from zero import Circuit
====================
What is a 'circuit'?
====================
A :class:`circuit <.Circuit>` describes a collection of :class:`components <.Component>`
connected at :class:`nodes <.Node>`. It may contain :class:`resistors <.Resistor>`,
:class:`capacitors <.Capacitor>`, :class:`inductors <.Inductor>` and
:class:`op-amps <.OpAmp>`, and the circuit can be supplied with an :class:`input <.Input>`
in order to produce a current through and voltage across these components.
A circuit can be instantiated without arguments:
.. code-block:: python
>>> circuit = Circuit()
You can print the circuit to retrieve a list of its constituents:
.. code-block:: python
>>> print(circuit)
Circuit with 0 components and 0 nodes
Circuits are only useful once you add components. This is achieved using the various ``add_``
methods, such as :meth:`.add_resistor`, :meth:`.add_capacitor`, :meth:`.add_inductor` and
:meth:`.add_opamp`.
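For example (a minimal sketch with arbitrary component values, continuing from the empty circuit
above):
.. code-block:: python
   >>> circuit.add_resistor(value="1k", node1="n1", node2="n2")
   >>> circuit.add_capacitor(value="10u", node1="n2", node2="gnd")
   >>> print(circuit)
   Circuit with 2 components and 3 nodes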
====================
Circuit manipulation
====================
Circuits can be modified before and after applying :ref:`analyses <analyses/index:Analyses>`.
Circuit components can be removed with :meth:`.remove_component` or replaced with
:meth:`.replace_component`.
When a component is removed, any connected nodes shared by other components are preserved.
When a component is replaced with another one, its nodes are copied to the new component and the new
component's nodes are overwritten. The components being swapped must be compatible: the number of
nodes in the current and replacement component must be the same, meaning that :ref:`passive
components <components/passive-components:Passive components>` can only be swapped for other passive
components, and :ref:`op-amps <components/op-amps:Op-amps>` can only be swapped for other op-amps.
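As a sketch, replacement might look like the following, assuming a circuit containing a resistor
named ``r1`` and assuming :meth:`.replace_component` takes the existing component followed by its
replacement (check the API reference for the exact signature):
.. code-block:: python
   >>> from zero.components import Resistor
   >>> # The replacement's nodes are overwritten with those of the component it replaces.
   >>> r_new = Resistor(name="r2", value="2k", node1="n1", node2="n2")
   >>> circuit.replace_component(circuit["r1"], r_new)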
.. source: docs/circuit/index.rst
.. include:: /defs.txt
######################
Command line interface
######################
|Zero| provides a command line interface to perform some common tasks:
- :ref:`Run LISO scripts <cli/liso:LISO tools>`
- :ref:`Edit the user configuration <cli/settings:Settings>`
- :ref:`Find, open and search the component library <cli/library:Component library tools>`
- :ref:`Download and display datasheets <cli/datasheets:Datasheets>`
========
Commands
========
.. toctree::
:maxdepth: 2
settings
library
datasheets
liso
.. source: docs/cli/index.rst
.. include:: /defs.txt
########
Settings
########
|Zero|'s command line interface can be used to create, edit, remove and list the settings file.
Listing the user configuration file path
----------------------------------------
The default settings can be supplemented or overridden by a user-defined configuration file. This
file is stored within the user's home directory in a location that depends on the operating system.
The path to this file can be listed with the command ``zero config path``.
Creating a user configuration
-----------------------------
An empty user configuration can be created with ``zero config create``.
Opening the user configuration for editing
------------------------------------------
The user configuration can be opened with the command ``zero config edit``.
Removing the user configuration
-------------------------------
The user configuration can be removed with ``zero config remove``.
Showing the configuration
-------------------------
The combined contents of the built-in configuration and any user-defined additions or overrides can
be printed to the screen with:
.. command-output:: zero config show
For large configurations, it is often useful to specify the ``--paged`` flag to allow the contents
to be navigated.
Styling plots
-------------
Plots generated with Matplotlib can have their style overridden by specifying commands in the
``plot.matplotlib`` section. For example, to specify the default line thickness, use the following
configuration::
plot:
matplotlib:
lines.linewidth: 3
Refer to `this Matplotlib sample configuration file <https://matplotlib.org/users/customizing.html#a-sample-matplotlibrc-file>`_
for more configuration parameters.
Command reference
-----------------
.. click:: zero.__main__:config
:prog: zero config
:show-nested:
.. source: docs/cli/settings.rst
.. include:: /defs.txt
##########
LISO tools
##########
.. hint::
Also see the documentation on :ref:`LISO compatibility <liso/index:LISO Compatibility>`.
|Zero| can parse LISO input and output files, run them natively or run them via a local
LISO binary and display the results. It can also compare its native results to that of
LISO by overlaying results in a plot or displaying a table of values.
Script path
-----------
For all calls to ``zero liso``, one or more script paths (``FILE``) must be specified. These can
be either LISO input or output files (commonly given ``.fil`` and ``.out`` extensions, respectively),
and |Zero| will choose an appropriate parser based on what it finds.
When more than one script is specified, they are simulated separately and the results combined. See
:ref:`cli/liso:Simulating multiple input files together` for more information.
Verbose output
--------------
By default, the command line utility does not output any text except that which is requested.
Verbosity can be switched on with the ``-v`` flag. Specify ``-vv`` for greater verbosity.
.. note::
The ``-v`` flag must be specified before the ``liso`` subcommand, i.e. ``zero -v liso [FILE]``.
An error will occur if the flag is specified after a subcommand.
Simulating a LISO input script with |Zero|
------------------------------------------
LISO input scripts can be run natively with the ``zero liso`` command. The input file is first
parsed and then built into an :class:`analysis <.BaseAnalysis>` which is then solved.
.. code-block:: bash
$ zero liso /path/to/liso/script.fil
The plotted functions specified in the LISO input file are reproduced in the default |Zero| plot,
including noise sums.
Re-simulating a LISO output file with |Zero|
--------------------------------------------
LISO result files contain a complete description of the simulated circuit, and as such can be
parsed by |Zero| and re-simulated natively.
.. code-block:: bash
$ zero liso /path/to/liso/script.out
Simulating a LISO input script with an external LISO binary
-----------------------------------------------------------
|Zero| can simulate a LISO input script with a locally installed LISO binary using the ``--liso``
flag. |Zero| runs the script with LISO and then parses the output file so that you can take
advantage of its plotting capabilities.
The LISO binary path must be specified with the ``--liso-path`` option. This must point to the exact
binary file, not just its directory, but may be relative to the current directory.
.. code-block:: bash
$ zero liso /path/to/liso/script.fil --liso --liso-path /path/to/liso/fil
An alternative is to set the ``LISO_PATH`` environment variable to point to the LISO binary. Since
LISO anyway requests that users set the ``LISO_DIR`` environment variable, on Unix systems this can
be used to set ``LISO_PATH``, either in the terminal profile or as part of the call:
.. code-block:: bash
$ LISO_PATH=$LISO_DIR/fil_static zero liso /path/to/liso/script.fil --liso
.. warning::
LISO uses a separate op-amp library to |Zero|, and these may differ if modifications have been
made to one but not the other. Take care when comparing results between the two tools.
Comparing a native simulation to LISO
-------------------------------------
As |Zero| can simulate LISO input scripts both natively and using the LISO binary, it can also
overlay the results on one plot, or report the difference between the results textually.
To overlay the results in a plot, specify the ``--compare`` flag. |Zero| will then run the specified
input file itself and with LISO, then it will parse the LISO results and combine them with its own.
The resulting plot then contains each function, with the native results with solid lines and the
LISO results with dashed lines:
.. image:: /_static/liso-compare-response.svg
A textual representation of the differences can also be displayed by specifying ``--diff``. This
must be provided in addition to ``--compare``. When specified, this prints a table containing
the worst relative and absolute differences between the two solutions, and the frequencies at which
they occur:
.. code-block:: text
╒═══════════════════╤═══════════════════════════════╤═══════════════════════════════╕
│                   │ Worst difference (absolute)   │ Worst difference (relative)   │
╞═══════════════════╪═══════════════════════════════╪═══════════════════════════════╡
│ nin to op1 (A/V)  │ 1.08e-11 (f = 316.23 kHz)     │ 9.78e-10 (f = 316.23 kHz)     │
├───────────────────┼───────────────────────────────┼───────────────────────────────┤
│ nin to no (V/V)   │ 1.04e-08 (f = 79.433 kHz)     │ 9.54e-10 (f = 79.433 kHz)     │
╘═══════════════════╧═══════════════════════════════╧═══════════════════════════════╛
Simulating multiple input files together
----------------------------------------
Multiple input or output files may be specified in the ``zero liso`` call. These are simulated
separately and the results are merged together such that they can be plotted on one graph if
possible. The results can only be combined if the simulations contain the same frequency vectors;
otherwise, an error is displayed and the program exits.
This can be useful, for example, for simulating similar circuits with different component values on
one graph. Each script is plotted with a different line style and a gradually lighter colour map.
Here is an example that shows the noise at an output node and the same noise referred to the input
on one plot:
.. code-block:: bash
$ zero liso noise1.fil noise2.fil
.. image:: /_static/liso-two-noises.svg
Contents of ``noise1.fil``:
.. code-block:: text
r r1 400k nin n1
r r2 400k n1 n2
r r3 50 n5 n3
r rs 230 n5 n6
r led 48.6 n6 gnd
c c1 20u n1 n3
c c2 10u n2 gnd
op op1 op27 n2 n3 n4
op op2 buf634 n4 n5 n5
freq log .003 300 1000
uinput nin 0
noise n6 sum
noisy all
Contents of ``noise2.fil``:
.. code-block:: text
r r1 400k nin n1
r r2 400k n1 n2
r r3 50 n5 n3
r rs 230 n5 n6
r led 48.6 n6 gnd
c c1 20u n1 n3
c c2 10u n2 gnd
op op1 op27 n2 n3 n4
op op2 buf634 n4 n5 n5
freq log .003 300 1000
uinput nin 0
inputnoise n6 sum
noisy all
Scaling response plots
----------------------
Responses can be scaled in either decibels or absolute values. The default is to scale in decibels
(``--resp-scale-db``, on by default), but this can be switched off with the ``--resp-scale-abs``
flag.
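For example, to plot responses with absolute scaling instead of decibels:
.. code-block:: bash
   $ zero liso /path/to/liso/script.fil --resp-scale-abs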
Saving figures
--------------
Figures can be saved using the ``--save-figure`` option, which must be followed by a file path.
The format of the figure is controlled by the specified file extension. For example, save PNGs, PDFs
and SVGs with ``--save-figure response.png``, ``--save-figure response.pdf`` and ``--save-figure response.svg``,
respectively.
The ``--save-figure`` option can be specified multiple times to save multiple figures, e.g.:
.. code-block:: bash
$ zero liso /path/to/liso/script.fil --save-figure response.png --save-figure response.pdf
Command reference
-----------------
.. click:: zero.__main__:liso
:prog: zero liso
:show-nested:
.. source: docs/cli/liso.rst
.. include:: /defs.txt
#######################
Component library tools
#######################
|Zero|'s command line interface can be used to search the :class:`op-amp <.OpAmp>`
library bundled with the project.
Listing the user library file path
----------------------------------
The built-in op-amp definitions can be supplemented or overridden by a user-defined
op-amp library. This library is stored within the user's home directory in a location
that depends on the operating system.
The path to this file can be listed with the command ``zero library path``.
Creating a user library
-----------------------
An empty user library can be created with ``zero library create``.
Opening the user library for editing
------------------------------------
The user library can be opened with the command ``zero library edit``.
Removing the user library
-------------------------
The user library can be removed with ``zero library remove``.
Showing the library
-------------------
The combined contents of the built-in library and any user-defined additions or overrides can be
printed to the screen with ``zero library show``. For large libraries, it is often useful to
specify the ``--paged`` flag to allow the contents to be navigated.
Search queries
--------------
Search queries are specified as a set of declarative filters after the ``zero library search``
command. |Zero| implements an expression parser which allows queries to be
arbitrarily long and complex, e.g.:
.. command-output:: zero library search "model != OP* & ((vnoise <= 2n & vcorner < 10) | (vnoise <= 25n & inoise < 100f & icorner < 100))"
The expression must be defined on one line. Whitespace is ignored. Where values are specified,
such as "1n", these are parsed by :class:`.Quantity`
(see :ref:`Parsing and displaying quantities <format/index:Parsing and displaying quantities>`).
Where a string comparison is made, e.g. with ``model``, wildcards are supported:
``*``
Match any number of characters (including zero), e.g. ``OP*`` would match ``OP27``, ``OP37``,
``OP227``, etc.
``?``
Match a single character, e.g. ``LT1?28`` would match ``LT1028`` and ``LT1128`` but not
``LT10028``.
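For example, the following query uses both wildcards (the quotes prevent the shell from expanding
``*``):
.. code-block:: bash
   $ zero library search "model == LT1?28 | model == OP2*"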
Available parameters
~~~~~~~~~~~~~~~~~~~~
The following op-amp library parameters can be searched:
``model``
Model name, e.g. `OP27`.
``a0``
Open loop gain.
``gbw``
Gain-bandwidth product.
``delay``
Delay.
``vnoise``
Flat voltage noise.
``vcorner``
Voltage noise corner frequency.
``inoise``
Flat current noise.
``icorner``
Current noise corner frequency.
``vmax``
Maximum output voltage.
``imax``
Maximum output current.
``sr``
Slew rate.
Operators
~~~~~~~~~
Expressions can use the following operators:
``==``
Equal.
``!=``
Not equal.
``<``
Less than.
``<=``
Less than or equal.
``>``
Greater than.
``>=``
Greater than or equal.
``&``
Logical AND.
``|``
Logical OR.
Groups
~~~~~~
Parentheses may be used to delimit groups:
.. code-block:: text
(vnoise < 10n & inoise < 10p) | (vnoise < 100n & inoise < 1p)
Display
~~~~~~~
The results are by default displayed in a table. The rows are sorted based on the order in which the
parameters are defined in the search query, from left to right, with the leftmost parameter being
sorted last. The default sort direction is defined based on the parameter. The sort direction can be
specified explicitly as ``ASC`` (ascending) or ``DESC`` (descending) with the corresponding
``--sort`` parameter:
================== =========== =================
Flag Parameter Default direction
================== =========== =================
``--sort-a0`` ``a0`` descending
``--sort-gbw`` ``gbw`` descending
``--sort-delay`` ``delay`` ascending
``--sort-vnoise`` ``vnoise`` ascending
``--sort-vcorner`` ``vcorner`` ascending
``--sort-inoise`` ``inoise`` ascending
``--sort-icorner`` ``icorner`` ascending
``--sort-vmax`` ``vmax`` descending
``--sort-imax`` ``imax`` descending
``--sort-sr`` ``sr`` ascending
================== =========== =================
Parameters that are not explicitly searched are not ordered.
The display of the results table can be disabled using the ``--no-show-table`` flag. The results
can also be saved into a text file by specifying it with ``--save-data``. The specified file
extension will be used to guess the format to use, e.g. `csv` for comma-separated values or `txt`
for tab-separated values.
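For example, to run a search without displaying the table and save the results as comma-separated
values (the file name here is arbitrary):
.. code-block:: bash
   $ zero library search "vnoise < 10n & inoise < 10p" --no-show-table --save-data results.csv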
Results can also be plotted. The flags ``--plot-voltage-noise``, ``--plot-current-noise`` and
``--plot-gain`` can be used to plot the voltage and current noise or open loop gain of the op-amp,
respectively. Generated plots can also be saved by specifying a filename (or multiple filenames,
if you like) with the ``--save-voltage-noise-figure``, ``--save-current-noise-figure`` and
``--save-gain-figure`` options, respectively. Figures can be saved without being displayed with
``--no-plot-voltage-noise``, ``--no-plot-current-noise`` and ``--no-plot-gain``, respectively.
The following command will produce the plot below.
.. code-block:: bash
$ zero library search "gbw > 800M & ((vnoise < 10n & inoise < 10p) | (vnoise < 100n & inoise < 1p)) & model != OP00" --plot-gain --fstop 1M
.. image:: /_static/cli-opamp-gain.svg
Command reference
-----------------
.. click:: zero.__main__:library
:prog: zero library
:show-nested:
.. source: docs/cli/library.rst
.. include:: /defs.txt
##########
Datasheets
##########
|Zero|'s command line interface can be used to download and display datasheets using
`Octopart <https://octopart.com/>`__'s API.
Searching for parts
-------------------
Specify a search term like this:
.. code-block:: bash
$ zero datasheet "OP27"
Partial matches are made based on the search string by default. To force exact matches only,
specify ``--exact``.
If there are multiple parts found, a list is displayed and the user is prompted to enter a
number corresponding to the part to display. Once a part is selected, either its datasheet is
shown or, in the event that there are multiple datasheets available for the specified part, the
user is again prompted to choose a datasheet.
The selected datasheet is downloaded and displayed using the default viewer. To download the
datasheet without displaying it, use the ``--download-only`` flag and set the ``--path`` option to
the path to save the file. If no ``--path`` is specified, the datasheet is saved to a temporary
location and the location is printed to the screen.
To download and display the first part's latest datasheet, specify the ``--first`` flag, e.g.:
.. code-block:: bash
$ zero datasheet "OP27" --first
This will immediately download and display the latest OP27 datasheet.
Updating the API endpoint and key
---------------------------------
|Zero| comes bundled with an API key which is free for any user to use. If for some reason this
API key is no longer available, a new key can be specified in the
:ref:`configuration file <configuration/index:Configuration>`.
Command reference
-----------------
.. click:: zero.__main__:datasheet
:prog: zero datasheet
:show-nested:
.. source: docs/cli/datasheets.rst
.. include:: /defs.txt
Solutions
=========
.. code-block:: python
>>> from zero.solution import Solution
The :class:`.Solution` class provides a mechanism for storing, displaying and saving the output of
an :ref:`analysis <analyses/index:Analyses>`; these are usually :ref:`responses
<data/index:Responses>` and :ref:`noise spectral densities <data/index:Noise spectral densities>`.
Retrieving functions
--------------------
Solutions contain methods to retrieve functions contained within those solutions using a variety of
filters. The methods :meth:`.filter_responses`, :meth:`.filter_noise` and :meth:`.filter_noise_sums`
provide ways to match functions against their sources, sinks, :ref:`groups <solution/index:Groups>`
and :ref:`labels <data/index:Labels>`. These methods return a :class:`dict` containing the matched
functions in lists keyed by their group names (see :ref:`Groups <solution/index:Groups>`).
To retrieve an individual function directly, three convenience methods are available:
:meth:`.get_response`, :meth:`~.Solution.get_noise` and :meth:`~.Solution.get_noise_sum`. These take
as arguments the source, sink, group and/or label of the :class:`~.data.Response`,
:class:`~.data.NoiseDensity` or :class:`~.data.MultiNoiseDensity` to retrieve. The source and sink
in :meth:`.get_response` and the sink in :meth:`~.Solution.get_noise` and
:meth:`~.Solution.get_noise_sum` can be :class:`components <.Component>` or :class:`nodes <.Node>`
or names, while the source in :meth:`~.Solution.get_noise` can be a :class:`~.components.Noise` or
:ref:`noise specifier <solution/index:Specifying noise sources and sinks>` such as ``V(op1)``.
Sources cannot be searched against when using :meth:`~.Solution.get_noise_sum`. You can use these
convenience methods to retrieve a function when you know enough information about it to match it
amongst the solution's functions. If multiple functions are found as a result of the filters you
provide, a :class:`ValueError` is thrown.
The table below lists the available filters for the ``filter_`` methods for each function type.
With the exception of the multi-valued filters, i.e. ``sources``, ``sinks``, ``groups`` and
``labels``, these parameters are also available when using the ``get_`` methods.
=========== ============================ ========= ===== ==========
Filter      Possible values              Responses Noise Noise sums
=========== ============================ ========= ===== ==========
``source``  Source :class:`.Component`,  ✓         ✓     ✗
            :class:`.Node` or
            :class:`.Noise`
``sources`` :class:`List <list>` of      ✓         ✓     ✗
            sources, or ``all`` for all
            sources
``sink``    Sink :class:`.Component`,    ✓         ✓     ✓
            :class:`.Node`
``sinks``   :class:`List <list>` of      ✓         ✓     ✓
            sinks, or ``all`` for all
            sinks
``group``   Function group name          ✓         ✓     ✓
            (:class:`str`)
``groups``  :class:`List <list>` of      ✓         ✓     ✓
            group names, or ``all`` for
            all groups
``label``   Function label               ✓         ✓     ✓
            (:class:`str`)
``labels``  :class:`List <list>` of      ✓         ✓     ✓
            labels, or ``all`` for all
            labels
=========== ============================ ========= ===== ==========
Specifying response sources and sinks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:class:`~.data.Response` sources and sinks, as well as :class:`~.components.Noise` sinks specified
in :meth:`~.Solution.get_noise`, are always components or nodes. You can specify these using either
the corresponding :class:`.Component` or :class:`.Node` objects or by specifying their name as a string.
Assuming that a circuit is built in the following way...
.. code-block:: python
>>> import numpy as np
>>> from zero import Circuit
>>> from zero.analysis import AcSignalAnalysis
>>> circuit = Circuit()
>>> circuit.add_opamp(name="op1", model="OP27", node1="gnd", node2="nin", node3="nout")
>>> circuit.add_resistor(name="r1", value="1k", node1="nin", node2="nout")
>>> signal_analysis = AcSignalAnalysis(circuit)
>>> solution = signal_analysis.calculate(frequencies=np.logspace(0, 4, 1001), input_type="voltage", node="nin")
...responses between the input node and various nodes and components can be retrieved in the
following ways:
.. code-block:: python
>>> nin = circuit["nin"] # get the input node object
>>> nout = circuit["nout"] # get the output node object
>>> print(solution.get_response(nin, nout)) # response between input and output nodes
nin to nout (V/V)
>>> print(solution.get_response("nin", "nout")) # alternative string specifier
nin to nout (V/V)
>>> print(solution.get_response("nin", "r1")) # response between input node and resistor current (note the units)
n1 to r1 (A/V)
>>> print(solution.get_response(label="nin to r1 (A/V)")) # label specifier
n1 to r1 (A/V)
Specifying noise sources and sinks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to retrieve a noise function from a solution, you must specify the noise source in
:meth:`~.Solution.get_noise`. Noise sources can either be specified using their :class:`noise object
<.components.Noise>` or by building a noise specifier string. Noise sinks are specified in the same
way as response sinks (:ref:`see above <solution/index:Specifying response sources and sinks>`).
Specifying the noise source by its object involves first retrieving the component that produces the
noise. Each component holds its noise sources in its properties. For example, op-amps have voltage noise at their output and current noise at their
inverting and non-inverting inputs. Assuming the op-amp is referenced by ``op1``, these can be
retrieved using ``op1.voltage_noise``, ``op1.inv_current_noise`` and ``op1.non_inv_current_noise``,
respectively.
An alternative approach is to use a noise specifier string. These are strings constructed in the
form ``prefix(component-name[, node-name])``, with the prefix representing the type of noise as
shown in this table:
============================ ====== ==============
Noise type Prefix Example
============================ ====== ==============
Resistor (Johnson) ``R`` ``R(r1)``
Op-amp voltage ``V`` ``V(op1)``
Op-amp non-inverting current ``I`` ``I(op1, np)``
Op-amp inverting current ``I`` ``I(op1, nm)``
============================ ====== ==============
Assuming that a circuit is built in the following way...
.. code-block:: python
>>> import numpy as np
>>> from zero import Circuit
>>> from zero.analysis import AcNoiseAnalysis
>>> circuit = Circuit()
>>> circuit.add_opamp(name="op1", model="OP27", node1="gnd", node2="nin", node3="nout")
>>> circuit.add_resistor(name="r1", value="1k", node1="nin", node2="nout")
>>> noise_analysis = AcNoiseAnalysis(circuit)
>>> solution = noise_analysis.calculate(frequencies=np.logspace(0, 4, 1001), input_type="voltage", node="nin", sink="nout")
...noise functions can be retrieved with e.g.:
.. code-block:: python
>>> op1 = circuit["op1"] # get the op1 object
>>> print(solution.get_noise(op1.voltage_noise, "nout")) # voltage noise at op1
V(op1) to nout
>>> print(solution.get_noise("V(op1)", "nout")) # alternative string specifier
V(op1) to nout
>>> print(solution.get_noise(op1.inv_current_noise, "nout")) # current noise at op1's inverting input
I(op1, nin) to nout
>>> print(solution.get_noise("I(op1, nin)", "nout")) # alternative string specifier
I(op1, nin) to nout
>>> print(solution.get_noise(label="I(op1, nin) to nout")) # label specifier
I(op1, nin) to nout
Groups
------
Solutions support grouping as a means to keep different sets of functions separate, such as those
from different analyses. In most cases, groups do not need to be considered when accessing,
manipulating and plotting a solution's functions, but they become important when solutions are
:ref:`combined <solution/index:Combining solutions>`.
By default, functions are added to a solution's default group. Functions can be added to another
group by passing the ``group`` parameter to one of :meth:`.add_response`,
:meth:`~.Solution.add_noise` or :meth:`.add_noise_sum`. Groups can be renamed with
:meth:`.rename_group` and merged with :meth:`.merge_group`. The functions in the default group can
be moved to a new group with :meth:`.move_default_group_functions`.
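As a sketch, assuming ``response`` holds a previously computed :class:`~.data.Response` and that
:meth:`.rename_group` takes the old and new group names (check the API reference for the exact
signatures):
.. code-block:: python
   >>> solution.add_response(response, group="alternative")  # add to a non-default group
   >>> solution.rename_group("alternative", "tweaked circuit")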
Plotting with groups
~~~~~~~~~~~~~~~~~~~~
When a solution containing multiple groups is plotted, the functions in each group have different
formatting applied. The colours of functions within each group follow roughly the same progression
as the first group, but with gradually lighter shades and different line styles.
To plot functions from different groups without different shades or line styles, you should combine
them into the same group (see above).
Combining solutions
-------------------
Solutions from different analyses can be combined and plotted together. The method
:meth:`~.Solution.combine` takes as an argument another solution or multiple solutions, and returns
a new solution containing the combined functions.
.. warning::
In order to be combined, the solutions must have identical frequency vectors.
Solutions can be combined in two ways as determined by :meth:`.combine`'s ``merge_groups`` flag.
When ``merge_groups`` is ``False`` (the default), the groups in each source solution are copied into
the resulting solution. The default group in each source solution is copied into a group with the
name of the corresponding source solution, and other groups have the corresponding source solution's
name appended in brackets. This form of combination supports the ``sol_a + sol_b`` syntax. When
``merge_groups`` is ``True``, the groups in each source solution are merged in the resulting
solution. This requires that the source solutions contain *no* identical functions in cases where
the group names are the same (including the default group).
The resulting solution's group names can be changed using :meth:`.rename_group`.
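For example, given two solutions ``sol_a`` and ``sol_b`` with identical frequency vectors:
.. code-block:: python
   >>> combined = sol_a + sol_b  # groups kept separate (merge_groups=False)
   >>> merged = sol_a.combine(sol_b, merge_groups=True)  # identically named groups merged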
.. hint::
Solutions containing different types of function can be combined, such as solutions with the
results of :ref:`signal analyses <analyses/ac/signal:Small AC signal analysis>` and solutions
with the results of :ref:`noise analyses <analyses/ac/noise:Small AC noise analysis>`. In order
to plot all of the combined solution's functions in such a case, you must call both
:meth:`.plot_responses` and :meth:`.plot_noise`.
Here is an example of solution combination using a :ref:`LISO model <liso/index:LISO compatibility>`
of an RF summing box with two inputs and one output:
.. plot::
:include-source:
from zero.liso import LisoInputParser
# Create parser.
parser = LisoInputParser()
base_circuit = """
l l2 420n nlf nout
c c4 47p nlf nout
c c1 1n nrf gnd
r r1 1k nrf gnd
l l1 600n nrf n_l1_c2
c c2 330p n_l1_c2 n_c2_c3
c c3 33p n_c2_c3 nout
c load 20p nout gnd
freq log 100k 100M 1000
uoutput nout
"""
# Parse the base circuit.
parser.parse(base_circuit)
# Set the circuit input to the low frequency port.
parser.parse("uinput nlf 50")
# Ground the unused input.
parser.parse("r nrfsrc 5 nrf gnd")
# Calculate the solution.
solutionlf = parser.solution()
solutionlf.name = "LF Circuit"
# Reset the parser's state.
parser.reset()
# Parse the base circuit.
parser.parse(base_circuit)
# Set the input to the radio frequency port.
parser.parse("uinput nrf 50")
# Ground the unused input.
parser.parse("r nlfsrc 5 nlf gnd")
# Calculate the solution.
solutionrf = parser.solution()
solutionrf.name = "RF Circuit"
# Combine the solutions. By default, this keeps the functions from each source solution in
# different groups in the resulting solution. This makes the plot show the functions with
# different styles and shows the source solution's name as a suffix on each legend label.
solution = solutionlf.combine(solutionrf)
# Plot.
solution.plot()
solution.show()
.. hint::
The above example makes a call to :meth:`~.Solution.plot`. This relies on :ref:`default
functions <solution/index:Default functions>` having been set, in this case by the :ref:`LISO
compatibility module <liso/index:LISO compatibility>`, which is normally not the case when a
circuit is constructed and simulated natively. In such cases, calls to :meth:`.plot_responses`
and :meth:`.plot_noise` with filter parameters are usually required.
Default functions
-----------------
Default functions are functions that are plotted when a call is made to :meth:`.plot_responses` or
:meth:`.plot_noise` without any filters. Functions are not normally marked as default when an
:ref:`analysis <analyses/index:Analyses>` builds a solution.
A function can be made default by setting the ``default`` flag to ``True`` when calling
:meth:`~.Solution.add_response`, :meth:`~.Solution.add_noise` or :meth:`~.Solution.add_noise_sum`.
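For example, assuming ``response`` holds a previously computed :class:`~.data.Response`:
.. code-block:: python
   >>> solution.add_response(response, default=True)  # plotted by plot_responses() without filters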
.. note::
When a :ref:`LISO script <liso/index:LISO compatibility>` is simulated by |Zero|, the functions
plotted by the LISO script are marked as defaults. This behaviour assists when :ref:`comparing a
LISO solution to that of Zero <cli/liso:Comparing a native simulation to LISO>`, since LISO
does not output every possible response or noise whereas |Zero| does. In this case, only the
functions that are requested in the LISO script are set as defaults in the |Zero| solution, so
that only the relevant functions are compared.
.. source: docs/solution/index.rst
.. include:: /defs.txt
Information for developers
==========================
Submission of small bug fixes and features is encouraged. For larger features, please contact the
author to discuss feasibility and structure.
Code style
~~~~~~~~~~
Follow `PEP 8`_ where possible.
Documentation style
~~~~~~~~~~~~~~~~~~~
Use `NumPy docstring format`_. Language and grammar should follow `Google style`_.
Development environment
~~~~~~~~~~~~~~~~~~~~~~~
A Visual Studio Code configuration file is provided in the project root when the project is checked
out via ``git``. This sets some code formatting settings which should be followed. The configuration
file is used automatically if the project is opened in Visual Studio Code from its root directory.
It may be useful to run |Zero| within a ``conda`` or ``pipenv`` environment to allow for separation
of dependencies from your system and from other projects. In both cases it is still recommended to
install |Zero| via ``pip``. For rapid development, it is highly recommended to make the project
`editable` so changes to project files reflect immediately in the library and CLI, and to install
the extra `dev` dependencies to allow you to build the documentation and run code linting tools:
.. code-block:: bash
pip install -e .[dev]
Merge requests
~~~~~~~~~~~~~~
If you have code to submit for inclusion in |Zero|, please open a `merge request`_ on GitLab
targeting the ``develop`` branch. To keep the git repository's merge graph clean, ideally you should
make your changes on a branch with one of the following conventions depending on what kind of change
you make:
- ``feature/my-feature`` for new features
- ``hotfix/my-fix`` for bug fixes
Replace ``my-feature`` or ``my-fix`` with an appropriate short description. This naming scheme
roughly follows that presented in `A successful Git branching model`_.
Creating new releases
~~~~~~~~~~~~~~~~~~~~~
The steps below should be followed when creating a new release:
#. Ensure all tests pass and all bundled examples work as intended, and all documentation is
up-to-date.
#. Create a new release branch from ``develop``, where ``x.x.x`` is the intended new version number:
``git checkout -b release/x.x.x develop``.
#. Update default user config and component library ``distributed_with`` keys to match the new
intended version number.
#. Commit the changes.
#. Checkout ``develop`` branch then merge release without fast-forwarding:
``git merge --no-ff release/x.x.x``.
#. Checkout ``master`` branch then merge ``release/x.x.x`` without fast-forwarding:
``git merge --no-ff release/x.x.x``.
#. Tag the release on ``master`` with the version: ``git tag -a x.x.x``.
#. Delete the release branch: ``git branch -d release/x.x.x``.
#. Push all changes to ``master`` and ``develop`` and the new tag to origin.
Note that when a new tag is pushed to the `ligo.org` GitLab server, the CI runner automatically
creates and uploads a new PyPI release.
Updating PyPI (pip) package
---------------------------
This requires `twine <https://packaging.python.org/key_projects/#twine>`__ and the credentials for
the |Zero| PyPI project.
By default, the GitLab CI runner will deploy a PyPI package automatically whenever a new tag is
created. The instructions below are for when this must be done manually:
#. Go to the source root directory.
#. Checkout the ``master`` branch (so the release uses the correct tag).
#. Remove previously generated distribution files:
``rm -rf build dist``
#. Create new distribution files:
``python setup.py sdist bdist_wheel``
#. (Optional) Upload distribution files to PyPI test server, entering the required credentials when
prompted:
``python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/*``
You can then check the package is uploaded properly by viewing the `Zero project on the PyPI test server`_.
You can also check that it installs correctly with:
``pip install --index-url https://test.pypi.org/simple/ --no-deps zero``
Note: even if everything installs correctly, the test package will not work correctly due to lack
of dependencies (forced by the ``--no-deps`` flag, since they are not all available on the PyPI
test server).
#. Upload distribution files to PyPI, entering the required credentials when prompted:
``python -m twine upload dist/*``
#. Verify everything is up-to-date on `PyPI <https://pypi.org/project/zero/>`__.
API documentation
~~~~~~~~~~~~~~~~~
.. toctree::
:maxdepth: 2
api/modules
.. _PEP 8: https://www.python.org/dev/peps/pep-0008/
.. _NumPy docstring format: https://numpydoc.readthedocs.io/en/latest/example.html
.. _Google style: https://developers.google.com/style/
.. _merge request: https://git.ligo.org/sean-leavey/zero/merge_requests
.. _A successful Git branching model: https://nvie.com/posts/a-successful-git-branching-model/
.. _Zero project on the PyPI test server: https://test.pypi.org/project/zero/
.. source: docs/developers/index.rst
.. include:: /defs.txt
.. currentmodule:: zero.components
Op-amps
-------
:class:`Op-amps <OpAmp>` in |Zero| take differential inputs and provide a
single output.
Voltage gain
============
The :meth:`voltage gain <OpAmp.gain>` of an op-amp is defined by its open
loop gain (`a0`), gain-bandwidth product (`gbw`), delay and poles or
zeros. The gain is a function of frequency.
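As a sketch, these parameters combine roughly as in the standard LISO-style op-amp model below,
where :math:`\Delta` is the delay and :math:`f_{z,j}` and :math:`f_{p,k}` are the zero and pole
frequencies (this is an illustration; the exact implementation may differ in detail):
.. math::
   H(f) = \frac{a_0}{1 + \mathrm{i} a_0 f / f_\mathrm{gbw}} \,
          \mathrm{e}^{-2 \pi \mathrm{i} f \Delta}
          \prod_j \left( 1 + \mathrm{i} \frac{f}{f_{z,j}} \right)
          \prod_k \left( 1 + \mathrm{i} \frac{f}{f_{p,k}} \right)^{-1}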
Special case: voltage followers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When an op-amp is configured as a `voltage follower` (otherwise known as a
`buffer`), where the output node is the same as the inverting input node,
the voltage gain is modified.
Noise
=====
Op-amps produce voltage noise across their input and output nodes, and current noise is present at
their input nodes. See :ref:`components/noise:Op-amp noise` for more details.
Library
=======
.. source: docs/components/op-amps.rst
.. include:: /defs.txt
.. currentmodule:: zero.components
Components
==========
.. code-block:: python
>>> from zero.components import Resistor, Capacitor, Inductor, OpAmp
.. toctree::
:maxdepth: 2
passive-components
op-amps
noise
What is a 'component'?
----------------------
A :class:`component <.Component>` represents a circuit device which sources or sinks current, and
produces voltage drops between its :class:`nodes <.Node>`. :class:`Passive <.PassiveComponent>`
components such as :class:`resistors <.Resistor>`, :class:`capacitors <.Capacitor>` and
:class:`inductors <.Inductor>` do not produce or amplify signals, but only apply an impedance to
their input. Active components such as :class:`op-amps <.OpAmp>` can source current.
Instantiated components may be added to :ref:`circuits <circuit/index:Circuits>` using
:meth:`.add_component`; however, the methods :meth:`.add_resistor`, :meth:`.add_capacitor`,
:meth:`.add_inductor` and :meth:`.add_opamp` allow components to be created and added to a circuit
at the same time, and avoid the need to import them directly.
.. note::
The recommended way to add components to a circuit is to use the :meth:`.add_resistor`,
:meth:`.add_capacitor`, :meth:`.add_inductor` and :meth:`.add_opamp` methods provided by
:class:`.Circuit`. These offer the same functionality as when creating
component objects directly, but avoid the need to directly import the component classes into
your script.
Component names
---------------
Components may be provided with a name on creation using the ``name`` keyword argument, i.e.
.. code-block:: python
>>> r = Resistor(name="r1", value="430k", node1="n1", node2="n2")
or
.. code-block:: python
>>> from zero import Circuit
>>> circuit = Circuit()
>>> circuit.add_resistor(name="rin", value="430k", node1="n1", node2="n2")
Names can also be set using the :attr:`~.Component.name` property:
.. code-block:: python
>>> r.name = "r1"
Component names can be used to retrieve components from circuits:
.. code-block:: python
>>> r = circuit["rin"]
>>> print(r)
rin [in=n1, out=n2, R=430.00k]
Component names must be unique within a given circuit. When trying to add a component to a circuit
where its name is already used by another circuit component, a :class:`ValueError` is raised.
.. note::
Component names do not need to be unique within the global namespace. That means components with
different values or nodes can have the same name as long as they are not part of the same
circuit.
Naming of components is not required; however, when a component is added to a circuit it is assigned
a name if it does not yet have one. This name uses a prefix followed by a number (the lowest
positive integer not resulting in a name which matches that of a component already present in the
circuit). The character(s) used depend on the component type:
========= ====== =======
Component Prefix Example
========= ====== =======
Resistor r r1
Capacitor c c1
Inductor l l1
Op-amp op op1
========= ====== =======
Setting a component's value
---------------------------
A passive component's :attr:`~.PassiveComponent.value` may be altered. First, get the component:
.. code:: python
c1 = circuit["c1"]
You can then set the value using the object's :attr:`~.PassiveComponent.value` attribute:
.. code:: python
c1.value = "1u"
In the above example, the string is parsed by :class:`.Quantity` into an appropriate
:class:`float` representation. You may also specify a :class:`float` or :class:`int` directly:
.. code:: python
c1.value = 1e-6
You may also provide a string with units or scales:
.. code:: python
# Quantity with scale factor and unit.
c1.value = "2.2nF"
The above value is parsed as ``2.2e-9``, with unit ``F``. The unit is stored alongside the numeric
part within the object, and the unit will be printed alongside the component's value when it is
displayed.
.. note::
Units are just for display and are not used for any calculations. Be careful when specifying
units which differ from those used internally by |Zero|.
.. source: docs/components/index.rst
.. currentmodule:: zero.components
Passive components
------------------
Passive components do not produce or amplify signals, but only apply an impedance to their input.
They have two nodes, :attr:`~.PassiveComponent.node1` and :attr:`~.PassiveComponent.node2`. The node
order does not matter. Passive components have a complex, frequency dependent
:meth:`~.PassiveComponent.impedance`; the specific component type - resistor, capacitor or inductor
- governs how this impedance behaves as a function of frequency.
Resistors
=========
.. code-block:: python
>>> from zero.components import Resistor
Resistors have a real impedance, i.e. a resistance, with no frequency dependence. This resistance
has units of ohm (Ξ©). A resistor object can be instantiated by providing the resistance and the name
of two nodes:
.. code-block:: python
>>> r = Resistor(value="430k", node1="n1", node2="n2")
The resistance can be changed using the resistor's :meth:`~Resistor.resistance` property:
.. code-block:: python
>>> r.resistance = "1.1M"
In a circuit, resistors produce :class:`Johnson noise <.ResistorJohnsonNoise>`.
Capacitors
==========
.. code-block:: python
>>> from zero.components import Capacitor
Capacitors have an imaginary, frequency dependent impedance determined by their capacitance in units
of farad (F). A capacitor object can be instantiated by providing the capacitance and the name of
two nodes:
.. code-block:: python
>>> c = Capacitor(value="47n", node1="n1", node2="n2")
The capacitance can be changed using the capacitor's :meth:`~Capacitor.capacitance` property:
.. code-block:: python
>>> c.capacitance = "100n"
Capacitors are considered ideal and do not produce noise.
Inductors
=========
.. code-block:: python
>>> from zero.components import Inductor
Inductors have an imaginary, frequency dependent impedance determined by their inductance in units of
henry (H). An inductor object can be instantiated by providing the inductance and the name of two
nodes:
.. code-block:: python
>>> l = Inductor(value="1.6u", node1="n1", node2="n2")
The inductance can be changed using the inductor's :meth:`~Inductor.inductance` property:
.. code-block:: python
>>> l.inductance = "2.2u"
Inductors are considered ideal and do not produce noise.
A pair of inductors can also be configured as mutual inductors, allowing for transformers to be
simulated.
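As a purely illustrative sketch (the method name ``set_inductor_coupling`` and its
``coupling_factor`` parameter are assumptions; check the API reference for the exact call):
.. code-block:: python
   >>> from zero import Circuit
   >>> circuit = Circuit()
   >>> circuit.add_inductor(name="l1", value="600u", node1="n1", node2="n2")
   >>> circuit.add_inductor(name="l2", value="600u", node1="n3", node2="n4")
   >>> circuit.set_inductor_coupling("l1", "l2", coupling_factor=0.95)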
.. source: docs/components/passive-components.rst
.. include:: /defs.txt
.. currentmodule:: zero.components
Noise
=====
Some components in |Zero| produce noise, such as resistors (:ref:`components/noise:Johnson noise`)
and op-amps (:ref:`voltage <components/noise:Op-amp voltage noise>` and :ref:`current
<components/noise:Op-amp current noise>` noise). Other components such as :ref:`capacitors
<components/passive-components:Capacitors>` and :ref:`inductors
<components/passive-components:Inductors>` do not produce noise by default, although noise can be
:ref:`added by the user <components/noise:Defining new noise sources>`.
Johnson noise
-------------
`Johnson noise <https://en.wikipedia.org/wiki/Johnson%E2%80%93Nyquist_noise>`__ is a type of
voltage noise in resistors that arises from thermal agitation of charge carriers. This is a function
of temperature but has no dependence on applied voltage or current.
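Its amplitude spectral density is flat in frequency:
.. math::
   V_n = \sqrt{4 k_\mathrm{B} T R}
where :math:`k_\mathrm{B}` is Boltzmann's constant, :math:`T` is the temperature and :math:`R` is
the resistance.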
The default temperature assumed in |Zero| calculations is set in the :ref:`configuration
<configuration/index:Configuration>`.
Op-amp noise
------------
Op-amps produce voltage noise across their input and output nodes, and current noise is present at
their input nodes.
Op-amp voltage noise
~~~~~~~~~~~~~~~~~~~~
Op-amps produce voltage noise across their input and output nodes. The noise is a function of
frequency, usually with a flat component at all frequencies and a component rising towards low
frequencies. The cross-over between these two noise components is typically around 1 to 100 Hz,
though this varies depending on the type of op-amp. BJT-based op-amps typically have the lowest
voltage noise.
Op-amp current noise
~~~~~~~~~~~~~~~~~~~~
Current noise is present at op-amps' inputs. The noise is a function of frequency, usually with a
flat component at all frequencies and a component rising towards low frequencies. The cross-over
between these two noise components is typically around 100 Hz to 1 kHz, though this varies depending
on the type of op-amp. FET-based op-amps typically have the lowest current noise.
Current noise is converted to voltage noise by resistors connected to the op-amp inputs. That means
that in a standard op-amp circuit with a feedback resistor, the current noise scales with the
feedback resistance.
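For example, in an inverting or transimpedance configuration with feedback resistance
:math:`R_\mathrm{f}`, the op-amp's input current noise appears at the output as an equivalent
voltage noise of approximately:
.. math::
   V_n(f) \approx I_n(f) \, R_\mathrm{f}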
In |Zero|, current noise is considered identical for both input nodes. This is usually a valid
assumption for voltage-feedback op-amps, which are the type that |Zero| models.
Defining new noise sources
--------------------------
New noise sources can be defined in |Zero| and added to components. The noise will then appear in
:ref:`noise analyses <analyses/ac/noise:Small AC noise analysis>`.
Noise sources can be created by subclassing one of the available noise types: :class:`.VoltageNoise`
or :class:`.CurrentNoise`. The implementation must define a ``label`` property and set a method to
call when computing the noise. This method will receive the current frequency vector and it must
return the corresponding noise.
Here is an example of defining a resistor current noise source and using it in a circuit:
.. plot::
:include-source:
import numpy as np
from zero import Circuit
from zero.analysis import AcNoiseAnalysis
from zero.noise import VoltageNoise
# Create a new noise type.
class ResistorCurrentNoise(VoltageNoise):
"""Resistor current noise source.
This models resistor current noise. See e.g. https://dcc.ligo.org/LIGO-T0900200/public
for more details. This noise depends on resistor composition and on its current. Be
careful when using this noise - it generally does not transfer to different circuits
with identical resistors as it depends on the voltage drop across the resistor.
Parameters
----------
vnoise : :class:`float`
The voltage noise at the specified frequency (V/sqrt(Hz)).
frequency : :class:`float`
The frequency at which the specified voltage noise is defined (Hz).
exponent : :class:`float`
The frequency exponent to use for calculating the frequency response.
"""
def __init__(self, vnoise, frequency=1.0, exponent=0.5, **kwargs):
super().__init__(**kwargs)
self.vnoise = vnoise
self.frequency = frequency
self.exponent = exponent
def noise_voltage(self, frequencies, **kwargs):
return self.vnoise * self.frequency / frequencies ** self.exponent
@property
def label(self):
return f"RE({self.component.name})"
# 1000 frequencies between 0.1 Hz to 10 kHz
frequencies = np.logspace(-1, 4, 1000)
# Create circuit object.
circuit = Circuit()
# Add components.
circuit.add_capacitor(value="10u", node1="gnd", node2="n1")
circuit.add_resistor(value="430", node1="n1", node2="nm", name="r1")
circuit.add_resistor(value="43k", node1="nm", node2="nout")
circuit.add_capacitor(value="47p", node1="nm", node2="nout")
circuit.add_library_opamp(model="LT1124", node1="gnd", node2="nm", node3="nout")
# Add resistor current noise to r1 with 10 nV/sqrt(Hz) at 1 Hz, with 1/sqrt(f) drop-off.
r1 = circuit["r1"]
r1.add_noise(ResistorCurrentNoise(vnoise=1e-8, frequency=1.0, exponent=0.5))
# Solve circuit.
analysis = AcNoiseAnalysis(circuit=circuit)
solution = analysis.calculate(frequencies=frequencies, input_type="voltage", node="n1",
sink="nout", incoherent_sum=True)
# Plot.
solution.plot_noise(sink="nout")
solution.show()
.. include:: /defs.txt
LISO output file parsing
========================
Known incompatibilities
-----------------------
Duplicate component and node names
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In LISO, nodes and components may share the same name, and the user is warned
that this may lead to confusion. In |Zero|, nodes and components cannot share
the same name.
Output coordinates
~~~~~~~~~~~~~~~~~~
The parser assumes all outputs are in ``db`` and ``degrees`` (noise columns are handled
appropriately, however). This produces incorrect results for outputs specified in other
coordinates. This is straightforward to fix but not yet implemented.
Differences in behaviour
------------------------
Input noise sinks
~~~~~~~~~~~~~~~~~
In LISO, input noise is always specified at the input `node`, and not the input `component`, even if
the circuit input is a current (i.e. ``iinput``). This makes no difference to the computed spectra,
but it does influence the labels used to plot the data. In |Zero| simulations, and in parsed LISO
output files, the input noise sink is always set to whatever the circuit input actually is - either
the input node in the case of ``uinput`` or the input component in the case of ``iinput``.
.. include:: /defs.txt
LISO input file parsing
=======================
Known incompatibilities
-----------------------
Duplicate component and node names
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In LISO, nodes and components may share the same name, and the user is warned that this may lead to
confusion. In |Zero|, nodes and components cannot share the same name.
Outputs
~~~~~~~
|Zero| does not support the ``deg+`` or ``deg-`` output coordinates. Please use ``deg`` instead. It
also throws an error when a LISO script's ``ioutput`` or ``uoutput`` commands contain only a phase
coordinate, e.g.:

.. code-block:: text

    uoutput nout:deg
Such outputs could in principle be handled by |Zero|, but it would add complexity to the
:class:`Solution` and :class:`Series` classes that is not worth the effort given how rare this type
of output is. In order to use such scripts with |Zero|, simply add a magnitude unit, e.g.

.. code-block:: text

    uoutput nout:db:deg
Root mode
~~~~~~~~~
|Zero| does not support LISO's root mode, meaning that the fitting tools provided in LISO for
responses and noise spectra are not replicated. It is suggested to instead use |Zero| with a Python
optimisation library such as `scipy.optimize
<https://docs.scipy.org/doc/scipy/reference/optimize.html>`_. Note that it is very important for
circuit responses and noise fitting to use a well-suited optimiser, particularly one that can fit in
log space. LISO's fitting library performs very well for this purpose.
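The following is a minimal sketch of such a fit using ``scipy.optimize.least_squares``, fitting a
single-pole response magnitude in log space; the model, parameter names and starting values are
illustrative assumptions, not part of |Zero|.

.. code-block:: python

    import numpy as np
    from scipy.optimize import least_squares

    # Hypothetical single-pole magnitude model; `measured` would normally come
    # from a Zero solution or a measurement.
    def residuals(params, frequencies, measured):
        gain, pole = params
        model = gain / np.sqrt(1 + (frequencies / pole) ** 2)
        # Fitting the logarithm weights all decades equally.
        return np.log(model) - np.log(measured)

    frequencies = np.logspace(0, 5, 200)
    measured = 10 / np.sqrt(1 + (frequencies / 1e3) ** 2)  # synthetic data

    result = least_squares(residuals, x0=[1.0, 100.0], args=(frequencies, measured))
    gain_fit, pole_fit = result.x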
Commands
~~~~~~~~
The following commands are not yet supported:
- :code:`factor` (input multiplicative factor)
- :code:`zin` (input impedance)
- :code:`opdiff` (plot op-amp input differential voltage)
- :code:`margin` (compute op-amp phase margin; replaced :code:`opstab` in LISO v1.78)
- :code:`sens` (print table of component sensitivities)
Here are some commands which will probably not be supported:
- other `max` or `min` based commands, e.g. :code:`maxinput`
- :code:`eagle` (produce EAGLE file)
- :code:`gnuterm`
- component :code:`C0805` (0805 capacitor with parasitic properties; not implemented in
favour of grouping components together with macros)
Op-amp library
~~~~~~~~~~~~~~
LISO's op-amp library format is not supported, but the full LISO library is bundled in |Zero|'s
native format. Among other features, |Zero|'s library improves on LISO's by allowing an ``alias``
setting where you can specify other op-amps with the same properties.

The parameters ``un``, ``uc``, ``in`` and ``ic`` have been renamed ``vnoise``, ``vcorner``,
``inoise`` and ``icorner``, respectively.
Submissions of op-amp parameters to |Zero|'s library are strongly encouraged
(see :ref:`contributing/index:Op-amp library additions`).
LISO Perl commands
~~~~~~~~~~~~~~~~~~
Commands used for running LISO in a loop with :code:`pfil` are not supported. Instead you can use
|Zero| as part of a Python script to run either LISO or native |Zero| simulations in a loop.
Differences in behaviour
------------------------
Command order
~~~~~~~~~~~~~
In LISO, the output must be specified *after* the components. In |Zero|, order is irrelevant.
`Noisy` command
~~~~~~~~~~~~~~~

.. code-block:: text

    noisy all|allr|allop|noise-source [all|allr|allop|noise-source] ...
The LISO manual states in section 7.3, regarding the noise sources used to calculate the
:code:`sum` output:

    Note also that all noise sources that are included in the `noise` instruction, i.e. those that
    are plotted individually, are automatically considered "noisy", i.e. they are always included in
    the sum.
In LISO, if the :code:`sum` output is present but there is no :code:`noisy` command, the following
error is displayed:

.. code-block:: text

    *** Error: No noisy components! (Try 'noisy all')
In |Zero|, the :code:`noisy` command does not need to be present because, by default (even in
LISO), the noise sources that contribute to the :code:`sum` output always include those specified
in the output itself. The :code:`noisy` command is available merely to add additional noise sources
to the :code:`sum` that are not explicitly plotted.

As the absence of a :code:`noisy` command in this case does not yield *different* results from
LISO, only an error in one case and a reasonable output in the other, this behaviour is not
considered a bug.
String lengths
~~~~~~~~~~~~~~
LISO has a limit of 16 characters for most strings (component names, op-amp types, node names,
etc.). In |Zero| the limit is effectively arbitrary.

.. hint::

    In the case of *mutual inductance* commands, the name is entirely ignored. This is used in LISO
    only for fitting routines, which are not part of |Zero|.
.. include:: /defs.txt
LISO compatibility
==================
|Zero| somewhat understands `LISO <https://wiki.projekt.uni-hannover.de/aei-geo-q/start/software/liso>`_
input and output files. It is also capable of running a locally available LISO binary
and then plotting its results.

.. note::

    In order to solve a circuit, |Zero| implicitly calculates responses to all sinks or noise from
    all sources, depending on the type of analysis. LISO, however, only outputs the functions
    specified as outputs or noise sources in the script. Instead of throwing away this extra data,
    |Zero| stores all calculated functions in its :ref:`solution <solution/index:Solutions>`.

    In order for the produced plots to be identical to those of LISO, the functions requested in
    LISO are set as `default` in the solution such that they are plotted by :meth:`.Solution.plot`.
    The other functions, however, are still available to be plotted by calling
    :meth:`.Solution.plot_responses` or :meth:`.Solution.plot_noise` with appropriate arguments.
Parsing LISO files
------------------

.. toctree::
    :maxdepth: 2

    input
    output
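As a quick illustration, a LISO input script can be parsed and solved directly from Python. The
snippet below is a minimal sketch; the script contents are an assumed example.

.. code-block:: python

    from zero.liso import LisoInputParser

    parser = LisoInputParser()
    # Parse a LISO script from text (see the parser docs for parsing a file path).
    parser.parse("""
    r r1 430 n1 nm
    r r2 43k nm nout
    op op1 lt1124 gnd nm nout

    freq log 1 100k 100

    uinput n1 0
    uoutput nout:db:deg
    """)

    solution = parser.solution()
    solution.plot()
    solution.show()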
.. include:: /defs.txt
.. currentmodule:: zero.data
###############
Data containers
###############

.. code-block:: python

    >>> from zero.data import Series, Response, NoiseDensity
|Zero| :ref:`analysis <analyses/index:Analyses>` results (responses and noise spectra) are stored
within `function` containers. These are relatively low-level objects that hold each function's data
(within a :ref:`series <data/index:Series>`), its frequency axis, and any metadata produced by the
analysis. These objects are able to plot themselves when provided a figure to draw to. They also
contain logic to compare themselves to other functions, to check for equivalence.
In normal circumstances, you should not need to directly interact with these objects; rather, you
can plot and save their underlying data using a :ref:`Solution <solution/index:Solutions>`.
Series
------
Underlying function data is stored in a :class:`.Series`. This contains two-dimensional data.
Series support basic mathematical operations such as multiplication, division and inversion.
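As a brief illustration, the sketch below constructs a series and scales it. The positional
``(x, y)`` constructor signature used here is an assumption; check the :class:`.Series` API
documentation for the exact signature.

.. code-block:: python

    import numpy as np
    from zero.data import Series

    # Minimal sketch; the positional (x, y) signature is an assumption - check
    # the Series API documentation.
    frequencies = np.logspace(0, 4, 5)
    series = Series(frequencies, 1 / frequencies)

    # Series support scalar multiplication and division.
    scaled = series * 10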
Functions
---------
Responses
~~~~~~~~~
:class:`Responses <.data.Response>` contain the response of a component or node to another component
or node. Each response contains references to the source and sink component or node, and its units.
The response's underlying complex data is stored in its :attr:`~.Response.complex_magnitude`
property. The magnitude and phase can be retrieved using the :attr:`~.Response.magnitude` and
:attr:`~.Response.phase` properties, respectively. The decibel-scaled magnitude can be retrieved
using :attr:`~.Response.db_magnitude`.

.. note::

    :attr:`~.Response.db_magnitude` is returned with power scaling, i.e.
    :math:`20 \log_{10} \left| x \right|` where :math:`x` is the complex response.

.. code-block:: python

    >>> response.complex_magnitude
    array([-1.44905660e+06+271698.11320755j, -1.28956730e+06+520929.0994604j ,
           -8.53524671e+05+742820.7338082j , -3.32179931e+05+622837.37024221j,
           -8.66146537e+04+349885.52751013j, -1.95460509e+04+170108.87173014j,
           -4.25456479e+03 +79773.08987768j, -9.18662496e+02 +37109.9690498j ,
           -1.98014980e+02 +17233.2022651j , -4.26654531e+01 +7999.77245092j])
    >>> response.db_magnitude
    array([123.37176609, 122.86535272, 121.07307338, 116.97464649,
           111.13682633, 104.67150284,  98.04946401,  91.39247246,
            84.72789306,  78.06167621])
    >>> response.phase
    array([169.38034472, 158.00343425, 138.96701713, 118.07248694,
           103.90412599,  96.55472157,  93.05288251,  91.41807544,
            90.65831778,  90.30557459])
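Given the scaling above, the decibel magnitude can be reproduced directly from the complex data;
this is a consistency check rather than part of the API:

.. code-block:: python

    >>> import numpy as np
    >>> np.allclose(response.db_magnitude, 20 * np.log10(np.abs(response.complex_magnitude)))
    True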
Noise spectral densities
~~~~~~~~~~~~~~~~~~~~~~~~
:class:`Noise spectral densities <.data.NoiseDensity>` contain the noise at a particular component
or node arising from noise produced by another component or node. They contain the :class:`noise
source <.components.Noise>` that produces the noise and a reference to the component or node that
the noise is measured at, and its units. :class:`Multi-noise spectra <.data.MultiNoiseDensity>`
contain a list of multiple noise sources; these are used to represent noise sums.
The noise spectral density's underlying data is stored in its
:attr:`~.NoiseDensityBase.spectral_density` property.

.. code-block:: python

    >>> noise.spectral_density
    array([1.29259971e-07, 1.00870891e-07, 8.45132667e-08, 7.57294937e-08,
           7.12855936e-08, 6.91259094e-08, 6.81002020e-08, 6.76188164e-08,
           6.73941734e-08, 6.72894850e-08])
Labels
~~~~~~
Functions can have labels that are used in plot legends and when :ref:`searching for functions in a
solution <solution/index:Retrieving functions>`.
Labels can be set for functions using their :attr:`~.data.BaseFunction.label` property. If no label
is set by the user, a default label is produced using the function's source and sink in the case of
single-source and -sink functions, or "Incoherent sum" for :class:`noise sums <.MultiNoiseDensity>`.
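For example, a custom label can be assigned before plotting (the label text here is arbitrary):

.. code-block:: python

    >>> response.label = "Whitening filter response"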
Mathematical operations
~~~~~~~~~~~~~~~~~~~~~~~
The underlying data within a function can be multiplied, divided and inverted by applying
mathematical operations to the function object. Multiplication and division can be applied using
scalars or other functions. For example, :ref:`noise spectra <data/index:Noise spectral densities>`
can be multiplied by :ref:`responses <data/index:Responses>` to project noise to a different part of
a circuit (used for example to :ref:`refer noise to the circuit input <analyses/ac/noise:Referring
noise to the input>`).
When an operation involves two functions, the units of each function are checked for validity. As
determined by the order of operation, the left function's sink must have the same units as the
right function's source. The resulting function then takes the left function's source and the right
function's sink.

.. hint::

    While the inner sources and sinks of such operations must have the same units, they do not need
    to be the same :class:`element <.BaseElement>`. This is to allow functions to be lightweight and
    not have to maintain a reference to the component, node or noise source objects they originally
    represented (rather, just their label). It is up to the user to check that each operation makes
    physical sense.
Some operations are not possible, such as multiplying noise by noise. In these cases, a
:class:`ValueError` is raised.
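As a sketch of a valid operation, assume ``noise`` is a noise spectral density whose sink has the
same units as the source of ``response``, both retrieved from a solution:

.. code-block:: python

    >>> projected = noise * response  # e.g. project input-referred noise to the output
    >>> scaled = projected * 2.0      # scalar operations are also supported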